././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1742224624.173563 multidict-6.2.0/0000755000175100001660000000000014766036360013133 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/.coveragerc0000644000175100001660000000120514766036355015256 0ustar00runnerdocker[html] show_contexts = true skip_covered = false [paths] _site-packages-to-src-mapping = . */lib/pypy*/site-packages */lib/python*/site-packages *\Lib\site-packages [report] exclude_also = if TYPE_CHECKING assert False : \.\.\.(\s*#.*)?$ ^ +\.\.\.$ # fail_under = 98.95 skip_covered = true skip_empty = true show_missing = true [run] branch = true cover_pylib = false # https://coverage.rtfd.io/en/latest/contexts.html#dynamic-contexts # dynamic_context = test_function # conflicts with `pytest-cov` if set here parallel = true # plugins = # covdefaults relative_files = true source = . source_pkgs = multidict ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742224624.1595628 multidict-6.2.0/CHANGES/0000755000175100001660000000000014766036360014203 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/CHANGES/.TEMPLATE.rst0000644000175100001660000000467514766036355016246 0ustar00runnerdocker{# TOWNCRIER TEMPLATE #} *({{ versiondata.date }})* {% for section, _ in sections.items() %} {% set underline = underlines[0] %}{% if section %}{{section}} {{ underline * section|length }}{% set underline = underlines[1] %} {% endif %} {% if sections[section] %} {% for category, val in definitions.items() if category in sections[section]%} {{ definitions[category]['name'] }} {{ underline * definitions[category]['name']|length }} {% if definitions[category]['showcontent'] %} {% for text, change_note_refs in sections[section][category].items() %} - {{ text }} {{- '\n' * 2 -}} {#- NOTE: Replacing 'e' with 'f' is a hack that prevents Jinja's `int` NOTE: filter internal implementation from treating the input as an NOTE: infinite float when it looks like a scientific notation (with a NOTE: single 'e' char in between digits), raising an `OverflowError`, NOTE: subsequently. 'f' is still a hex letter so it won't affect the NOTE: check for whether it's a (short or long) commit hash or not. Ref: https://github.com/pallets/jinja/issues/1921 -#} {%- set pr_issue_numbers = change_note_refs | map('lower') | map('replace', 'e', 'f') | map('int', default=None) | select('integer') | map('string') | list -%} {%- set arbitrary_refs = [] -%} {%- set commit_refs = [] -%} {%- with -%} {%- set commit_ref_candidates = change_note_refs | reject('in', pr_issue_numbers) -%} {%- for cf in commit_ref_candidates -%} {%- if cf | length in (7, 8, 40) and cf | int(default=None, base=16) is not none -%} {%- set _ = commit_refs.append(cf) -%} {%- else -%} {%- set _ = arbitrary_refs.append(cf) -%} {%- endif -%} {%- endfor -%} {%- endwith -%} {% if pr_issue_numbers %} *Related issues and pull requests on GitHub:* :issue:`{{ pr_issue_numbers | join('`, :issue:`') }}`. {{- '\n' * 2 -}} {%- endif -%} {% if commit_refs %} *Related commits on GitHub:* :commit:`{{ commit_refs | join('`, :commit:`') }}`. {{- '\n' * 2 -}} {%- endif -%} {% if arbitrary_refs %} *Unlinked references:* {{ arbitrary_refs | join(', ') }}. 
{{- '\n' * 2 -}} {%- endif -%} {% endfor %} {% else %} - {{ sections[section][category]['']|join(', ') }} {% endif %} {% if sections[section][category]|length == 0 %} No significant changes. {% else %} {% endif %} {% endfor %} {% else %} No significant changes. {% endif %} {% endfor %} ---- {{ '\n' * 2 }} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/CHANGES/.gitignore0000644000175100001660000000057614766036355016207 0ustar00runnerdocker* !.TEMPLATE.rst !.gitignore !README.rst !*.bugfix !*.bugfix.rst !*.bugfix.*.rst !*.breaking !*.breaking.rst !*.breaking.*.rst !*.contrib !*.contrib.rst !*.contrib.*.rst !*.deprecation !*.deprecation.rst !*.deprecation.*.rst !*.doc !*.doc.rst !*.doc.*.rst !*.feature !*.feature.rst !*.feature.*.rst !*.misc !*.misc.rst !*.misc.*.rst !*.packaging !*.packaging.rst !*.packaging.*.rst ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/CHANGES/README.rst0000644000175100001660000001050614766036355015700 0ustar00runnerdocker.. _Adding change notes with your PRs: Adding change notes with your PRs ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ It is very important to maintain a log for news of how updating to the new version of the software will affect end-users. This is why we enforce collection of the change fragment files in pull requests as per `Towncrier philosophy`_. The idea is that when somebody makes a change, they must record the bits that would affect end-users, only including information that would be useful to them. Then, when the maintainers publish a new release, they'll automatically use these records to compose a change log for the respective version. It is important to understand that including unnecessary low-level implementation related details generates noise that is not particularly useful to the end-users most of the time. And so such details should be recorded in the Git history rather than a changelog. Alright! So how to add a news fragment? ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ``multidict`` uses `towncrier `_ for changelog management. To submit a change note about your PR, add a text file into the ``CHANGES/`` folder. It should contain an explanation of what applying this PR will change in the way end-users interact with the project. One sentence is usually enough but feel free to add as many details as you feel necessary for the users to understand what it means. **Use the past tense** for the text in your fragment because, combined with others, it will be a part of the "news digest" telling the readers **what changed** in a specific version of the library *since the previous version*. You should also use *reStructuredText* syntax for highlighting code (inline or block), linking parts of the docs or external sites. However, you do not need to reference the issue or PR numbers here as *towncrier* will automatically add a reference to all of the affected issues when rendering the news file. If you wish to sign your change, feel free to add ``-- by :user:`github-username``` at the end (replace ``github-username`` with your own!). Finally, name your file following the convention that Towncrier understands: it should start with the number of an issue or a PR followed by a dot, then add a patch type, like ``feature``, ``doc``, ``contrib`` etc., and add ``.rst`` as a suffix. If you need to add more than one fragment, you may add an optional sequence number (delimited with another period) between the type and the suffix. 
In general the name will follow ``..rst`` pattern, where the categories are: - ``bugfix``: A bug fix for something we deemed an improper undesired behavior that got corrected in the release to match pre-agreed expectations. - ``feature``: A new behavior, public APIs. That sort of stuff. - ``deprecation``: A declaration of future API removals and breaking changes in behavior. - ``breaking``: When something public gets removed in a breaking way. Could be deprecated in an earlier release. - ``doc``: Notable updates to the documentation structure or build process. - ``packaging``: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. - ``contrib``: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. - ``misc``: Changes that are hard to assign to any of the above categories. A pull request may have more than one of these components, for example a code change may introduce a new feature that deprecates an old feature, in which case two fragments should be added. It is not necessary to make a separate documentation fragment for documentation changes accompanying the relevant code changes. Examples for adding changelog entries to your Pull Requests ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File :file:`CHANGES/603.removal.1.rst`: .. code-block:: rst Dropped Python 3.5 support; Python 3.6 is the minimal supported Python version. File :file:`CHANGES/550.bugfix.rst`: .. code-block:: rst Started shipping Windows wheels for the x86 architecture. File :file:`CHANGES/553.feature.rst`: .. code-block:: rst Added support for ``GenericAliases`` (``MultiDict[str]``) under Python 3.9 and higher. .. _Towncrier philosophy: https://towncrier.readthedocs.io/en/stable/#philosophy ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/CHANGES.rst0000644000175100001660000002617314766036355014752 0ustar00runnerdocker========= Changelog ========= .. You should *NOT* be adding new change log entries to this file, this file is managed by towncrier. You *may* edit previous change logs to fix problems like typo corrections or such. To add a new change log entry, please see https://pip.pypa.io/en/latest/development/#adding-a-news-entry we named the news folder "changes". WARNING: Don't drop the next directive! .. towncrier release notes start 6.2.0 ====== *(2025-03-17)* Bug fixes --------- - Fixed ``in`` checks throwing an exception instead of returning :data:`False` when testing non-strings. *Related issues and pull requests on GitHub:* :issue:`1045`. - Fix a leak when the last accessed module in ``PyInit__multidict`` init is not released. *Related issues and pull requests on GitHub:* :issue:`1061`. Features -------- - Implemented support for the free-threaded build of CPython 3.13 -- by :user:`lysnikolaou`. *Related issues and pull requests on GitHub:* :issue:`1015`. Packaging updates and notes for downstreams ------------------------------------------- - Started publishing wheels made for the free-threaded build of CPython 3.13 -- by :user:`lysnikolaou`. *Related issues and pull requests on GitHub:* :issue:`1015`. Miscellaneous internal changes ------------------------------ - Used stricter typing across the code base, resulting in improved typing accuracy across multidict classes. Funded by an ``NLnet`` grant. *Related issues and pull requests on GitHub:* :issue:`1046`. 
---- 6.1.0 (2024-09-09) ================== Bug fixes --------- - Covered the unreachable code path in ``multidict._multidict_base._abc_itemsview_register()`` with typing -- by :user:`skinnyBat`. *Related issues and pull requests on GitHub:* :issue:`928`. Features -------- - Added support for Python 3.13 -- by :user:`bdraco`. *Related issues and pull requests on GitHub:* :issue:`1002`. Removals and backward incompatible breaking changes --------------------------------------------------- - Removed Python 3.7 support -- by :user:`bdraco`. *Related issues and pull requests on GitHub:* :issue:`997`. Contributor-facing changes -------------------------- - Added tests to have full code coverage of the ``multidict._multidict_base._viewbaseset_richcmp()`` function -- by :user:`skinnyBat`. *Related issues and pull requests on GitHub:* :issue:`928`. - `The deprecated `_ ``::set-output`` workflow command has been replaced by the ``$GITHUB_OUTPUT`` environment variable in the GitHub Actions CI/CD workflow definition. *Related issues and pull requests on GitHub:* :issue:`940`. - `codecov-action `_ has been temporarily downgraded to ``v3`` in the GitHub Actions CI/CD workflow definitions in order to fix uploading coverage to `Codecov `_. See `this issue `_ for more details. *Related issues and pull requests on GitHub:* :issue:`941`. - In the GitHub Actions CI/CD workflow definition, the ``Get pip cache dir`` step has been fixed for Windows runners by adding ``shell: bash``. See `actions/runner#2224 `_ for more details. *Related issues and pull requests on GitHub:* :issue:`942`. - Interpolation of the ``pip`` cache keys has been fixed by adding missing ``$`` syntax in the GitHub Actions CI/CD workflow definition. *Related issues and pull requests on GitHub:* :issue:`943`. ---- 6.0.5 (2024-02-01) ================== Bug fixes --------- - Upgraded the C-API macros that have been deprecated in Python 3.9 and later removed in 3.13 -- by :user:`iemelyanov`. *Related issues and pull requests on GitHub:* :issue:`862`, :issue:`864`, :issue:`868`, :issue:`898`. - Reverted to using the public argument parsing API :c:func:`PyArg_ParseTupleAndKeywords` under Python 3.12 -- by :user:`charles-dyfis-net` and :user:`webknjaz`. The effect is that this change prevents build failures with clang 16.9.6 and gcc-14 reported in :issue:`926`. It also fixes a segmentation fault crash caused by passing keyword arguments to :py:meth:`MultiDict.getall() ` discovered by :user:`jonaslb` and :user:`hroncok` while examining the problem. *Related issues and pull requests on GitHub:* :issue:`862`, :issue:`909`, :issue:`926`, :issue:`929`. - Fixed a ``SystemError: null argument to internal routine`` error on a ``MultiDict.items().isdisjoint()`` call when using C Extensions. *Related issues and pull requests on GitHub:* :issue:`927`. Improved documentation ---------------------- - On the `Contributing docs `_ page, a link to the ``Towncrier philosophy`` has been fixed. *Related issues and pull requests on GitHub:* :issue:`911`. Packaging updates and notes for downstreams ------------------------------------------- - Stopped marking all files as installable package data -- by :user:`webknjaz`. This change helps ``setuptools`` understand that C-headers are not to be installed under :file:`lib/python3.{x}/site-packages/`. *Related commits on GitHub:* :commit:`31e1170`. - Started publishing pure-python wheels to be installed as a fallback -- by :user:`webknjaz`. *Related commits on GitHub:* :commit:`7ba0e72`. 
- Switched from ``setuptools``' legacy backend (``setuptools.build_meta:__legacy__``) to the modern one (``setuptools.build_meta``) by actually specifying the the ``[build-system] build-backend`` option in :file:`pyproject.toml` -- by :user:`Jackenmen`. *Related issues and pull requests on GitHub:* :issue:`802`. - Declared Python 3.12 supported officially in the distribution package metadata -- by :user:`hugovk`. *Related issues and pull requests on GitHub:* :issue:`877`. Contributor-facing changes -------------------------- - The test framework has been refactored. In the previous state, the circular imports reported in :issue:`837` caused the C-extension tests to be skipped. Now, there is a set of the ``pytest`` fixtures that is set up in a parametrized manner allowing to have a consistent way of accessing mirrored ``multidict`` implementations across all the tests. This change also implemented a pair of CLI flags (``--c-extensions`` / ``--no-c-extensions``) that allow to explicitly request deselecting the tests running against the C-extension. -- by :user:`webknjaz`. *Related issues and pull requests on GitHub:* :issue:`98`, :issue:`837`, :issue:`915`. - Updated the test pins lockfile used in the ``cibuildwheel`` test stage -- by :user:`hoodmane`. *Related issues and pull requests on GitHub:* :issue:`827`. - Added an explicit ``void`` for arguments in C-function signatures which addresses the following compiler warning: .. code-block:: console warning: a function declaration without a prototype is deprecated in all versions of C [-Wstrict-prototypes] -- by :user:`hoodmane` *Related issues and pull requests on GitHub:* :issue:`828`. - An experimental Python 3.13 job now runs in the CI -- :user:`webknjaz`. *Related issues and pull requests on GitHub:* :issue:`920`. - Added test coverage for the :ref:`and `, :ref:`or `, :py:obj:`sub `, and :py:obj:`xor ` operators in the :file:`multidict/_multidict_base.py` module. It also covers :py:data:`NotImplemented` and ":py:class:`~typing.Iterable`-but-not-:py:class:`~typing.Set`" cases there. -- by :user:`a5r0n` *Related issues and pull requests on GitHub:* :issue:`936`. - The version of pytest is now capped below 8, when running MyPy against Python 3.7. This pytest release dropped support for said runtime. *Related issues and pull requests on GitHub:* :issue:`937`. ---- 6.0.4 (2022-12-24) ================== Bugfixes -------- - Fixed a type annotations regression introduced in v6.0.2 under Python versions <3.10. It was caused by importing certain types only available in newer versions. (:issue:`798`) 6.0.3 (2022-12-03) ================== Features -------- - Declared the official support for Python 3.11 — by :user:`mlegner`. (:issue:`872`) 6.0.2 (2022-01-24) ================== Bugfixes -------- - Revert :issue:`644`, restore type annotations to as-of 5.2.0 version. (:issue:`688`) 6.0.1 (2022-01-23) ================== Bugfixes -------- - Restored back ``MultiDict``, ``CIMultiDict``, ``MultiDictProxy``, and ``CIMutiDictProxy`` generic type arguments; they are parameterized by value type, but the key type is fixed by container class. ``MultiDict[int]`` means ``MutableMultiMapping[str, int]``. The key type of ``MultiDict`` is always ``str``, while all str-like keys are accepted by API and converted to ``str`` internally. The same is true for ``CIMultiDict[int]`` which means ``MutableMultiMapping[istr, int]``. str-like keys are accepted but converted to ``istr`` internally. 
(:issue:`682`) 6.0.0 (2022-01-22) ================== Features -------- - Use ``METH_FASTCALL`` where it makes sense. ``MultiDict.add()`` is 2.2 times faster now, ``CIMultiDict.add()`` is 1.5 times faster. The same boost is applied to ``get*()``, ``setdefault()``, and ``pop*()`` methods. (:issue:`681`) Bugfixes -------- - Fixed type annotations for keys of multidict mapping classes. (:issue:`644`) - Support Multidict[int] for pure-python version. ``__class_getitem__`` is already provided by C Extension, making it work with the pure-extension too. (:issue:`678`) Deprecations and Removals ------------------------- - Dropped Python 3.6 support (:issue:`680`) Misc ---- - :issue:`659` 5.2.0 (2021-10-03) ===================== Features -------- - 1. Added support Python 3.10 2. Started shipping platform-specific wheels with the ``musl`` tag targeting typical Alpine Linux runtimes. 3. Started shipping platform-specific arm64 wheels for Apple Silicon. (:issue:`629`) Bugfixes -------- - Fixed pure-python implementation that used to raise "Dictionary changed during iteration" error when iterated view (``.keys()``, ``.values()`` or ``.items()``) was created before the dictionary's content change. (:issue:`620`) 5.1.0 (2020-12-03) ================== Features -------- - Supported ``GenericAliases`` (``MultiDict[str]``) for Python 3.9+ :issue:`553` Bugfixes -------- - Synchronized the declared supported Python versions in ``setup.py`` with actually supported and tested ones. :issue:`552` ---- 5.0.1 (2020-11-14) ================== Bugfixes -------- - Provided x86 Windows wheels :issue:`550` ---- 5.0.0 (2020-10-12) ================== Features -------- - Provided wheels for ``aarch64``, ``i686``, ``ppc64le``, ``s390x`` architectures on Linux as well as ``x86_64``. :issue:`500` - Provided wheels for Python 3.9. :issue:`534` Removal ------- - Dropped Python 3.5 support; Python 3.6 is the minimal supported Python version. Misc ---- - :issue:`503` ---- ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/LICENSE0000644000175100001660000000114314766036355014143 0ustar00runnerdocker Copyright 2016 Andrew Svetlov and aio-libs contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/MANIFEST.in0000644000175100001660000000056414766036355014702 0ustar00runnerdockerinclude .coveragerc include pyproject.toml include pytest.ini include LICENSE include CHANGES.rst include README.rst include Makefile graft multidict graft docs graft CHANGES graft requirements graft tests global-exclude *.pyc include multidict/*.c exclude multidict/_multidict.html exclude multidict/*.so exclude multidict/*.pyd exclude multidict/*.pyd prune docs/_build ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/Makefile0000644000175100001660000000365014766036355014603 0ustar00runnerdocker# Some simple testing tasks (sorry, UNIX only). 
.PHONY: all build test vtest cov clean doc PYXS = $(wildcard multidict/*.pyx) SRC = multidict tests setup.py all: test .install-deps: $(shell find requirements -type f) pip install -r requirements/dev.txt @touch .install-deps isort-check: @if ! isort --check $(SRC); then \ echo "Import sort errors, run 'make fmt' to fix them!!!"; \ isort --diff --check $(SRC); \ false; \ fi black-check: @if ! black -t py35 --check $(SRC); then \ echo "black errors, run 'make fmt' to fix them!!!"; \ black -t py35 --diff --check $(SRC); \ false; \ fi lint: black-check isort-check python -Im pre_commit run --all-files --show-diff-on-failure fmt: black -t py35 $(SRC) isort $(SRC) .develop: .install-deps $(shell find multidict -type f) pip install -e . @touch .develop test: .develop @pytest -q vtest: .develop @pytest -s -v cov-dev: .develop @pytest --cov-report=html @echo "open file://`pwd`/htmlcov/index.html" cov-ci-run: .develop @echo "Regular run" @pytest --cov-report=html cov-dev-full: cov-ci-run @echo "open file://`pwd`/htmlcov/index.html" doc: @make -C docs html SPHINXOPTS="-W -n --keep-going -E" @echo "open file://`pwd`/docs/_build/html/index.html" doc-spelling: @make -C docs spelling SPHINXOPTS="-W -n --keep-going -E" install: @pip install -U 'pip' @pip install -Ur requirements/dev.txt install-dev: .develop clean: rm -rf `find . -name __pycache__` rm -f `find . -type f -name '*.py[co]' ` rm -f `find . -type f -name '*~' ` rm -f `find . -type f -name '.*~' ` rm -f `find . -type f -name '@*' ` rm -f `find . -type f -name '#*#' ` rm -f `find . -type f -name '*.orig' ` rm -f `find . -type f -name '*.rej' ` rm -f .coverage rm -rf coverage rm -rf build rm -rf cover rm -rf htmlcov make -C docs clean SPHINXBUILD=false python3 setup.py clean rm -f multidict/*.html rm -f multidict/*.so rm -f multidict/*.pyd rm -rf .tox ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1742224624.173563 multidict-6.2.0/PKG-INFO0000644000175100001660000001147714766036360014240 0ustar00runnerdockerMetadata-Version: 2.2 Name: multidict Version: 6.2.0 Summary: multidict implementation Home-page: https://github.com/aio-libs/multidict Author: Andrew Svetlov Author-email: andrew.svetlov@gmail.com License: Apache 2 Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org Project-URL: CI: GitHub, https://github.com/aio-libs/multidict/actions Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/multidict Project-URL: Docs: Changelog, https://multidict.aio-libs.org/en/latest/changes/ Project-URL: Docs: RTD, https://multidict.aio-libs.org Project-URL: GitHub: issues, https://github.com/aio-libs/multidict/issues Project-URL: GitHub: repo, https://github.com/aio-libs/multidict Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: Apache Software License Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Classifier: Programming Language :: Python :: 3.12 Classifier: Programming Language :: Python :: 3.13 Requires-Python: >=3.9 Description-Content-Type: text/x-rst License-File: LICENSE Requires-Dist: typing-extensions>=4.1.0; python_version < "3.11"
========= multidict ========= .. image:: https://github.com/aio-libs/multidict/actions/workflows/ci-cd.yml/badge.svg :target: https://github.com/aio-libs/multidict/actions :alt: GitHub status for master branch .. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg :target: https://codecov.io/gh/aio-libs/multidict :alt: Coverage metrics .. image:: https://img.shields.io/pypi/v/multidict.svg :target: https://pypi.org/project/multidict :alt: PyPI .. image:: https://readthedocs.org/projects/multidict/badge/?version=latest :target: https://multidict.aio-libs.org :alt: Read The Docs build status badge .. image:: https://img.shields.io/pypi/pyversions/multidict.svg :target: https://pypi.org/project/multidict :alt: Python versions .. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat :target: https://matrix.to/#/%23aio-libs:matrix.org :alt: Matrix Room — #aio-libs:matrix.org .. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat :target: https://matrix.to/#/%23aio-libs-space:matrix.org :alt: Matrix Space — #aio-libs-space:matrix.org Multidict is dict-like collection of *key-value pairs* where key might occur more than once in the container. Introduction ------------ *HTTP Headers* and *URL query string* require specific data structure: *multidict*. It behaves mostly like a regular ``dict`` but it may have several *values* for the same *key* and *preserves insertion ordering*. The *key* is ``str`` (or ``istr`` for case-insensitive dictionaries). ``multidict`` has four multidict classes: ``MultiDict``, ``MultiDictProxy``, ``CIMultiDict`` and ``CIMultiDictProxy``. Immutable proxies (``MultiDictProxy`` and ``CIMultiDictProxy``) provide a dynamic view for the proxied multidict, the view reflects underlying collection changes. They implement the ``collections.abc.Mapping`` interface. Regular mutable (``MultiDict`` and ``CIMultiDict``) classes implement ``collections.abc.MutableMapping`` and allows them to change their own content. *Case insensitive* (``CIMultiDict`` and ``CIMultiDictProxy``) assume the *keys* are case insensitive, e.g.:: >>> dct = CIMultiDict(key='val') >>> 'Key' in dct True >>> dct['Key'] 'val' *Keys* should be ``str`` or ``istr`` instances. The library has optional C Extensions for speed. License ------- Apache 2 Library Installation -------------------- .. code-block:: bash $ pip install multidict The library is Python 3 only! PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install ``multidict`` on another operating system (or *Alpine Linux* inside a Docker) the tarball will be used to compile the library from source. It requires a C compiler and Python headers to be installed. To skip the compilation, please use the `MULTIDICT_NO_EXTENSIONS` environment variable, e.g.: .. code-block:: bash $ MULTIDICT_NO_EXTENSIONS=1 pip install multidict Please note, the pure Python (uncompiled) version is about 20-50 times slower depending on the usage scenario!!! Changelog --------- See `RTD page `_. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/README.rst0000644000175100001660000000647414766036355014641 0ustar00runnerdocker========= multidict ========= .. 
image:: https://github.com/aio-libs/multidict/actions/workflows/ci-cd.yml/badge.svg :target: https://github.com/aio-libs/multidict/actions :alt: GitHub status for master branch .. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg :target: https://codecov.io/gh/aio-libs/multidict :alt: Coverage metrics .. image:: https://img.shields.io/pypi/v/multidict.svg :target: https://pypi.org/project/multidict :alt: PyPI .. image:: https://readthedocs.org/projects/multidict/badge/?version=latest :target: https://multidict.aio-libs.org :alt: Read The Docs build status badge .. image:: https://img.shields.io/pypi/pyversions/multidict.svg :target: https://pypi.org/project/multidict :alt: Python versions .. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat :target: https://matrix.to/#/%23aio-libs:matrix.org :alt: Matrix Room — #aio-libs:matrix.org .. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat :target: https://matrix.to/#/%23aio-libs-space:matrix.org :alt: Matrix Space — #aio-libs-space:matrix.org Multidict is a dict-like collection of *key-value pairs* where a key might occur more than once in the container. Introduction ------------ *HTTP Headers* and *URL query string* require a specific data structure: *multidict*. It behaves mostly like a regular ``dict`` but it may have several *values* for the same *key* and *preserves insertion ordering*. The *key* is ``str`` (or ``istr`` for case-insensitive dictionaries). ``multidict`` has four multidict classes: ``MultiDict``, ``MultiDictProxy``, ``CIMultiDict`` and ``CIMultiDictProxy``. Immutable proxies (``MultiDictProxy`` and ``CIMultiDictProxy``) provide a dynamic view of the proxied multidict; the view reflects underlying collection changes. They implement the ``collections.abc.Mapping`` interface. Regular mutable (``MultiDict`` and ``CIMultiDict``) classes implement ``collections.abc.MutableMapping`` and allow changing their own content. *Case insensitive* versions (``CIMultiDict`` and ``CIMultiDictProxy``) assume the *keys* are case insensitive, e.g.:: >>> dct = CIMultiDict(key='val') >>> 'Key' in dct True >>> dct['Key'] 'val' *Keys* should be ``str`` or ``istr`` instances. The library has optional C Extensions for speed. License ------- Apache 2 Library Installation -------------------- .. code-block:: bash $ pip install multidict The library is Python 3 only! PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install ``multidict`` on another operating system (or *Alpine Linux* inside a Docker container), the tarball will be used to compile the library from source. It requires a C compiler and Python headers to be installed. To skip the compilation, please use the `MULTIDICT_NO_EXTENSIONS` environment variable, e.g.: .. code-block:: bash $ MULTIDICT_NO_EXTENSIONS=1 pip install multidict Please note that the pure Python (uncompiled) version is about 20-50 times slower depending on the usage scenario!!! Changelog --------- See `RTD page `_.
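Usage sketch
------------

The snippet below is a small, self-contained sketch of the behavior described
above (duplicate keys, first-value lookups, and case-insensitive ``CIMultiDict``
keys); it uses only the public API documented in the reference:

.. code-block:: python

    from multidict import CIMultiDict, MultiDict

    d = MultiDict([('a', 1), ('b', 2)])
    d.add('a', 3)                   # a second value for the same key is kept
    assert d['a'] == 1              # plain indexing returns the *first* value
    assert d.getall('a') == [1, 3]  # all values for the key, in insertion order

    ci = CIMultiDict(key='val')
    assert 'KEY' in ci              # lookups ignore case
    assert ci.getone('Key') == 'val'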
././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1742224624.161563 multidict-6.2.0/docs/0000755000175100001660000000000014766036360014063 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/docs/Makefile0000644000175100001660000001533314766036355015534 0ustar00runnerdocker# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." 
htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/aiohttp.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/aiohttp.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/aiohttp" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/aiohttp" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." spelling: $(SPHINXBUILD) -b spelling $(ALLSPHINXOPTS) $(BUILDDIR)/spelling @echo @echo "Build finished." 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/docs/benchmark.rst0000644000175100001660000000322414766036355016554 0ustar00runnerdocker.. _benchmarking-reference: ========== Benchmarks ========== Introduction ------------ Benchmarks make it possible to track performance from release to release and to verify that the latest changes have not affected it drastically. Benchmarks are based on the :doc:`pyperf:index`. How to run ---------- The dependencies from ``requirements/dev.txt`` should be installed before running the benchmarks. Please also make sure that you have :doc:`configured ` your OS to have reliable results. To run the benchmarks, the following command can be executed: .. code-block:: bash $ python benchmarks/benchmark.py This would run benchmarks for both classes (:class:`~multidict.MultiDict` and :class:`~multidict.CIMultiDict`) of both implementations (pure-Python and C). To run benchmarks for a specific class of a specific implementation, please use the ``--impl`` option: .. code-block:: bash $ python benchmarks/benchmark.py --impl multidict_c would run benchmarks only for :class:`~multidict.MultiDict` implemented in C. Please use ``--help`` to see all available options. Most of the options are described in the :doc:`perf's Runner ` documentation. How to compare implementations ------------------------------ The ``--impl`` option allows running benchmarks for a specific implementation of a class. Combined with the :ref:`compare_to ` command of :doc:`pyperf:index`, we can get a good picture of how each implementation performs: .. code-block:: bash $ python benchmarks/benchmark.py --impl multidict_c -o multidict_cy.json $ python benchmarks/benchmark.py --impl multidict_py -o multidict_py.json $ python -m perf compare_to multidict_cy.json multidict_py.json ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/docs/changes.rst0000644000175100001660000000060414766036355016231 0ustar00runnerdocker.. _multidict_changes: ========= Changelog ========= .. only:: not is_release To be included in v\ |release| (if present) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. towncrier-draft-entries:: |release| [UNRELEASED DRAFT] Released versions ^^^^^^^^^^^^^^^^^ .. include:: ../CHANGES.rst :start-after: .. towncrier release notes start .. include:: ../HISTORY.rst ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/docs/conf.py0000644000175100001660000003060214766036355015367 0ustar00runnerdocker#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # multidict documentation build configuration file, created by # sphinx-quickstart on Wed Mar 5 12:35:35 2014. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default.
from __future__ import annotations import os import re from contextlib import suppress from pathlib import Path import alabaster from sphinx.addnodes import pending_xref from sphinx.application import Sphinx from sphinx.environment import BuildEnvironment # isort: split from docutils.nodes import literal, reference PROJECT_ROOT_DIR = Path(__file__).parents[1].resolve() IS_RELEASE_ON_RTD = ( os.getenv("READTHEDOCS", "False") == "True" and os.environ["READTHEDOCS_VERSION_TYPE"] == "tag" ) if IS_RELEASE_ON_RTD: tags: set[str] tags.add("is_release") # noqa: F821 _docs_path = Path(__file__).parent _version_path = _docs_path / ".." / "multidict" / "__init__.py" with _version_path.open(encoding="utf-8") as fp: _version_search_result = re.search( r'^__version__ = "' r"(?P\d+)" r"\.(?P\d+)" r"\.(?P\d+)" r'(?P.*)?"$', fp.read(), re.M, ) if _version_search_result is None: raise RuntimeError("Unable to determine version.") try: _version_info = _version_search_result.groupdict() except IndexError: raise RuntimeError("Unable to determine version.") # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ # stdlib-party extensions: "sphinx.ext.extlinks", "sphinx.ext.intersphinx", "sphinx.ext.viewcode", # Third-party extensions: "alabaster", "sphinxcontrib.towncrier.ext", # provides `towncrier-draft-entries` directive ] with suppress(ImportError): # spelling extension is optional, only add it when installed import sphinxcontrib.spelling # noqa extensions.append("sphinxcontrib.spelling") intersphinx_mapping = { "pyperf": ("https://pyperf.readthedocs.io/en/latest", None), "python": ("http://docs.python.org/3", None), "aiohttp": ("https://aiohttp.readthedocs.io/en/stable/", None), } # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix of source filenames. source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = "index" # General information about the project. github_url = "https://github.com" github_repo_org = "aio-libs" github_repo_name = "multidict" github_repo_slug = f"{github_repo_org}/{github_repo_name}" github_repo_url = f"{github_url}/{github_repo_slug}" github_sponsors_url = f"{github_url}/sponsors" project = github_repo_name copyright = "2016, Andrew Svetlov and aio-libs contributors" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = "{major}.{minor}".format(**_version_info) # The full version, including alpha/beta/rc tags. release = "{major}.{minor}.{patch}{tag}".format_map(_version_info) rst_epilog = f""" .. |project| replace:: {project} """ # pylint: disable=invalid-name # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. 
exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all # documents. default_role = "any" # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" # The default language to highlight source code in. highlight_language = "python" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # -- Extension configuration ------------------------------------------------- # -- Options for extlinks extension --------------------------------------- extlinks = { "issue": (f"{github_repo_url}/issues/%s", "#%s"), "pr": (f"{github_repo_url}/pull/%s", "PR #%s"), "commit": (f"{github_repo_url}/commit/%s", "%s"), "gh": (f"{github_url}/%s", "GitHub: %s"), "user": (f"{github_sponsors_url}/%s", "@%s"), } # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = "alabaster" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = { # 'logo': 'aiohttp-icon-128x128.png', "description": project, "github_user": github_repo_org, "github_repo": github_repo_name, "github_button": True, "github_type": "star", "github_banner": True, "codecov_button": True, "pre_bg": "#FFF6E5", "note_bg": "#E5ECD1", "note_border": "#BFCF8C", "body_text": "#482C0A", "sidebar_text": "#49443E", "sidebar_header": "#4B4032", } # Add any paths that contain custom themes here, relative to this directory. html_theme_path = [alabaster.get_path()] # type: ignore[no-untyped-call] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = 'aiohttp-icon.svg' # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = 'aiohttp-icon.ico' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. 
# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. html_sidebars = { "**": [ "about.html", "navigation.html", "searchbox.html", ] } # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = "multidictdoc" # -- Options for LaTeX output --------------------------------------------- latex_elements: dict[str, str] = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ("index", "multidict.tex", "multidict Documentation", "Andrew Svetlov", "manual"), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [("index", project, "multidict Documentation", ["Andrew Svetlov"], 1)] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( "index", project, "multidict Documentation", "Andrew Svetlov", project, "One line description of project.", "Miscellaneous", ), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. 
# texinfo_no_detailmenu = False # -- Strictness options -------------------------------------------------- nitpicky = True nitpick_ignore: list[str] = [] # -- Options for towncrier_draft extension ----------------------------------- towncrier_draft_autoversion_mode = "draft" # or: 'sphinx-version', 'sphinx-release' towncrier_draft_include_empty = True towncrier_draft_working_directory = PROJECT_ROOT_DIR def _replace_missing_aiohttp_hdrs_reference( app: Sphinx, env: BuildEnvironment, node: pending_xref, contnode: literal, ) -> "reference | None": if (node.get('refdomain'), node.get('reftype')) != ("py", "mod"): return None ref_target = node.get("reftarget", "") if ref_target != "aiohttp:aiohttp.hdrs": return None normalized_ref_target = "aiohttp:aiohttp.hdrs".split(":", 1)[-1] return reference( normalized_ref_target, normalized_ref_target, internal=False, refuri="https://github.com/aio-libs/aiohttp/blob/43f3e23/aiohttp/hdrs.py", ) def setup(app: Sphinx) -> dict[str, bool | str]: app.connect('missing-reference', _replace_missing_aiohttp_hdrs_reference) return { "version": "builtin", "parallel_read_safe": True, "parallel_write_safe": True, } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/docs/index.rst0000644000175100001660000000565014766036355015736 0ustar00runnerdocker.. aiohttp documentation master file, created by sphinx-quickstart on Wed Mar 5 12:35:35 2014. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. multidict ========= Multidicts are useful for working with HTTP headers, URL query args etc. The code was extracted from aiohttp library. Introduction ------------ *HTTP Headers* and *URL query string* require specific data structure: *multidict*. It behaves mostly like a regular :class:`dict` but it may have several *values* for the same *key* and *preserves insertion ordering*. The *key* is :class:`str` (or :class:`~multidict.istr` for case-insensitive dictionaries). :mod:`multidict` has four multidict classes: :class:`~multidict.MultiDict`, :class:`~multidict.MultiDictProxy`, :class:`~multidict.CIMultiDict` and :class:`~multidict.CIMultiDictProxy`. Immutable proxies (:class:`~multidict.MultiDictProxy` and :class:`~multidict.CIMultiDictProxy`) provide a dynamic view for the proxied multidict, the view reflects underlying collection changes. They implement the :class:`~collections.abc.Mapping` interface. Regular mutable (:class:`~multidict.MultiDict` and :class:`~multidict.CIMultiDict`) classes implement :class:`~collections.abc.MutableMapping` and allows to change their own content. *Case insensitive* (:class:`~multidict.CIMultiDict` and :class:`~multidict.CIMultiDictProxy`) ones assume the *keys* are case insensitive, e.g.:: >>> dct = CIMultiDict(key='val') >>> 'Key' in dct True >>> dct['Key'] 'val' *Keys* should be either :class:`str` or :class:`~multidict.istr` instance. The library has optional C Extensions for sake of speed. Library Installation -------------------- .. code-block:: bash $ pip install multidict The library is Python 3 only! PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install ``multidict`` on another operation system (or *Alpine Linux* inside a Docker) the Tarball will be used to compile the library from sources. It requires C compiler and Python headers installed. To skip the compilation please use the :envvar:`MULTIDICT_NO_EXTENSIONS` environment variable, e.g.: .. 
code-block:: bash $ MULTIDICT_NO_EXTENSIONS=1 pip install multidict Please note, Pure Python (uncompiled) version is about 20-50 times slower depending on the usage scenario!!! Source code ----------- The project is hosted on GitHub_ Please file an issue on the `bug tracker `_ if you have found a bug or have some suggestion in order to improve the library. Authors and License ------------------- The ``multidict`` package is written by Andrew Svetlov. It's *Apache 2* licensed and freely available. Contents -------- .. toctree:: multidict benchmark changes Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` .. _GitHub: https://github.com/aio-libs/multidict ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/docs/make.bat0000644000175100001660000001505714766036355015504 0ustar00runnerdocker@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. 
echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\aiohttp.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\aiohttp.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
goto end ) :end ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/docs/multidict.rst0000644000175100001660000002710714766036355016626 0ustar00runnerdocker.. _multidict-reference: ============ Reference ============ .. module:: multidict MultiDict ========= .. class:: MultiDict(**kwargs) MultiDict(mapping, **kwargs) MultiDict(iterable, **kwargs) Creates a mutable multidict instance. Accepted parameters are the same as for :class:`dict`. If the same key appears several times, every ``(key, value)`` pair is added, e.g.:: >>> d = MultiDict([('a', 1), ('b', 2), ('a', 3)]) >>> d .. method:: len(d) Return the number of items in multidict *d*. .. method:: d[key] Return the **first** item of *d* with key *key*. Raises a :exc:`KeyError` if key is not in the multidict. .. method:: d[key] = value Set ``d[key]`` to *value*. Replace all items where key is equal to *key* with a single item ``(key, value)``. .. method:: del d[key] Remove all items where key is equal to *key* from *d*. Raises a :exc:`KeyError` if *key* is not in the map. .. method:: key in d Return ``True`` if *d* has a key *key*, else ``False``. .. method:: key not in d Equivalent to ``not (key in d)``. .. method:: iter(d) Return an iterator over the keys of the dictionary. This is a shortcut for ``iter(d.keys())``. .. method:: add(key, value) Append the ``(key, value)`` pair to the dictionary. .. method:: clear() Remove all items from the dictionary. .. method:: copy() Return a shallow copy of the dictionary. .. method:: extend([other]) Extend the dictionary with the key/value pairs from *other*, appending the pairs to this dictionary. For existing keys, values are added. Returns ``None``. :meth:`extend` accepts either another dictionary object or an iterable of key/value pairs (as tuples or other iterables of length two). If keyword arguments are specified, the dictionary is then extended with those key/value pairs: ``d.extend(red=1, blue=2)``. Effectively the same as calling :meth:`add` for every ``(key, value)`` pair. Also see :meth:`update` for a version that replaces existing keys. .. method:: getone(key[, default]) Return the **first** value for *key* if *key* is in the dictionary, else *default*. Raises :exc:`KeyError` if *default* is not given and *key* is not found. ``d[key]`` is equivalent to ``d.getone(key)``. .. method:: getall(key[, default]) Return a list of all values for *key* if *key* is in the dictionary, else *default*. Raises :exc:`KeyError` if *default* is not given and *key* is not found. .. method:: get(key[, default]) Return the **first** value for *key* if *key* is in the dictionary, else *default*. If *default* is not given, it defaults to ``None``, so that this method never raises a :exc:`KeyError`. ``d.get(key)`` is equivalent to ``d.getone(key, None)``. .. method:: keys() Return a new view of the dictionary's keys. The view contains all keys, possibly with duplicates. .. method:: items() Return a new view of the dictionary's items (``(key, value)`` pairs). The view contains all items; multiple items can have the same key. .. method:: values() Return a new view of the dictionary's values. The view contains all values. .. method:: popone(key[, default]) If *key* is in the dictionary, remove it and return its **first** value, else return *default*. If *default* is not given and *key* is not in the dictionary, a :exc:`KeyError` is raised. .. versionadded:: 3.0 .. method:: pop(key[, default]) An alias for :meth:`popone`. .. versionchanged:: 3.0 Now only the *first* occurrence is removed (previously, all occurrences were). 
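As an illustrative sketch (the keys and values below are arbitrary; only the methods documented above are used), this is how :meth:`add`, :meth:`getall`, :meth:`getone` and :meth:`popone` interact::

   >>> d = MultiDict([('a', 1), ('b', 2)])
   >>> d.add('a', 3)        # the existing ('a', 1) pair is kept
   >>> d.getall('a')        # every value stored under 'a', in insertion order
   [1, 3]
   >>> d.getone('a')        # only the first value
   1
   >>> d.popone('a')        # removes and returns the first 'a' value only
   1
   >>> d.getall('a')
   [3]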
.. method:: popall(key[, default]) If *key* is in the dictionary, remove all occurrences and return a :class:`list` of all values in the corresponding order (as :meth:`getall` does). If *key* is not found and *default* is provided, return *default*. If *default* is not given and *key* is not in the dictionary, a :exc:`KeyError` is raised. .. versionadded:: 3.0 .. method:: popitem() Remove and return an arbitrary ``(key, value)`` pair from the dictionary. :meth:`popitem` is useful for destructively iterating over a dictionary, as often used in set algorithms. If the dictionary is empty, calling :meth:`popitem` raises a :exc:`KeyError`. .. method:: setdefault(key[, default]) If *key* is in the dictionary, return its **first** value. If not, insert *key* with a value of *default* and return *default*. *default* defaults to ``None``. .. method:: update([other]) Update the dictionary with the key/value pairs from *other*, overwriting existing keys. Returns ``None``. :meth:`update` accepts either another dictionary object or an iterable of key/value pairs (as tuples or other iterables of length two). If keyword arguments are specified, the dictionary is then updated with those key/value pairs: ``d.update(red=1, blue=2)``. Also see :meth:`extend` for a method that adds to existing keys rather than updating them. .. seealso:: :class:`MultiDictProxy` can be used to create a read-only view of a :class:`MultiDict`. CIMultiDict =========== .. class:: CIMultiDict(**kwargs) CIMultiDict(mapping, **kwargs) CIMultiDict(iterable, **kwargs) Create a case insensitive multidict instance. The behavior is the same as that of :class:`MultiDict`, but key comparisons are case insensitive, e.g.:: >>> dct = CIMultiDict(a='val') >>> 'A' in dct True >>> dct['A'] 'val' >>> dct['a'] 'val' >>> dct['b'] = 'new val' >>> dct['B'] 'new val' The class is inherited from :class:`MultiDict`. .. seealso:: :class:`CIMultiDictProxy` can be used to create a read-only view of a :class:`CIMultiDict`. MultiDictProxy ============== .. class:: MultiDictProxy(multidict) Create an immutable multidict proxy. It provides a dynamic view on the multidict’s entries, which means that when the multidict changes, the view reflects these changes. Raises :exc:`TypeError` if *multidict* is not a :class:`MultiDict` instance. .. method:: len(d) Return the number of items in multidict *d*. .. method:: d[key] Return the **first** item of *d* with key *key*. Raises a :exc:`KeyError` if key is not in the multidict. .. method:: key in d Return ``True`` if *d* has a key *key*, else ``False``. .. method:: key not in d Equivalent to ``not (key in d)``. .. method:: iter(d) Return an iterator over the keys of the dictionary. This is a shortcut for ``iter(d.keys())``. .. method:: copy() Return a shallow copy of the underlying multidict. .. method:: getone(key[, default]) Return the **first** value for *key* if *key* is in the dictionary, else *default*. Raises :exc:`KeyError` if *default* is not given and *key* is not found. ``d[key]`` is equivalent to ``d.getone(key)``. .. method:: getall(key[, default]) Return a list of all values for *key* if *key* is in the dictionary, else *default*. Raises :exc:`KeyError` if *default* is not given and *key* is not found. .. method:: get(key[, default]) Return the **first** value for *key* if *key* is in the dictionary, else *default*. If *default* is not given, it defaults to ``None``, so that this method never raises a :exc:`KeyError`. ``d.get(key)`` is equivalent to ``d.getone(key, None)``.
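As an illustrative sketch (the names are arbitrary), the dynamic-view behavior described above means that mutating the underlying :class:`MultiDict` is immediately visible through the proxy::

   >>> d = MultiDict([('a', 1)])
   >>> p = MultiDictProxy(d)
   >>> p['a']
   1
   >>> d.add('a', 2)        # mutate the underlying MultiDict, not the proxy
   >>> p.getall('a')        # the proxy reflects the change
   [1, 2]

..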
method:: keys() Return a new view of the dictionary's keys. The view contains all keys, possibly with duplicates. .. method:: items() Return a new view of the dictionary's items (``(key, value)`` pairs). The view contains all items; multiple items can have the same key. .. method:: values() Return a new view of the dictionary's values. The view contains all values. CIMultiDictProxy ================ .. class:: CIMultiDictProxy(multidict) Case insensitive version of :class:`MultiDictProxy`. Raises :exc:`TypeError` if *multidict* is not a :class:`CIMultiDict` instance. The class is inherited from :class:`MultiDictProxy`. Version ======= All multidicts have an internal version flag. It changes on every dict update, so the flag can be used for checks like cache expiration, etc. .. function:: getversion(mdict) Return the version of the given *mdict* object (works for proxies as well). The type of the returned value is opaque and should be used for equality tests only (``==`` and ``!=``); ordering is not supported, even though it is not explicitly prohibited. .. versionadded:: 3.0 .. seealso:: :pep:`509` istr ==== :class:`CIMultiDict` accepts :class:`str` as the *key* argument for dict lookups but internally uses case-folded (lower-cased) strings for the comparison. For more efficient processing, it should know whether the *key* is already case-folded so that it can skip the :meth:`~str.lower()` call. Performance-sensitive code may create case-folded string keys explicitly beforehand, e.g.:: >>> key = istr('Key') >>> key 'Key' >>> mdict = CIMultiDict(key='value') >>> key in mdict True >>> mdict[key] 'value' For performance, :class:`istr` strings should be created once and stored somewhere for later use; see :mod:`aiohttp:aiohttp.hdrs` for an example. .. class:: istr(object='') istr(bytes_or_buffer[, encoding[, errors]]) Create a new **case-folded** string object from the given *object*. If *encoding* or *errors* are specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of ``object.__str__()`` (if defined) or ``repr(object)``. *encoding* defaults to ``sys.getdefaultencoding()``. *errors* defaults to ``'strict'``. The class is inherited from :class:`str` and has all regular string methods. .. versionchanged:: 2.0 ``upstr()`` is a deprecated alias for :class:`istr`. .. versionchanged:: 3.7 :class:`istr` doesn't title-case its argument anymore but uses internal lower-cased data for fast case-insensitive comparison. Abstract Base Classes ===================== The module provides two ABCs: ``MultiMapping`` and ``MutableMultiMapping``. They are similar to :class:`collections.abc.Mapping` and :class:`collections.abc.MutableMapping` and are derived from them. .. versionadded:: 3.3 Typing ====== The library ships with embedded type annotations; mypy picks them up by default. :class:`MultiDict`, :class:`CIMultiDict`, :class:`MultiDictProxy`, and :class:`CIMultiDictProxy` are *generic* types; please use the corresponding notation for multidict value types, e.g. ``md: MultiDict[str] = MultiDict()``. The type of multidict keys is always :class:`str` or a class derived from a string. .. versionadded:: 3.7 Environment variables ===================== .. envvar:: MULTIDICT_NO_EXTENSIONS An environment variable that instructs the packaging scripts to skip compiling the C-extension based variant of :mod:`multidict`. 
When used in runtime, it instructs the pure-Python variant to be imported from the top-level :mod:`multidict` entry-point package, even when the C-extension implementation is available. .. caution:: The pure-Python (uncompiled) version is roughly 20-50 times slower than its C counterpart, depending on the way it's used. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/docs/spelling_wordlist.txt0000644000175100001660000000156314766036355020401 0ustar00runnerdockeraarch64 i686 ppc64le s390x x86_64 aiohttp args async autocalculated autodetection autogenerates autogeneration backend basename bugfixes cchardet cChardet changelog charset charsetdetect CPython criterias css ctor Ctrl cython Deprecations deallocation dev dict docstrings downstreams eof fallback fastpath filename gcc getitem github google gunicorn Gunicorn Indices inplace IP IPv ish istr iterable iterables javascript json keepalive keepalives keepaliving lockfile lookups manylinux middleware middlewares multidict multidicts Multidicts multipart Multipart mypy Nikolay param params performant pickable pre proxied pyenv pyinstaller pytest refactor refactored regex regexs repo runtime runtimes str subclassable subclassing subprotocol subprotocols Svetlov toolbar toolset tuples un uncompiled upstr url urlencoded urls utf websocket websockets Websockets wildcard Workflow wsgi ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742224624.1625628 multidict-6.2.0/multidict/0000755000175100001660000000000014766036360015131 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/multidict/__init__.py0000644000175100001660000000161614766036355017252 0ustar00runnerdocker"""Multidict implementation. HTTP Headers and URL query string require specific data structure: multidict. It behaves mostly like a dict but it can have several values for the same key. """ from typing import TYPE_CHECKING from ._abc import MultiMapping, MutableMultiMapping from ._compat import USE_EXTENSIONS __all__ = ( "MultiMapping", "MutableMultiMapping", "MultiDictProxy", "CIMultiDictProxy", "MultiDict", "CIMultiDict", "upstr", "istr", "getversion", ) __version__ = "6.2.0" if TYPE_CHECKING or not USE_EXTENSIONS: from ._multidict_py import ( CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, getversion, istr, ) else: from ._multidict import ( CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, getversion, istr, ) upstr = istr ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/multidict/_abc.py0000644000175100001660000000430414766036355016374 0ustar00runnerdockerimport abc from collections.abc import Iterable, Mapping, MutableMapping from typing import TYPE_CHECKING, Protocol, TypeVar, Union, overload if TYPE_CHECKING: from ._multidict_py import istr else: istr = str _V = TypeVar("_V") _V_co = TypeVar("_V_co", covariant=True) _T = TypeVar("_T") class SupportsKeys(Protocol[_V_co]): def keys(self) -> Iterable[str]: ... def __getitem__(self, key: str, /) -> _V_co: ... class SupportsIKeys(Protocol[_V_co]): def keys(self) -> Iterable[istr]: ... def __getitem__(self, key: istr, /) -> _V_co: ... MDArg = Union[SupportsKeys[_V], SupportsIKeys[_V], Iterable[tuple[str, _V]], None] class MultiMapping(Mapping[str, _V_co]): @overload def getall(self, key: str) -> list[_V_co]: ... 
@overload def getall(self, key: str, default: _T) -> Union[list[_V_co], _T]: ... @abc.abstractmethod def getall(self, key: str, default: _T = ...) -> Union[list[_V_co], _T]: """Return all values for key.""" @overload def getone(self, key: str) -> _V_co: ... @overload def getone(self, key: str, default: _T) -> Union[_V_co, _T]: ... @abc.abstractmethod def getone(self, key: str, default: _T = ...) -> Union[_V_co, _T]: """Return first value for key.""" class MutableMultiMapping(MultiMapping[_V], MutableMapping[str, _V]): @abc.abstractmethod def add(self, key: str, value: _V) -> None: """Add value to list.""" @abc.abstractmethod def extend(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None: """Add everything from arg and kwargs to the mapping.""" @overload def popone(self, key: str) -> _V: ... @overload def popone(self, key: str, default: _T) -> Union[_V, _T]: ... @abc.abstractmethod def popone(self, key: str, default: _T = ...) -> Union[_V, _T]: """Remove specified key and return the corresponding value.""" @overload def popall(self, key: str) -> list[_V]: ... @overload def popall(self, key: str, default: _T) -> Union[list[_V], _T]: ... @abc.abstractmethod def popall(self, key: str, default: _T = ...) -> Union[list[_V], _T]: """Remove all occurrences of key and return the list of corresponding values.""" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/multidict/_compat.py0000644000175100001660000000054014766036355017130 0ustar00runnerdockerimport os import platform NO_EXTENSIONS = bool(os.environ.get("MULTIDICT_NO_EXTENSIONS")) PYPY = platform.python_implementation() == "PyPy" USE_EXTENSIONS = not NO_EXTENSIONS and not PYPY if USE_EXTENSIONS: try: from . import _multidict # type: ignore[attr-defined] # noqa: F401 except ImportError: USE_EXTENSIONS = False ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/multidict/_multidict.c0000644000175100001660000014532514766036355017450 0ustar00runnerdocker#include "Python.h" #include "structmember.h" #include "_multilib/pythoncapi_compat.h" // Include order important #include "_multilib/defs.h" #include "_multilib/istr.h" #include "_multilib/pair_list.h" #include "_multilib/dict.h" #include "_multilib/iter.h" #include "_multilib/views.h" #if PY_MINOR_VERSION < 12 #ifndef _PyArg_UnpackKeywords #define FASTCALL_OLD #endif #endif static PyObject *collections_abc_mapping; static PyObject *collections_abc_mut_mapping; static PyObject *collections_abc_mut_multi_mapping; static PyObject *repr_func; static PyTypeObject multidict_type; static PyTypeObject cimultidict_type; static PyTypeObject multidict_proxy_type; static PyTypeObject cimultidict_proxy_type; #define MultiDict_CheckExact(o) (Py_TYPE(o) == &multidict_type) #define CIMultiDict_CheckExact(o) (Py_TYPE(o) == &cimultidict_type) #define MultiDictProxy_CheckExact(o) (Py_TYPE(o) == &multidict_proxy_type) #define CIMultiDictProxy_CheckExact(o) (Py_TYPE(o) == &cimultidict_proxy_type) /* Helper macro for something like isinstance(obj, Base) */ #define _MultiDict_Check(o) \ ((MultiDict_CheckExact(o)) || \ (CIMultiDict_CheckExact(o)) || \ (MultiDictProxy_CheckExact(o)) || \ (CIMultiDictProxy_CheckExact(o))) /******************** Internal Methods ********************/ /* Forward declaration */ static PyObject *multidict_items(MultiDictObject *self); static inline PyObject * _multidict_getone(MultiDictObject *self, PyObject *key, PyObject *_default) { PyObject *val = 
pair_list_get_one(&self->pairs, key); if (val == NULL && PyErr_ExceptionMatches(PyExc_KeyError) && _default != NULL) { PyErr_Clear(); Py_INCREF(_default); return _default; } return val; } static inline int _multidict_eq(MultiDictObject *self, MultiDictObject *other) { Py_ssize_t pos1 = 0, pos2 = 0; Py_hash_t h1 = 0, h2 = 0; PyObject *identity1 = NULL, *identity2 = NULL, *value1 = NULL, *value2 = NULL; int cmp_identity = 0, cmp_value = 0; if (self == other) { return 1; } if (pair_list_len(&self->pairs) != pair_list_len(&other->pairs)) { return 0; } while (_pair_list_next(&self->pairs, &pos1, &identity1, NULL, &value1, &h1) && _pair_list_next(&other->pairs, &pos2, &identity2, NULL, &value2, &h2)) { if (h1 != h2) { return 0; } cmp_identity = PyObject_RichCompareBool(identity1, identity2, Py_NE); if (cmp_identity < 0) { return -1; } cmp_value = PyObject_RichCompareBool(value1, value2, Py_NE); if (cmp_value < 0) { return -1; } if (cmp_identity || cmp_value) { return 0; } } return 1; } static inline int _multidict_update_items(MultiDictObject *self, pair_list_t *pairs) { return pair_list_update(&self->pairs, pairs); } static inline int _multidict_append_items(MultiDictObject *self, pair_list_t *pairs) { PyObject *key = NULL, *value = NULL; Py_ssize_t pos = 0; while (_pair_list_next(pairs, &pos, NULL, &key, &value, NULL)) { if (pair_list_add(&self->pairs, key, value) < 0) { return -1; } } return 0; } static inline int _multidict_append_items_seq(MultiDictObject *self, PyObject *arg, const char *name) { PyObject *key = NULL, *value = NULL, *item = NULL, *iter = PyObject_GetIter(arg); if (iter == NULL) { return -1; } while ((item = PyIter_Next(iter)) != NULL) { if (PyTuple_CheckExact(item)) { if (PyTuple_GET_SIZE(item) != 2) { goto invalid_type; } key = PyTuple_GET_ITEM(item, 0); Py_INCREF(key); value = PyTuple_GET_ITEM(item, 1); Py_INCREF(value); } else if (PyList_CheckExact(item)) { if (PyList_Size(item) != 2) { goto invalid_type; } key = PyList_GetItemRef(item, 0); if (key == NULL) { goto invalid_type; } value = PyList_GetItemRef(item, 1); if (value == NULL) { goto invalid_type; } } else if (PySequence_Check(item)) { if (PySequence_Size(item) != 2) { goto invalid_type; } key = PySequence_GetItem(item, 0); value = PySequence_GetItem(item, 1); } else { goto invalid_type; } if (pair_list_add(&self->pairs, key, value) < 0) { goto fail; } Py_CLEAR(key); Py_CLEAR(value); Py_CLEAR(item); } Py_DECREF(iter); if (PyErr_Occurred()) { return -1; } return 0; invalid_type: PyErr_Format( PyExc_TypeError, "%s takes either dict or list of (key, value) pairs", name, NULL ); goto fail; fail: Py_XDECREF(key); Py_XDECREF(value); Py_XDECREF(item); Py_DECREF(iter); return -1; } static inline int _multidict_list_extend(PyObject *list, PyObject *target_list) { PyObject *item = NULL, *iter = PyObject_GetIter(target_list); if (iter == NULL) { return -1; } while ((item = PyIter_Next(iter)) != NULL) { if (PyList_Append(list, item) < 0) { Py_DECREF(item); Py_DECREF(iter); return -1; } Py_DECREF(item); } Py_DECREF(iter); if (PyErr_Occurred()) { return -1; } return 0; } static inline int _multidict_extend_with_args(MultiDictObject *self, PyObject *arg, PyObject *kwds, const char *name, int do_add) { PyObject *arg_items = NULL, /* tracked by GC */ *kwds_items = NULL; /* new reference */ pair_list_t *pairs = NULL; int err = 0; if (kwds && !PyArg_ValidateKeywordArguments(kwds)) { return -1; } // TODO: mb can be refactored more clear if (_MultiDict_Check(arg) && kwds == NULL) { if (MultiDict_CheckExact(arg) || 
CIMultiDict_CheckExact(arg)) { pairs = &((MultiDictObject*)arg)->pairs; } else if (MultiDictProxy_CheckExact(arg) || CIMultiDictProxy_CheckExact(arg)) { pairs = &((MultiDictProxyObject*)arg)->md->pairs; } if (do_add) { return _multidict_append_items(self, pairs); } return _multidict_update_items(self, pairs); } if (PyObject_HasAttrString(arg, "items")) { if (_MultiDict_Check(arg)) { arg_items = multidict_items((MultiDictObject*)arg); } else { arg_items = PyMapping_Items(arg); } if (arg_items == NULL) { return -1; } } else { arg_items = arg; Py_INCREF(arg_items); } if (kwds) { PyObject *tmp = PySequence_List(arg_items); Py_DECREF(arg_items); arg_items = tmp; if (arg_items == NULL) { return -1; } kwds_items = PyDict_Items(kwds); if (kwds_items == NULL) { Py_DECREF(arg_items); return -1; } err = _multidict_list_extend(arg_items, kwds_items); Py_DECREF(kwds_items); if (err < 0) { Py_DECREF(arg_items); return -1; } } if (do_add) { err = _multidict_append_items_seq(self, arg_items, name); } else { err = pair_list_update_from_seq(&self->pairs, arg_items); } Py_DECREF(arg_items); return err; } static inline int _multidict_extend_with_kwds(MultiDictObject *self, PyObject *kwds, const char *name, int do_add) { PyObject *arg = NULL; int err = 0; if (!PyArg_ValidateKeywordArguments(kwds)) { return -1; } arg = PyDict_Items(kwds); if (do_add) { err = _multidict_append_items_seq(self, arg, name); } else { err = pair_list_update_from_seq(&self->pairs, arg); } Py_DECREF(arg); return err; } static inline int _multidict_extend(MultiDictObject *self, PyObject *args, PyObject *kwds, const char *name, int do_add) { PyObject *arg = NULL; if (args && PyObject_Length(args) > 1) { PyErr_Format( PyExc_TypeError, "%s takes from 1 to 2 positional arguments but %zd were given", name, PyObject_Length(args) + 1, NULL ); return -1; } if (args && PyObject_Length(args) > 0) { if (!PyArg_UnpackTuple(args, name, 0, 1, &arg)) { return -1; } if (_multidict_extend_with_args(self, arg, kwds, name, do_add) < 0) { return -1; } } else if (kwds && PyObject_Length(kwds) > 0) { if (_multidict_extend_with_kwds(self, kwds, name, do_add) < 0) { return -1; } } return 0; } static inline PyObject * _multidict_copy(MultiDictObject *self, PyTypeObject *multidict_tp_object) { MultiDictObject *new_multidict = NULL; PyObject *arg_items = NULL, *items = NULL; new_multidict = (MultiDictObject*)PyType_GenericNew( multidict_tp_object, NULL, NULL); if (new_multidict == NULL) { return NULL; } if (multidict_tp_object->tp_init( (PyObject*)new_multidict, NULL, NULL) < 0) { return NULL; } items = multidict_items(self); if (items == NULL) { goto fail; } // TODO: "Implementation looks as slow as possible ..." 
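/* The copy is built by packing the items view of `self` into a one-element
   positional-args tuple and feeding it back through _multidict_extend(),
   i.e. it reuses the same code path as MultiDict(items) rather than copying
   the pair list directly; hence the TODO above about speed. */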
arg_items = PyTuple_New(1); if (arg_items == NULL) { goto fail; } Py_INCREF(items); PyTuple_SET_ITEM(arg_items, 0, items); if (_multidict_extend( new_multidict, arg_items, NULL, "copy", 1) < 0) { goto fail; } Py_DECREF(items); Py_DECREF(arg_items); return (PyObject*)new_multidict; fail: Py_XDECREF(items); Py_XDECREF(arg_items); Py_DECREF(new_multidict); return NULL; } static inline PyObject * _multidict_proxy_copy(MultiDictProxyObject *self, PyTypeObject *type) { PyObject *new_multidict = PyType_GenericNew(type, NULL, NULL); if (new_multidict == NULL) { goto fail; } if (type->tp_init(new_multidict, NULL, NULL) < 0) { goto fail; } if (_multidict_extend_with_args( (MultiDictObject*)new_multidict, (PyObject*)self, NULL, "copy", 1) < 0) { goto fail; } return new_multidict; fail: Py_XDECREF(new_multidict); return NULL; } /******************** Base Methods ********************/ static inline PyObject * multidict_getall( MultiDictObject *self, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 PyObject *args, PyObject *kwds #else PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames #endif ) { PyObject *list = NULL, *key = NULL, *_default = NULL; #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 static char *getall_keywords[] = {"key", "default", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:getall", getall_keywords, &key, &_default)) { return NULL; } #else static const char * const _keywords[] = {"key", "default", NULL}; #ifdef FASTCALL_OLD static _PyArg_Parser _parser = {"O|O:getall", _keywords, 0}; if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, &key, &_default)) { return NULL; } #else static _PyArg_Parser _parser = {NULL, _keywords, "getall", 0}; PyObject *argsbuf[2]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); if (!args) { return NULL; } key = args[0]; if (!noptargs) { goto skip_optional_pos; } _default = args[1]; skip_optional_pos: #endif #endif list = pair_list_get_all(&self->pairs, key); if (list == NULL && PyErr_ExceptionMatches(PyExc_KeyError) && _default != NULL) { PyErr_Clear(); Py_INCREF(_default); return _default; } return list; } static inline PyObject * multidict_getone( MultiDictObject *self, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 PyObject *args, PyObject *kwds #else PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames #endif ) { PyObject *key = NULL, *_default = NULL; #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 static char *getone_keywords[] = {"key", "default", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:getone", getone_keywords, &key, &_default)) { return NULL; } #else static const char * const _keywords[] = {"key", "default", NULL}; #ifdef FASTCALL_OLD static _PyArg_Parser _parser = {"O|O:getone", _keywords, 0}; if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, &key, &_default)) { return NULL; } #else static _PyArg_Parser _parser = {NULL, _keywords, "getone", 0}; PyObject *argsbuf[2]; Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); if (!args) { return NULL; } key = args[0]; if (!noptargs) { goto skip_optional_pos; } _default = args[1]; skip_optional_pos: #endif #endif return _multidict_getone(self, key, _default); } static inline PyObject * multidict_get( MultiDictObject *self, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 PyObject *args, PyObject *kwds #else PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames #endif ) { PyObject *key = NULL, *_default = Py_None, *ret; #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 static char *getone_keywords[] = {"key", "default", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:getone", getone_keywords, &key, &_default)) { return NULL; } #else static const char * const _keywords[] = {"key", "default", NULL}; #ifdef FASTCALL_OLD static _PyArg_Parser _parser = {"O|O:get", _keywords, 0}; if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, &key, &_default)) { return NULL; } #else static _PyArg_Parser _parser = {NULL, _keywords, "get", 0}; PyObject *argsbuf[2]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); if (!args) { return NULL; } key = args[0]; if (!noptargs) { goto skip_optional_pos; } _default = args[1]; skip_optional_pos: #endif #endif ret = _multidict_getone(self, key, _default); return ret; } static inline PyObject * multidict_keys(MultiDictObject *self) { return multidict_keysview_new((PyObject*)self); } static inline PyObject * multidict_items(MultiDictObject *self) { return multidict_itemsview_new((PyObject*)self); } static inline PyObject * multidict_values(MultiDictObject *self) { return multidict_valuesview_new((PyObject*)self); } static inline PyObject * multidict_reduce(MultiDictObject *self) { PyObject *items = NULL, *items_list = NULL, *args = NULL, *result = NULL; items = multidict_items(self); if (items == NULL) { goto ret; } items_list = PySequence_List(items); if (items_list == NULL) { goto ret; } args = PyTuple_Pack(1, items_list); if (args == NULL) { goto ret; } result = PyTuple_Pack(2, Py_TYPE(self), args); ret: Py_XDECREF(args); Py_XDECREF(items_list); Py_XDECREF(items); return result; } static inline PyObject * multidict_repr(PyObject *self) { return PyObject_CallFunctionObjArgs( repr_func, self, NULL); } static inline Py_ssize_t multidict_mp_len(MultiDictObject *self) { return pair_list_len(&self->pairs); } static inline PyObject * multidict_mp_subscript(MultiDictObject *self, PyObject *key) { return _multidict_getone(self, key, NULL); } static inline int multidict_mp_as_subscript(MultiDictObject *self, PyObject *key, PyObject *val) { if (val == NULL) { return pair_list_del(&self->pairs, key); } else { return pair_list_replace(&self->pairs, key, val); } } static inline int multidict_sq_contains(MultiDictObject *self, PyObject *key) { return pair_list_contains(&self->pairs, key); } static inline PyObject * multidict_tp_iter(MultiDictObject *self) { return multidict_keys_iter_new(self); } static inline PyObject * multidict_tp_richcompare(PyObject *self, PyObject *other, int op) { // TODO: refactoring me with love int cmp = 0; if (op != Py_EQ && op != Py_NE) { Py_RETURN_NOTIMPLEMENTED; } if (MultiDict_CheckExact(other) || CIMultiDict_CheckExact(other)) { cmp = _multidict_eq( (MultiDictObject*)self, (MultiDictObject*)other ); if (cmp < 0) { return NULL; } if (op == Py_NE) { cmp = !cmp; } 
return PyBool_FromLong(cmp); } if (MultiDictProxy_CheckExact(other) || CIMultiDictProxy_CheckExact(other)) { cmp = _multidict_eq( (MultiDictObject*)self, ((MultiDictProxyObject*)other)->md ); if (cmp < 0) { return NULL; } if (op == Py_NE) { cmp = !cmp; } return PyBool_FromLong(cmp); } cmp = PyObject_IsInstance(other, (PyObject*)collections_abc_mapping); if (cmp < 0) { return NULL; } if (cmp) { cmp = pair_list_eq_to_mapping(&((MultiDictObject*)self)->pairs, other); if (cmp < 0) { return NULL; } if (op == Py_NE) { cmp = !cmp; } return PyBool_FromLong(cmp); } Py_RETURN_NOTIMPLEMENTED; } static inline void multidict_tp_dealloc(MultiDictObject *self) { PyObject_GC_UnTrack(self); Py_TRASHCAN_BEGIN(self, multidict_tp_dealloc) if (self->weaklist != NULL) { PyObject_ClearWeakRefs((PyObject *)self); }; pair_list_dealloc(&self->pairs); Py_TYPE(self)->tp_free((PyObject *)self); Py_TRASHCAN_END // there should be no code after this } static inline int multidict_tp_traverse(MultiDictObject *self, visitproc visit, void *arg) { return pair_list_traverse(&self->pairs, visit, arg); } static inline int multidict_tp_clear(MultiDictObject *self) { return pair_list_clear(&self->pairs); } PyDoc_STRVAR(multidict_getall_doc, "Return a list of all values matching the key."); PyDoc_STRVAR(multidict_getone_doc, "Get first value matching the key."); PyDoc_STRVAR(multidict_get_doc, "Get first value matching the key.\n\nThe method is alias for .getone()."); PyDoc_STRVAR(multidict_keys_doc, "Return a new view of the dictionary's keys."); PyDoc_STRVAR(multidict_items_doc, "Return a new view of the dictionary's items *(key, value) pairs)."); PyDoc_STRVAR(multidict_values_doc, "Return a new view of the dictionary's values."); /******************** MultiDict ********************/ static inline int multidict_tp_init(MultiDictObject *self, PyObject *args, PyObject *kwds) { if (pair_list_init(&self->pairs) < 0) { return -1; } if (_multidict_extend(self, args, kwds, "MultiDict", 1) < 0) { return -1; } return 0; } static inline PyObject * multidict_add( MultiDictObject *self, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 PyObject *args, PyObject *kwds #else PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames #endif ) { PyObject *key = NULL, *val = NULL; #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 static char *kwlist[] = {"key", "value", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO:add", kwlist, &key, &val)) { return NULL; } #else static const char * const _keywords[] = {"key", "value", NULL}; #ifdef FASTCALL_OLD static _PyArg_Parser _parser = {"OO:add", _keywords, 0}; if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, &key, &val)) { return NULL; } #else static _PyArg_Parser _parser = { .keywords = _keywords, .fname = "add", .kwtuple = NULL, }; PyObject *argsbuf[2]; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); if (!args) { return NULL; } key = args[0]; val = args[1]; #endif #endif if (pair_list_add(&self->pairs, key, val) < 0) { return NULL; } Py_RETURN_NONE; } static inline PyObject * multidict_copy(MultiDictObject *self) { return _multidict_copy(self, &multidict_type); } static inline PyObject * multidict_extend(MultiDictObject *self, PyObject *args, PyObject *kwds) { if (_multidict_extend(self, args, kwds, "extend", 1) < 0) { return NULL; } Py_RETURN_NONE; } static inline PyObject * multidict_clear(MultiDictObject *self) { if (pair_list_clear(&self->pairs) < 0) { return NULL; } Py_RETURN_NONE; } static inline PyObject * multidict_setdefault( 
MultiDictObject *self, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 PyObject *args, PyObject *kwds #else PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames #endif ) { PyObject *key = NULL, *_default = NULL; #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 static char *setdefault_keywords[] = {"key", "default", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:setdefault", setdefault_keywords, &key, &_default)) { return NULL; } #else static const char * const _keywords[] = {"key", "default", NULL}; #ifdef FASTCALL_OLD static _PyArg_Parser _parser = {"O|O:setdefault", _keywords, 0}; if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, &key, &_default)) { return NULL; } #else static _PyArg_Parser _parser = {NULL, _keywords, "setdefault", 0}; PyObject *argsbuf[3]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); if (!args) { return NULL; } key = args[0]; if (!noptargs) { goto skip_optional_pos; } _default = args[1]; skip_optional_pos: #endif #endif return pair_list_set_default(&self->pairs, key, _default); } static inline PyObject * multidict_popone( MultiDictObject *self, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 PyObject *args, PyObject *kwds #else PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames #endif ) { PyObject *key = NULL, *_default = NULL, *ret_val = NULL; #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 static char *popone_keywords[] = {"key", "default", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:popone", popone_keywords, &key, &_default)) { return NULL; } ret_val = pair_list_pop_one(&self->pairs, key); if (ret_val == NULL && PyErr_ExceptionMatches(PyExc_KeyError) && _default != NULL) { PyErr_Clear(); Py_INCREF(_default); return _default; } return ret_val; #else static const char * const _keywords[] = {"key", "default", NULL}; #ifdef FASTCALL_OLD static _PyArg_Parser _parser = {"O|O:popone", _keywords, 0}; if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, &key, &_default)) { return NULL; } #else static _PyArg_Parser _parser = {NULL, _keywords, "popone", 0}; PyObject *argsbuf[3]; Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); if (!args) { return NULL; } key = args[0]; if (!noptargs) { goto skip_optional_pos; } _default = args[1]; skip_optional_pos: #endif ret_val = pair_list_pop_one(&self->pairs, key); if (ret_val == NULL && PyErr_ExceptionMatches(PyExc_KeyError) && _default != NULL) { PyErr_Clear(); Py_INCREF(_default); return _default; } return ret_val; #endif } static inline PyObject * multidict_pop( MultiDictObject *self, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 PyObject *args, PyObject *kwds #else PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames #endif ) { PyObject *key = NULL, *_default = NULL, *ret_val = NULL; #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 static char *pop_keywords[] = {"key", "default", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:popone", pop_keywords, &key, &_default)) { return NULL; } #else static const char * const _keywords[] = {"key", "default", NULL}; #ifdef FASTCALL_OLD static _PyArg_Parser _parser = {"O|O:pop", _keywords, 0}; if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, &key, &_default)) { return NULL; } #else static _PyArg_Parser _parser = {NULL, _keywords, "pop", 0}; PyObject *argsbuf[3]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); if (!args) { return NULL; } key = args[0]; if (!noptargs) { goto skip_optional_pos; } _default = args[1]; skip_optional_pos: #endif #endif ret_val = pair_list_pop_one(&self->pairs, key); if (ret_val == NULL && PyErr_ExceptionMatches(PyExc_KeyError) && _default != NULL) { PyErr_Clear(); Py_INCREF(_default); return _default; } return ret_val; } static inline PyObject * multidict_popall( MultiDictObject *self, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 PyObject *args, PyObject *kwds #else PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames #endif ) { PyObject *key = NULL, *_default = NULL, *ret_val = NULL; #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 static char *popall_keywords[] = {"key", "default", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:popall", popall_keywords, &key, &_default)) { return NULL; } #else static const char * const _keywords[] = {"key", "default", NULL}; #ifdef FASTCALL_OLD static _PyArg_Parser _parser = {"O|O:popall", _keywords, 0}; if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, &key, &_default)) { return NULL; } #else static _PyArg_Parser _parser = {NULL, _keywords, "popall", 0}; PyObject *argsbuf[3]; Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); if (!args) { return NULL; } key = args[0]; if (!noptargs) { goto skip_optional_pos; } _default = args[1]; skip_optional_pos: #endif #endif ret_val = pair_list_pop_all(&self->pairs, key); if (ret_val == NULL && PyErr_ExceptionMatches(PyExc_KeyError) && _default != NULL) { PyErr_Clear(); Py_INCREF(_default); return _default; } return ret_val; } static inline PyObject * multidict_popitem(MultiDictObject *self) { return pair_list_pop_item(&self->pairs); } static inline PyObject * multidict_update(MultiDictObject *self, PyObject *args, PyObject *kwds) { if (_multidict_extend(self, args, kwds, "update", 0) < 0) { return NULL; } Py_RETURN_NONE; } PyDoc_STRVAR(multidict_add_doc, "Add the key and value, not overwriting any previous value."); PyDoc_STRVAR(multidict_copy_doc, "Return a copy of itself."); PyDoc_STRVAR(multdicit_method_extend_doc, "Extend current MultiDict with more values.\n\ This method must be used instead of update."); PyDoc_STRVAR(multidict_clear_doc, "Remove all items from MultiDict"); PyDoc_STRVAR(multidict_setdefault_doc, "Return value for key, set value to default if key is not present."); PyDoc_STRVAR(multidict_popone_doc, "Remove the last occurrence of key and return the corresponding value.\n\n\ If key is not found, default is returned if given, otherwise KeyError is \ raised.\n"); PyDoc_STRVAR(multidict_pop_doc, "Remove the last occurrence of key and return the corresponding value.\n\n\ If key is not found, default is returned if given, otherwise KeyError is \ raised.\n"); PyDoc_STRVAR(multidict_popall_doc, "Remove all occurrences of key and return the list of corresponding values.\n\n\ If key is not found, default is returned if given, otherwise KeyError is \ raised.\n"); PyDoc_STRVAR(multidict_popitem_doc, "Remove and return an arbitrary (key, value) pair."); PyDoc_STRVAR(multidict_update_doc, "Update the dictionary from *other*, overwriting existing keys."); #define multidict_class_getitem Py_GenericAlias PyDoc_STRVAR(sizeof__doc__, "D.__sizeof__() -> size of D in memory, in bytes"); static inline PyObject * _multidict_sizeof(MultiDictObject *self) { Py_ssize_t size = sizeof(MultiDictObject); if (self->pairs.pairs != self->pairs.buffer) { size += (Py_ssize_t)sizeof(pair_t) * self->pairs.capacity; } return PyLong_FromSsize_t(size); } static PySequenceMethods multidict_sequence = { .sq_contains = (objobjproc)multidict_sq_contains, }; static PyMappingMethods multidict_mapping = { .mp_length = (lenfunc)multidict_mp_len, .mp_subscript = (binaryfunc)multidict_mp_subscript, .mp_ass_subscript = (objobjargproc)multidict_mp_as_subscript, }; static PyMethodDef multidict_methods[] = { { "getall", (PyCFunction)multidict_getall, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 METH_VARARGS #else METH_FASTCALL #endif | METH_KEYWORDS, multidict_getall_doc }, { "getone", (PyCFunction)multidict_getone, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 METH_VARARGS #else METH_FASTCALL #endif | METH_KEYWORDS, multidict_getone_doc }, { "get", (PyCFunction)multidict_get, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 METH_VARARGS #else METH_FASTCALL #endif | METH_KEYWORDS, multidict_get_doc }, { "keys", (PyCFunction)multidict_keys, METH_NOARGS, multidict_keys_doc }, { "items", (PyCFunction)multidict_items, METH_NOARGS, multidict_items_doc }, { "values", (PyCFunction)multidict_values, METH_NOARGS, multidict_values_doc }, { "add", (PyCFunction)multidict_add, #if 
PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 METH_VARARGS #else METH_FASTCALL #endif | METH_KEYWORDS, multidict_add_doc }, { "copy", (PyCFunction)multidict_copy, METH_NOARGS, multidict_copy_doc }, { "extend", (PyCFunction)multidict_extend, METH_VARARGS | METH_KEYWORDS, multdicit_method_extend_doc }, { "clear", (PyCFunction)multidict_clear, METH_NOARGS, multidict_clear_doc }, { "setdefault", (PyCFunction)multidict_setdefault, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 METH_VARARGS #else METH_FASTCALL #endif | METH_KEYWORDS, multidict_setdefault_doc }, { "popone", (PyCFunction)multidict_popone, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 METH_VARARGS #else METH_FASTCALL #endif | METH_KEYWORDS, multidict_popone_doc }, { "pop", (PyCFunction)multidict_pop, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 METH_VARARGS #else METH_FASTCALL #endif | METH_KEYWORDS, multidict_pop_doc }, { "popall", (PyCFunction)multidict_popall, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 METH_VARARGS #else METH_FASTCALL #endif | METH_KEYWORDS, multidict_popall_doc }, { "popitem", (PyCFunction)multidict_popitem, METH_NOARGS, multidict_popitem_doc }, { "update", (PyCFunction)multidict_update, METH_VARARGS | METH_KEYWORDS, multidict_update_doc }, { "__reduce__", (PyCFunction)multidict_reduce, METH_NOARGS, NULL, }, { "__class_getitem__", (PyCFunction)multidict_class_getitem, METH_O | METH_CLASS, NULL }, { "__sizeof__", (PyCFunction)_multidict_sizeof, METH_NOARGS, sizeof__doc__, }, { NULL, NULL } /* sentinel */ }; PyDoc_STRVAR(MultDict_doc, "Dictionary with the support for duplicate keys."); static PyTypeObject multidict_type = { PyVarObject_HEAD_INIT(NULL, 0) "multidict._multidict.MultiDict", /* tp_name */ sizeof(MultiDictObject), /* tp_basicsize */ .tp_dealloc = (destructor)multidict_tp_dealloc, .tp_repr = (reprfunc)multidict_repr, .tp_as_sequence = &multidict_sequence, .tp_as_mapping = &multidict_mapping, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, .tp_doc = MultDict_doc, .tp_traverse = (traverseproc)multidict_tp_traverse, .tp_clear = (inquiry)multidict_tp_clear, .tp_richcompare = (richcmpfunc)multidict_tp_richcompare, .tp_weaklistoffset = offsetof(MultiDictObject, weaklist), .tp_iter = (getiterfunc)multidict_tp_iter, .tp_methods = multidict_methods, .tp_init = (initproc)multidict_tp_init, .tp_alloc = PyType_GenericAlloc, .tp_new = PyType_GenericNew, .tp_free = PyObject_GC_Del, }; /******************** CIMultiDict ********************/ static inline int cimultidict_tp_init(MultiDictObject *self, PyObject *args, PyObject *kwds) { if (ci_pair_list_init(&self->pairs) < 0) { return -1; } if (_multidict_extend(self, args, kwds, "CIMultiDict", 1) < 0) { return -1; } return 0; } static inline PyObject * cimultidict_copy(MultiDictObject *self) { return _multidict_copy(self, &cimultidict_type); } PyDoc_STRVAR(cimultidict_copy_doc, "Return a copy of itself."); static PyMethodDef cimultidict_methods[] = { { "copy", (PyCFunction)cimultidict_copy, METH_NOARGS, cimultidict_copy_doc }, { NULL, NULL } /* sentinel */ }; PyDoc_STRVAR(CIMultDict_doc, "Dictionary with the support for duplicate case-insensitive keys."); static PyTypeObject cimultidict_type = { PyVarObject_HEAD_INIT(NULL, 0) "multidict._multidict.CIMultiDict", /* tp_name */ sizeof(MultiDictObject), /* tp_basicsize */ .tp_dealloc = (destructor)multidict_tp_dealloc, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, .tp_doc = CIMultDict_doc, .tp_traverse = 
(traverseproc)multidict_tp_traverse, .tp_clear = (inquiry)multidict_tp_clear, .tp_weaklistoffset = offsetof(MultiDictObject, weaklist), .tp_methods = cimultidict_methods, .tp_base = &multidict_type, .tp_init = (initproc)cimultidict_tp_init, .tp_alloc = PyType_GenericAlloc, .tp_new = PyType_GenericNew, .tp_free = PyObject_GC_Del, }; /******************** MultiDictProxy ********************/ static inline int multidict_proxy_tp_init(MultiDictProxyObject *self, PyObject *args, PyObject *kwds) { PyObject *arg = NULL; MultiDictObject *md = NULL; if (!PyArg_UnpackTuple(args, "multidict._multidict.MultiDictProxy", 0, 1, &arg)) { return -1; } if (arg == NULL) { PyErr_Format( PyExc_TypeError, "__init__() missing 1 required positional argument: 'arg'" ); return -1; } if (!MultiDictProxy_CheckExact(arg) && !CIMultiDict_CheckExact(arg) && !MultiDict_CheckExact(arg)) { PyErr_Format( PyExc_TypeError, "ctor requires MultiDict or MultiDictProxy instance, " "not ", Py_TYPE(arg)->tp_name ); return -1; } md = (MultiDictObject*)arg; if (MultiDictProxy_CheckExact(arg)) { md = ((MultiDictProxyObject*)arg)->md; } Py_INCREF(md); self->md = md; return 0; } static inline PyObject * multidict_proxy_getall( MultiDictProxyObject *self, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 PyObject *args, PyObject *kwds #else PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames #endif ) { return multidict_getall( self->md, args, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 kwds #else nargs, kwnames #endif ); } static inline PyObject * multidict_proxy_getone( MultiDictProxyObject *self, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 PyObject *args, PyObject *kwds #else PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames #endif ) { return multidict_getone( self->md, args, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 kwds #else nargs, kwnames #endif ); } static inline PyObject * multidict_proxy_get( MultiDictProxyObject *self, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 PyObject *args, PyObject *kwds #else PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames #endif ) { return multidict_get( self->md, args, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 kwds #else nargs, kwnames #endif ); } static inline PyObject * multidict_proxy_keys(MultiDictProxyObject *self) { return multidict_keys(self->md); } static inline PyObject * multidict_proxy_items(MultiDictProxyObject *self) { return multidict_items(self->md); } static inline PyObject * multidict_proxy_values(MultiDictProxyObject *self) { return multidict_values(self->md); } static inline PyObject * multidict_proxy_copy(MultiDictProxyObject *self) { return _multidict_proxy_copy(self, &multidict_type); } static inline PyObject * multidict_proxy_reduce(MultiDictProxyObject *self) { PyErr_Format( PyExc_TypeError, "can't pickle %s objects", Py_TYPE(self)->tp_name ); return NULL; } static inline Py_ssize_t multidict_proxy_mp_len(MultiDictProxyObject *self) { return multidict_mp_len(self->md); } static inline PyObject * multidict_proxy_mp_subscript(MultiDictProxyObject *self, PyObject *key) { return multidict_mp_subscript(self->md, key); } static inline int multidict_proxy_sq_contains(MultiDictProxyObject *self, PyObject *key) { return multidict_sq_contains(self->md, key); } static inline PyObject * multidict_proxy_tp_iter(MultiDictProxyObject *self) { return multidict_tp_iter(self->md); } static inline PyObject * multidict_proxy_tp_richcompare(MultiDictProxyObject *self, PyObject *other, int op) { return 
multidict_tp_richcompare((PyObject*)self->md, other, op); } static inline void multidict_proxy_tp_dealloc(MultiDictProxyObject *self) { PyObject_GC_UnTrack(self); if (self->weaklist != NULL) { PyObject_ClearWeakRefs((PyObject *)self); }; Py_XDECREF(self->md); Py_TYPE(self)->tp_free((PyObject *)self); } static inline int multidict_proxy_tp_traverse(MultiDictProxyObject *self, visitproc visit, void *arg) { Py_VISIT(self->md); return 0; } static inline int multidict_proxy_tp_clear(MultiDictProxyObject *self) { Py_CLEAR(self->md); return 0; } static PySequenceMethods multidict_proxy_sequence = { .sq_contains = (objobjproc)multidict_proxy_sq_contains, }; static PyMappingMethods multidict_proxy_mapping = { .mp_length = (lenfunc)multidict_proxy_mp_len, .mp_subscript = (binaryfunc)multidict_proxy_mp_subscript, }; static PyMethodDef multidict_proxy_methods[] = { { "getall", (PyCFunction)multidict_proxy_getall, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 METH_VARARGS #else METH_FASTCALL #endif | METH_KEYWORDS, multidict_getall_doc }, { "getone", (PyCFunction)multidict_proxy_getone, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 METH_VARARGS #else METH_FASTCALL #endif | METH_KEYWORDS, multidict_getone_doc }, { "get", (PyCFunction)multidict_proxy_get, #if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 12 METH_VARARGS #else METH_FASTCALL #endif | METH_KEYWORDS, multidict_get_doc }, { "keys", (PyCFunction)multidict_proxy_keys, METH_NOARGS, multidict_keys_doc }, { "items", (PyCFunction)multidict_proxy_items, METH_NOARGS, multidict_items_doc }, { "values", (PyCFunction)multidict_proxy_values, METH_NOARGS, multidict_values_doc }, { "copy", (PyCFunction)multidict_proxy_copy, METH_NOARGS, multidict_copy_doc }, { "__reduce__", (PyCFunction)multidict_proxy_reduce, METH_NOARGS, NULL }, { "__class_getitem__", (PyCFunction)multidict_class_getitem, METH_O | METH_CLASS, NULL }, { NULL, NULL } /* sentinel */ }; PyDoc_STRVAR(MultDictProxy_doc, "Read-only proxy for MultiDict instance."); static PyTypeObject multidict_proxy_type = { PyVarObject_HEAD_INIT(NULL, 0) "multidict._multidict.MultiDictProxy", /* tp_name */ sizeof(MultiDictProxyObject), /* tp_basicsize */ .tp_dealloc = (destructor)multidict_proxy_tp_dealloc, .tp_repr = (reprfunc)multidict_repr, .tp_as_sequence = &multidict_proxy_sequence, .tp_as_mapping = &multidict_proxy_mapping, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, .tp_doc = MultDictProxy_doc, .tp_traverse = (traverseproc)multidict_proxy_tp_traverse, .tp_clear = (inquiry)multidict_proxy_tp_clear, .tp_richcompare = (richcmpfunc)multidict_proxy_tp_richcompare, .tp_weaklistoffset = offsetof(MultiDictProxyObject, weaklist), .tp_iter = (getiterfunc)multidict_proxy_tp_iter, .tp_methods = multidict_proxy_methods, .tp_init = (initproc)multidict_proxy_tp_init, .tp_alloc = PyType_GenericAlloc, .tp_new = PyType_GenericNew, .tp_free = PyObject_GC_Del, }; /******************** CIMultiDictProxy ********************/ static inline int cimultidict_proxy_tp_init(MultiDictProxyObject *self, PyObject *args, PyObject *kwds) { PyObject *arg = NULL; MultiDictObject *md = NULL; if (!PyArg_UnpackTuple(args, "multidict._multidict.CIMultiDictProxy", 1, 1, &arg)) { return -1; } if (arg == NULL) { PyErr_Format( PyExc_TypeError, "__init__() missing 1 required positional argument: 'arg'" ); return -1; } if (!CIMultiDictProxy_CheckExact(arg) && !CIMultiDict_CheckExact(arg)) { PyErr_Format( PyExc_TypeError, "ctor requires CIMultiDict or CIMultiDictProxy instance, " "not ", 
Py_TYPE(arg)->tp_name ); return -1; } md = (MultiDictObject*)arg; if (CIMultiDictProxy_CheckExact(arg)) { md = ((MultiDictProxyObject*)arg)->md; } Py_INCREF(md); self->md = md; return 0; } static inline PyObject * cimultidict_proxy_copy(MultiDictProxyObject *self) { return _multidict_proxy_copy(self, &cimultidict_type); } PyDoc_STRVAR(CIMultDictProxy_doc, "Read-only proxy for CIMultiDict instance."); PyDoc_STRVAR(cimultidict_proxy_copy_doc, "Return copy of itself"); static PyMethodDef cimultidict_proxy_methods[] = { { "copy", (PyCFunction)cimultidict_proxy_copy, METH_NOARGS, cimultidict_proxy_copy_doc }, { NULL, NULL } /* sentinel */ }; static PyTypeObject cimultidict_proxy_type = { PyVarObject_HEAD_INIT(NULL, 0) "multidict._multidict.CIMultiDictProxy", /* tp_name */ sizeof(MultiDictProxyObject), /* tp_basicsize */ .tp_dealloc = (destructor)multidict_proxy_tp_dealloc, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, .tp_doc = CIMultDictProxy_doc, .tp_traverse = (traverseproc)multidict_proxy_tp_traverse, .tp_clear = (inquiry)multidict_proxy_tp_clear, .tp_richcompare = (richcmpfunc)multidict_proxy_tp_richcompare, .tp_weaklistoffset = offsetof(MultiDictProxyObject, weaklist), .tp_methods = cimultidict_proxy_methods, .tp_base = &multidict_proxy_type, .tp_init = (initproc)cimultidict_proxy_tp_init, .tp_alloc = PyType_GenericAlloc, .tp_new = PyType_GenericNew, .tp_free = PyObject_GC_Del, }; /******************** Other functions ********************/ static inline PyObject * getversion(PyObject *self, PyObject *md) { pair_list_t *pairs = NULL; if (MultiDict_CheckExact(md) || CIMultiDict_CheckExact(md)) { pairs = &((MultiDictObject*)md)->pairs; } else if (MultiDictProxy_CheckExact(md) || CIMultiDictProxy_CheckExact(md)) { pairs = &((MultiDictProxyObject*)md)->md->pairs; } else { PyErr_Format(PyExc_TypeError, "unexpected type"); return NULL; } return PyLong_FromUnsignedLong(pair_list_version(pairs)); } /******************** Module ********************/ static inline void module_free(void *m) { Py_CLEAR(multidict_str_lower); Py_CLEAR(collections_abc_mapping); Py_CLEAR(collections_abc_mut_mapping); Py_CLEAR(collections_abc_mut_multi_mapping); } static PyMethodDef multidict_module_methods[] = { { "getversion", (PyCFunction)getversion, METH_O }, { NULL, NULL } /* sentinel */ }; static PyModuleDef multidict_module = { PyModuleDef_HEAD_INIT, /* m_base */ "_multidict", /* m_name */ .m_size = -1, .m_methods = multidict_module_methods, .m_free = (freefunc)module_free, }; PyMODINIT_FUNC PyInit__multidict(void) { multidict_str_lower = PyUnicode_InternFromString("lower"); if (multidict_str_lower == NULL) { goto fail; } PyObject *module = NULL, *reg_func_call_result = NULL; if (multidict_views_init() < 0) { goto fail; } if (multidict_iter_init() < 0) { goto fail; } if (istr_init() < 0) { goto fail; } if (PyType_Ready(&multidict_type) < 0 || PyType_Ready(&cimultidict_type) < 0 || PyType_Ready(&multidict_proxy_type) < 0 || PyType_Ready(&cimultidict_proxy_type) < 0) { goto fail; } #define WITH_MOD(NAME) \ Py_CLEAR(module); \ module = PyImport_ImportModule(NAME); \ if (module == NULL) { \ goto fail; \ } #define GET_MOD_ATTR(VAR, NAME) \ VAR = PyObject_GetAttrString(module, NAME); \ if (VAR == NULL) { \ goto fail; \ } WITH_MOD("collections.abc"); GET_MOD_ATTR(collections_abc_mapping, "Mapping"); WITH_MOD("multidict._abc"); GET_MOD_ATTR(collections_abc_mut_mapping, "MultiMapping"); GET_MOD_ATTR(collections_abc_mut_multi_mapping, "MutableMultiMapping"); WITH_MOD("multidict._multidict_base"); 
GET_MOD_ATTR(repr_func, "_mdrepr"); Py_CLEAR(module); \ /* Register in _abc mappings (CI)MultiDict and (CI)MultiDictProxy */ reg_func_call_result = PyObject_CallMethod( collections_abc_mut_mapping, "register", "O", (PyObject*)&multidict_proxy_type ); if (reg_func_call_result == NULL) { goto fail; } Py_DECREF(reg_func_call_result); reg_func_call_result = PyObject_CallMethod( collections_abc_mut_mapping, "register", "O", (PyObject*)&cimultidict_proxy_type ); if (reg_func_call_result == NULL) { goto fail; } Py_DECREF(reg_func_call_result); reg_func_call_result = PyObject_CallMethod( collections_abc_mut_multi_mapping, "register", "O", (PyObject*)&multidict_type ); if (reg_func_call_result == NULL) { goto fail; } Py_DECREF(reg_func_call_result); reg_func_call_result = PyObject_CallMethod( collections_abc_mut_multi_mapping, "register", "O", (PyObject*)&cimultidict_type ); if (reg_func_call_result == NULL) { goto fail; } Py_DECREF(reg_func_call_result); /* Instantiate this module */ module = PyModule_Create(&multidict_module); if (module == NULL) { goto fail; } #ifdef Py_GIL_DISABLED PyUnstable_Module_SetGIL(module, Py_MOD_GIL_NOT_USED); #endif Py_INCREF(&istr_type); if (PyModule_AddObject( module, "istr", (PyObject*)&istr_type) < 0) { goto fail; } Py_INCREF(&multidict_type); if (PyModule_AddObject( module, "MultiDict", (PyObject*)&multidict_type) < 0) { goto fail; } Py_INCREF(&cimultidict_type); if (PyModule_AddObject( module, "CIMultiDict", (PyObject*)&cimultidict_type) < 0) { goto fail; } Py_INCREF(&multidict_proxy_type); if (PyModule_AddObject( module, "MultiDictProxy", (PyObject*)&multidict_proxy_type) < 0) { goto fail; } Py_INCREF(&cimultidict_proxy_type); if (PyModule_AddObject( module, "CIMultiDictProxy", (PyObject*)&cimultidict_proxy_type) < 0) { goto fail; } return module; fail: Py_XDECREF(multidict_str_lower); Py_XDECREF(collections_abc_mapping); Py_XDECREF(collections_abc_mut_mapping); Py_XDECREF(collections_abc_mut_multi_mapping); return NULL; #undef WITH_MOD #undef GET_MOD_ATTR } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/multidict/_multidict_base.py0000644000175100001660000001177514766036355020651 0ustar00runnerdockerimport sys from collections.abc import ( Container, ItemsView, Iterable, KeysView, Mapping, Set, ValuesView, ) from typing import Literal, Union if sys.version_info >= (3, 10): from types import NotImplementedType else: from typing import Any as NotImplementedType if sys.version_info >= (3, 11): from typing import assert_never else: from typing_extensions import assert_never def _abc_itemsview_register(view_cls: type[object]) -> None: ItemsView.register(view_cls) def _abc_keysview_register(view_cls: type[object]) -> None: KeysView.register(view_cls) def _abc_valuesview_register(view_cls: type[object]) -> None: ValuesView.register(view_cls) def _viewbaseset_richcmp( view: set[object], other: object, op: Literal[0, 1, 2, 3, 4, 5] ) -> Union[bool, NotImplementedType]: if op == 0: # < if not isinstance(other, Set): return NotImplemented # type: ignore[no-any-return] return len(view) < len(other) and view <= other elif op == 1: # <= if not isinstance(other, Set): return NotImplemented # type: ignore[no-any-return] if len(view) > len(other): return False for elem in view: if elem not in other: return False return True elif op == 2: # == if not isinstance(other, Set): return NotImplemented # type: ignore[no-any-return] return len(view) == len(other) and view <= other elif op == 3: # != return not view == other 
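    # `op` follows CPython's rich-comparison opcodes (0-5 map to
    # <, <=, ==, !=, >, >=), so the remaining branches implement > and >=.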
elif op == 4: # > if not isinstance(other, Set): return NotImplemented # type: ignore[no-any-return] return len(view) > len(other) and view >= other elif op == 5: # >= if not isinstance(other, Set): return NotImplemented # type: ignore[no-any-return] if len(view) < len(other): return False for elem in other: if elem not in view: return False return True else: # pragma: no cover assert_never(op) def _viewbaseset_and( view: set[object], other: object ) -> Union[set[object], NotImplementedType]: if not isinstance(other, Iterable): return NotImplemented # type: ignore[no-any-return] if isinstance(view, Set): view = set(iter(view)) if isinstance(other, Set): other = set(iter(other)) if not isinstance(other, Set): other = set(iter(other)) return view & other def _viewbaseset_or( view: set[object], other: object ) -> Union[set[object], NotImplementedType]: if not isinstance(other, Iterable): return NotImplemented # type: ignore[no-any-return] if isinstance(view, Set): view = set(iter(view)) if isinstance(other, Set): other = set(iter(other)) if not isinstance(other, Set): other = set(iter(other)) return view | other def _viewbaseset_sub( view: set[object], other: object ) -> Union[set[object], NotImplementedType]: if not isinstance(other, Iterable): return NotImplemented # type: ignore[no-any-return] if isinstance(view, Set): view = set(iter(view)) if isinstance(other, Set): other = set(iter(other)) if not isinstance(other, Set): other = set(iter(other)) return view - other def _viewbaseset_xor( view: set[object], other: object ) -> Union[set[object], NotImplementedType]: if not isinstance(other, Iterable): return NotImplemented # type: ignore[no-any-return] if isinstance(view, Set): view = set(iter(view)) if isinstance(other, Set): other = set(iter(other)) if not isinstance(other, Set): other = set(iter(other)) return view ^ other def _itemsview_isdisjoint(view: Container[object], other: Iterable[object]) -> bool: "Return True if two sets have a null intersection." for v in other: if v in view: return False return True def _itemsview_repr(view: Iterable[tuple[object, object]]) -> str: lst = [] for k, v in view: lst.append("{!r}: {!r}".format(k, v)) body = ", ".join(lst) return "{}({})".format(view.__class__.__name__, body) def _keysview_isdisjoint(view: Container[object], other: Iterable[object]) -> bool: "Return True if two sets have a null intersection." 
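    # Illustrative behaviour of the set-style view helpers above, exercised
    # through the public API (a sketch, not executed by this module):
    #
    #   >>> from multidict import MultiDict
    #   >>> md = MultiDict([("a", 1), ("b", 2), ("a", 3)])
    #   >>> md.keys() >= {"a", "b"}
    #   True
    #   >>> md.keys().isdisjoint({"c"})
    #   True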
for k in other: if k in view: return False return True def _keysview_repr(view: Iterable[object]) -> str: lst = [] for k in view: lst.append("{!r}".format(k)) body = ", ".join(lst) return "{}({})".format(view.__class__.__name__, body) def _valuesview_repr(view: Iterable[object]) -> str: lst = [] for v in view: lst.append("{!r}".format(v)) body = ", ".join(lst) return "{}({})".format(view.__class__.__name__, body) def _mdrepr(md: Mapping[object, object]) -> str: lst = [] for k, v in md.items(): lst.append("'{}': {!r}".format(k, v)) body = ", ".join(lst) return "<{}({})>".format(md.__class__.__name__, body) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/multidict/_multidict_py.py0000644000175100001660000004277414766036355020372 0ustar00runnerdockerimport enum import sys from array import array from collections.abc import ( Callable, ItemsView, Iterable, Iterator, KeysView, Mapping, ValuesView, ) from typing import ( TYPE_CHECKING, Generic, NoReturn, TypeVar, Union, cast, overload, ) from ._abc import MDArg, MultiMapping, MutableMultiMapping, SupportsKeys if sys.version_info >= (3, 11): from typing import Self else: from typing_extensions import Self class istr(str): """Case insensitive str.""" __is_istr__ = True _V = TypeVar("_V") _T = TypeVar("_T") _SENTINEL = enum.Enum("_SENTINEL", "sentinel") sentinel = _SENTINEL.sentinel _version = array("Q", [0]) class _Impl(Generic[_V]): __slots__ = ("_items", "_version") def __init__(self) -> None: self._items: list[tuple[str, str, _V]] = [] self.incr_version() def incr_version(self) -> None: global _version v = _version v[0] += 1 self._version = v[0] if sys.implementation.name != "pypy": def __sizeof__(self) -> int: return object.__sizeof__(self) + sys.getsizeof(self._items) class _Iter(Generic[_T]): __slots__ = ("_size", "_iter") def __init__(self, size: int, iterator: Iterator[_T]): self._size = size self._iter = iterator def __iter__(self) -> Self: return self def __next__(self) -> _T: return next(self._iter) def __length_hint__(self) -> int: return self._size class _ViewBase(Generic[_V]): def __init__(self, impl: _Impl[_V]): self._impl = impl def __len__(self) -> int: return len(self._impl._items) class _ItemsView(_ViewBase[_V], ItemsView[str, _V]): def __contains__(self, item: object) -> bool: if not isinstance(item, (tuple, list)) or len(item) != 2: return False for i, k, v in self._impl._items: if item[0] == k and item[1] == v: return True return False def __iter__(self) -> _Iter[tuple[str, _V]]: return _Iter(len(self), self._iter(self._impl._version)) def _iter(self, version: int) -> Iterator[tuple[str, _V]]: for i, k, v in self._impl._items: if version != self._impl._version: raise RuntimeError("Dictionary changed during iteration") yield k, v def __repr__(self) -> str: lst = [] for item in self._impl._items: lst.append("{!r}: {!r}".format(item[1], item[2])) body = ", ".join(lst) return "{}({})".format(self.__class__.__name__, body) class _ValuesView(_ViewBase[_V], ValuesView[_V]): def __contains__(self, value: object) -> bool: for item in self._impl._items: if item[2] == value: return True return False def __iter__(self) -> _Iter[_V]: return _Iter(len(self), self._iter(self._impl._version)) def _iter(self, version: int) -> Iterator[_V]: for item in self._impl._items: if version != self._impl._version: raise RuntimeError("Dictionary changed during iteration") yield item[2] def __repr__(self) -> str: lst = [] for item in self._impl._items: lst.append("{!r}".format(item[2])) body = 
", ".join(lst) return "{}({})".format(self.__class__.__name__, body) class _KeysView(_ViewBase[_V], KeysView[str]): def __contains__(self, key: object) -> bool: for item in self._impl._items: if item[1] == key: return True return False def __iter__(self) -> _Iter[str]: return _Iter(len(self), self._iter(self._impl._version)) def _iter(self, version: int) -> Iterator[str]: for item in self._impl._items: if version != self._impl._version: raise RuntimeError("Dictionary changed during iteration") yield item[1] def __repr__(self) -> str: lst = [] for item in self._impl._items: lst.append("{!r}".format(item[1])) body = ", ".join(lst) return "{}({})".format(self.__class__.__name__, body) class _Base(MultiMapping[_V]): _impl: _Impl[_V] def _title(self, key: str) -> str: return key @overload def getall(self, key: str) -> list[_V]: ... @overload def getall(self, key: str, default: _T) -> Union[list[_V], _T]: ... def getall( self, key: str, default: Union[_T, _SENTINEL] = sentinel ) -> Union[list[_V], _T]: """Return a list of all values matching the key.""" identity = self._title(key) res = [v for i, k, v in self._impl._items if i == identity] if res: return res if not res and default is not sentinel: return default raise KeyError("Key not found: %r" % key) @overload def getone(self, key: str) -> _V: ... @overload def getone(self, key: str, default: _T) -> Union[_V, _T]: ... def getone( self, key: str, default: Union[_T, _SENTINEL] = sentinel ) -> Union[_V, _T]: """Get first value matching the key. Raises KeyError if the key is not found and no default is provided. """ identity = self._title(key) for i, k, v in self._impl._items: if i == identity: return v if default is not sentinel: return default raise KeyError("Key not found: %r" % key) # Mapping interface # def __getitem__(self, key: str) -> _V: return self.getone(key) @overload def get(self, key: str, /) -> Union[_V, None]: ... @overload def get(self, key: str, /, default: _T) -> Union[_V, _T]: ... def get(self, key: str, default: Union[_T, None] = None) -> Union[_V, _T, None]: """Get first value matching the key. 
If the key is not found, returns the default (or None if no default is provided) """ return self.getone(key, default) def __iter__(self) -> Iterator[str]: return iter(self.keys()) def __len__(self) -> int: return len(self._impl._items) def keys(self) -> KeysView[str]: """Return a new view of the dictionary's keys.""" return _KeysView(self._impl) def items(self) -> ItemsView[str, _V]: """Return a new view of the dictionary's items *(key, value) pairs).""" return _ItemsView(self._impl) def values(self) -> _ValuesView[_V]: """Return a new view of the dictionary's values.""" return _ValuesView(self._impl) def __eq__(self, other: object) -> bool: if not isinstance(other, Mapping): return NotImplemented if isinstance(other, _Base): lft = self._impl._items rht = other._impl._items if len(lft) != len(rht): return False for (i1, k2, v1), (i2, k2, v2) in zip(lft, rht): if i1 != i2 or v1 != v2: return False return True if len(self._impl._items) != len(other): return False for k, v in self.items(): nv = other.get(k, sentinel) if v != nv: return False return True def __contains__(self, key: object) -> bool: if not isinstance(key, str): return False identity = self._title(key) for i, k, v in self._impl._items: if i == identity: return True return False def __repr__(self) -> str: body = ", ".join("'{}': {!r}".format(k, v) for k, v in self.items()) return "<{}({})>".format(self.__class__.__name__, body) class MultiDict(_Base[_V], MutableMultiMapping[_V]): """Dictionary with the support for duplicate keys.""" def __init__(self, arg: MDArg[_V] = None, /, **kwargs: _V): self._impl = _Impl() self._extend(arg, kwargs, self.__class__.__name__, self._extend_items) if sys.implementation.name != "pypy": def __sizeof__(self) -> int: return object.__sizeof__(self) + sys.getsizeof(self._impl) def __reduce__(self) -> tuple[type[Self], tuple[list[tuple[str, _V]]]]: return (self.__class__, (list(self.items()),)) def _title(self, key: str) -> str: return key def _key(self, key: str) -> str: if isinstance(key, str): return key else: raise TypeError("MultiDict keys should be either str or subclasses of str") def add(self, key: str, value: _V) -> None: identity = self._title(key) self._impl._items.append((identity, self._key(key), value)) self._impl.incr_version() def copy(self) -> Self: """Return a copy of itself.""" cls = self.__class__ return cls(self.items()) __copy__ = copy def extend(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None: """Extend current MultiDict with more values. This method must be used instead of update. 
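        Example (an illustrative sketch)::

            d = MultiDict([('a', 1)])
            d.extend([('a', 2)], b=3)
            list(d.items())  # [('a', 1), ('a', 2), ('b', 3)]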
""" self._extend(arg, kwargs, "extend", self._extend_items) def _extend( self, arg: MDArg[_V], kwargs: Mapping[str, _V], name: str, method: Callable[[list[tuple[str, str, _V]]], None], ) -> None: if arg: if isinstance(arg, (MultiDict, MultiDictProxy)) and not kwargs: items = arg._impl._items else: if hasattr(arg, "keys"): arg = cast(SupportsKeys[_V], arg) arg = [(k, arg[k]) for k in arg.keys()] if kwargs: arg = list(arg) arg.extend(list(kwargs.items())) items = [] for item in arg: if not len(item) == 2: raise TypeError( "{} takes either dict or list of (key, value) " "tuples".format(name) ) items.append((self._title(item[0]), self._key(item[0]), item[1])) method(items) else: method( [ (self._title(key), self._key(key), value) for key, value in kwargs.items() ] ) def _extend_items(self, items: Iterable[tuple[str, str, _V]]) -> None: for identity, key, value in items: self.add(key, value) def clear(self) -> None: """Remove all items from MultiDict.""" self._impl._items.clear() self._impl.incr_version() # Mapping interface # def __setitem__(self, key: str, value: _V) -> None: self._replace(key, value) def __delitem__(self, key: str) -> None: identity = self._title(key) items = self._impl._items found = False for i in range(len(items) - 1, -1, -1): if items[i][0] == identity: del items[i] found = True if not found: raise KeyError(key) else: self._impl.incr_version() @overload def setdefault( self: "MultiDict[Union[_T, None]]", key: str, default: None = None ) -> Union[_T, None]: ... @overload def setdefault(self, key: str, default: _V) -> _V: ... def setdefault(self, key: str, default: Union[_V, None] = None) -> Union[_V, None]: # type: ignore[misc] """Return value for key, set value to default if key is not present.""" identity = self._title(key) for i, k, v in self._impl._items: if i == identity: return v self.add(key, default) # type: ignore[arg-type] return default @overload def popone(self, key: str) -> _V: ... @overload def popone(self, key: str, default: _T) -> Union[_V, _T]: ... def popone( self, key: str, default: Union[_T, _SENTINEL] = sentinel ) -> Union[_V, _T]: """Remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. """ identity = self._title(key) for i in range(len(self._impl._items)): if self._impl._items[i][0] == identity: value = self._impl._items[i][2] del self._impl._items[i] self._impl.incr_version() return value if default is sentinel: raise KeyError(key) else: return default # Type checking will inherit signature for pop() if we don't confuse it here. if not TYPE_CHECKING: pop = popone @overload def popall(self, key: str) -> list[_V]: ... @overload def popall(self, key: str, default: _T) -> Union[list[_V], _T]: ... def popall( self, key: str, default: Union[_T, _SENTINEL] = sentinel ) -> Union[list[_V], _T]: """Remove all occurrences of key and return the list of corresponding values. If key is not found, default is returned if given, otherwise KeyError is raised. 
""" found = False identity = self._title(key) ret = [] for i in range(len(self._impl._items) - 1, -1, -1): item = self._impl._items[i] if item[0] == identity: ret.append(item[2]) del self._impl._items[i] self._impl.incr_version() found = True if not found: if default is sentinel: raise KeyError(key) else: return default else: ret.reverse() return ret def popitem(self) -> tuple[str, _V]: """Remove and return an arbitrary (key, value) pair.""" if self._impl._items: i = self._impl._items.pop(0) self._impl.incr_version() return i[1], i[2] else: raise KeyError("empty multidict") def update(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None: """Update the dictionary from *other*, overwriting existing keys.""" self._extend(arg, kwargs, "update", self._update_items) def _update_items(self, items: list[tuple[str, str, _V]]) -> None: if not items: return used_keys: dict[str, int] = {} for identity, key, value in items: start = used_keys.get(identity, 0) for i in range(start, len(self._impl._items)): item = self._impl._items[i] if item[0] == identity: used_keys[identity] = i + 1 self._impl._items[i] = (identity, key, value) break else: self._impl._items.append((identity, key, value)) used_keys[identity] = len(self._impl._items) # drop tails i = 0 while i < len(self._impl._items): item = self._impl._items[i] identity = item[0] pos = used_keys.get(identity) if pos is None: i += 1 continue if i >= pos: del self._impl._items[i] else: i += 1 self._impl.incr_version() def _replace(self, key: str, value: _V) -> None: key = self._key(key) identity = self._title(key) items = self._impl._items for i in range(len(items)): item = items[i] if item[0] == identity: items[i] = (identity, key, value) # i points to last found item rgt = i self._impl.incr_version() break else: self._impl._items.append((identity, key, value)) self._impl.incr_version() return # remove all tail items # Mypy bug: https://github.com/python/mypy/issues/14209 i = rgt + 1 # type: ignore[possibly-undefined] while i < len(items): item = items[i] if item[0] == identity: del items[i] else: i += 1 class CIMultiDict(MultiDict[_V]): """Dictionary with the support for duplicate case-insensitive keys.""" def _title(self, key: str) -> str: return key.title() class MultiDictProxy(_Base[_V]): """Read-only proxy for MultiDict instance.""" def __init__(self, arg: Union[MultiDict[_V], "MultiDictProxy[_V]"]): if not isinstance(arg, (MultiDict, MultiDictProxy)): raise TypeError( "ctor requires MultiDict or MultiDictProxy instance" ", not {}".format(type(arg)) ) self._impl = arg._impl def __reduce__(self) -> NoReturn: raise TypeError("can't pickle {} objects".format(self.__class__.__name__)) def copy(self) -> MultiDict[_V]: """Return a copy of itself.""" return MultiDict(self.items()) class CIMultiDictProxy(MultiDictProxy[_V]): """Read-only proxy for CIMultiDict instance.""" def __init__(self, arg: Union[MultiDict[_V], MultiDictProxy[_V]]): if not isinstance(arg, (CIMultiDict, CIMultiDictProxy)): raise TypeError( "ctor requires CIMultiDict or CIMultiDictProxy instance" ", not {}".format(type(arg)) ) self._impl = arg._impl def _title(self, key: str) -> str: return key.title() def copy(self) -> CIMultiDict[_V]: """Return a copy of itself.""" return CIMultiDict(self.items()) def getversion(md: Union[MultiDict[object], MultiDictProxy[object]]) -> int: if not isinstance(md, _Base): raise TypeError("Parameter should be multidict or proxy") return md._impl._version ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1742224624.164563 
multidict-6.2.0/multidict/_multilib/0000755000175100001660000000000014766036360017111 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/multidict/_multilib/defs.h0000644000175100001660000000121114766036355020202 0ustar00runnerdocker#ifndef _MULTIDICT_DEFS_H #define _MULTIDICT_DEFS_H #ifdef __cplusplus extern "C" { #endif static PyObject *multidict_str_lower = NULL; /* We link this module statically for convenience. If compiled as a shared library instead, some compilers don't allow addresses of Python objects defined in other libraries to be used in static initializers here. The DEFERRED_ADDRESS macro is used to tag the slots where such addresses appear; the module init function must fill in the tagged slots at runtime. The argument is for documentation -- the macro ignores it. */ #define DEFERRED_ADDRESS(ADDR) 0 #ifdef __cplusplus } #endif #endif ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/multidict/_multilib/dict.h0000644000175100001660000000056014766036355020212 0ustar00runnerdocker#ifndef _MULTIDICT_C_H #define _MULTIDICT_C_H #ifdef __cplusplus extern "C" { #endif typedef struct { // 16 or 24 for GC prefix PyObject_HEAD // 16 PyObject *weaklist; pair_list_t pairs; } MultiDictObject; typedef struct { PyObject_HEAD PyObject *weaklist; MultiDictObject *md; } MultiDictProxyObject; #ifdef __cplusplus } #endif #endif ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/multidict/_multilib/istr.h0000644000175100001660000000360014766036355020246 0ustar00runnerdocker#ifndef _MULTIDICT_ISTR_H #define _MULTIDICT_ISTR_H #ifdef __cplusplus extern "C" { #endif typedef struct { PyUnicodeObject str; PyObject * canonical; } istrobject; PyDoc_STRVAR(istr__doc__, "istr class implementation"); static PyTypeObject istr_type; static inline void istr_dealloc(istrobject *self) { Py_XDECREF(self->canonical); PyUnicode_Type.tp_dealloc((PyObject*)self); } static inline PyObject * istr_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { PyObject *x = NULL; static char *kwlist[] = {"object", "encoding", "errors", 0}; PyObject *encoding = NULL; PyObject *errors = NULL; PyObject *s = NULL; PyObject * ret = NULL; if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOO:str", kwlist, &x, &encoding, &errors)) { return NULL; } if (x != NULL && Py_TYPE(x) == &istr_type) { Py_INCREF(x); return x; } ret = PyUnicode_Type.tp_new(type, args, kwds); if (!ret) { goto fail; } s = PyObject_CallMethodNoArgs(ret, multidict_str_lower); if (!s) { goto fail; } ((istrobject*)ret)->canonical = s; s = NULL; /* the reference is stollen by .canonical */ return ret; fail: Py_XDECREF(ret); return NULL; } static PyTypeObject istr_type = { PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) "multidict._multidict.istr", sizeof(istrobject), .tp_dealloc = (destructor)istr_dealloc, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_UNICODE_SUBCLASS, .tp_doc = istr__doc__, .tp_base = DEFERRED_ADDRESS(&PyUnicode_Type), .tp_new = (newfunc)istr_new, }; static inline int istr_init(void) { istr_type.tp_base = &PyUnicode_Type; if (PyType_Ready(&istr_type) < 0) { return -1; } return 0; } #ifdef __cplusplus } #endif #endif ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/multidict/_multilib/iter.h0000644000175100001660000001366314766036355020242 0ustar00runnerdocker#ifndef 
_MULTIDICT_ITER_H #define _MULTIDICT_ITER_H #ifdef __cplusplus extern "C" { #endif static PyTypeObject multidict_items_iter_type; static PyTypeObject multidict_values_iter_type; static PyTypeObject multidict_keys_iter_type; typedef struct multidict_iter { PyObject_HEAD MultiDictObject *md; // MultiDict or CIMultiDict Py_ssize_t current; uint64_t version; } MultidictIter; static inline void _init_iter(MultidictIter *it, MultiDictObject *md) { Py_INCREF(md); it->md = md; it->current = 0; it->version = pair_list_version(&md->pairs); } static inline PyObject * multidict_items_iter_new(MultiDictObject *md) { MultidictIter *it = PyObject_GC_New( MultidictIter, &multidict_items_iter_type); if (it == NULL) { return NULL; } _init_iter(it, md); PyObject_GC_Track(it); return (PyObject *)it; } static inline PyObject * multidict_keys_iter_new(MultiDictObject *md) { MultidictIter *it = PyObject_GC_New( MultidictIter, &multidict_keys_iter_type); if (it == NULL) { return NULL; } _init_iter(it, md); PyObject_GC_Track(it); return (PyObject *)it; } static inline PyObject * multidict_values_iter_new(MultiDictObject *md) { MultidictIter *it = PyObject_GC_New( MultidictIter, &multidict_values_iter_type); if (it == NULL) { return NULL; } _init_iter(it, md); PyObject_GC_Track(it); return (PyObject *)it; } static inline PyObject * multidict_items_iter_iternext(MultidictIter *self) { PyObject *key = NULL; PyObject *value = NULL; PyObject *ret = NULL; if (self->version != pair_list_version(&self->md->pairs)) { PyErr_SetString(PyExc_RuntimeError, "Dictionary changed during iteration"); return NULL; } if (!_pair_list_next(&self->md->pairs, &self->current, NULL, &key, &value, NULL)) { PyErr_SetNone(PyExc_StopIteration); return NULL; } ret = PyTuple_Pack(2, key, value); if (ret == NULL) { return NULL; } return ret; } static inline PyObject * multidict_values_iter_iternext(MultidictIter *self) { PyObject *value = NULL; if (self->version != pair_list_version(&self->md->pairs)) { PyErr_SetString(PyExc_RuntimeError, "Dictionary changed during iteration"); return NULL; } if (!pair_list_next(&self->md->pairs, &self->current, NULL, NULL, &value)) { PyErr_SetNone(PyExc_StopIteration); return NULL; } Py_INCREF(value); return value; } static inline PyObject * multidict_keys_iter_iternext(MultidictIter *self) { PyObject *key = NULL; if (self->version != pair_list_version(&self->md->pairs)) { PyErr_SetString(PyExc_RuntimeError, "Dictionary changed during iteration"); return NULL; } if (!pair_list_next(&self->md->pairs, &self->current, NULL, &key, NULL)) { PyErr_SetNone(PyExc_StopIteration); return NULL; } Py_INCREF(key); return key; } static inline void multidict_iter_dealloc(MultidictIter *self) { PyObject_GC_UnTrack(self); Py_XDECREF(self->md); PyObject_GC_Del(self); } static inline int multidict_iter_traverse(MultidictIter *self, visitproc visit, void *arg) { Py_VISIT(self->md); return 0; } static inline int multidict_iter_clear(MultidictIter *self) { Py_CLEAR(self->md); return 0; } static inline PyObject * multidict_iter_len(MultidictIter *self) { return PyLong_FromLong(pair_list_len(&self->md->pairs)); } PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); static PyMethodDef multidict_iter_methods[] = { { "__length_hint__", (PyCFunction)(void(*)(void))multidict_iter_len, METH_NOARGS, length_hint_doc }, { NULL, NULL } /* sentinel */ }; /***********************************************************************/ static PyTypeObject multidict_items_iter_type = { 
PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) "multidict._multidict._itemsiter", /* tp_name */ sizeof(MultidictIter), /* tp_basicsize */ .tp_dealloc = (destructor)multidict_iter_dealloc, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, .tp_traverse = (traverseproc)multidict_iter_traverse, .tp_clear = (inquiry)multidict_iter_clear, .tp_iter = PyObject_SelfIter, .tp_iternext = (iternextfunc)multidict_items_iter_iternext, .tp_methods = multidict_iter_methods, }; static PyTypeObject multidict_values_iter_type = { PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) "multidict._multidict._valuesiter", /* tp_name */ sizeof(MultidictIter), /* tp_basicsize */ .tp_dealloc = (destructor)multidict_iter_dealloc, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, .tp_traverse = (traverseproc)multidict_iter_traverse, .tp_clear = (inquiry)multidict_iter_clear, .tp_iter = PyObject_SelfIter, .tp_iternext = (iternextfunc)multidict_values_iter_iternext, .tp_methods = multidict_iter_methods, }; static PyTypeObject multidict_keys_iter_type = { PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) "multidict._multidict._keysiter", /* tp_name */ sizeof(MultidictIter), /* tp_basicsize */ .tp_dealloc = (destructor)multidict_iter_dealloc, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, .tp_traverse = (traverseproc)multidict_iter_traverse, .tp_clear = (inquiry)multidict_iter_clear, .tp_iter = PyObject_SelfIter, .tp_iternext = (iternextfunc)multidict_keys_iter_iternext, .tp_methods = multidict_iter_methods, }; static inline int multidict_iter_init(void) { if (PyType_Ready(&multidict_items_iter_type) < 0 || PyType_Ready(&multidict_values_iter_type) < 0 || PyType_Ready(&multidict_keys_iter_type) < 0) { return -1; } return 0; } #ifdef __cplusplus } #endif #endif ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/multidict/_multilib/pair_list.h0000644000175100001660000006716314766036355021271 0ustar00runnerdocker#ifndef _MULTIDICT_PAIR_LIST_H #define _MULTIDICT_PAIR_LIST_H #ifdef __cplusplus extern "C" { #endif #include #include #include #include typedef struct pair { PyObject *identity; // 8 PyObject *key; // 8 PyObject *value; // 8 Py_hash_t hash; // 8 } pair_t; /* Note about the structure size With 29 pairs the MultiDict object size is slightly less than 1KiB (1000-1008 bytes depending on Python version, plus extra 12 bytes for memory allocator internal structures). As the result the max reserved size is 1020 bytes at most. To fit into 512 bytes, the structure can contain only 13 pairs which is too small, e.g. https://www.python.org returns 16 headers (9 of them are caching proxy information though). The embedded buffer intention is to fit the vast majority of possible HTTP headers into the buffer without allocating an extra memory block. */ #define EMBEDDED_CAPACITY 29 typedef struct pair_list { Py_ssize_t capacity; Py_ssize_t size; uint64_t version; bool calc_ci_indentity; pair_t *pairs; pair_t buffer[EMBEDDED_CAPACITY]; } pair_list_t; #define MIN_CAPACITY 63 #define CAPACITY_STEP 64 /* Global counter used to set ma_version_tag field of dictionary. * It is incremented each time that a dictionary is created and each * time that a dictionary is modified. 
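 * Each pair_list records the counter value of its most recent mutation, so an
 * unchanged pair_list_version() between two reads means that particular
 * multidict was not modified in the meantime.  The iterator types in iter.h
 * rely on this to detect concurrent mutation, and getversion() exposes the
 * value to Python code.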
*/ static uint64_t pair_list_global_version = 0; #define NEXT_VERSION() (++pair_list_global_version) static inline int str_cmp(PyObject *s1, PyObject *s2) { PyObject *ret = PyUnicode_RichCompare(s1, s2, Py_EQ); if (ret == Py_True) { Py_DECREF(ret); return 1; } else if (ret == NULL) { return -1; } else { Py_DECREF(ret); return 0; } } static inline PyObject * key_to_str(PyObject *key) { PyObject *ret; PyTypeObject *type = Py_TYPE(key); if (type == &istr_type) { ret = ((istrobject*)key)->canonical; Py_INCREF(ret); return ret; } if (PyUnicode_CheckExact(key)) { Py_INCREF(key); return key; } if (PyUnicode_Check(key)) { return PyObject_Str(key); } PyErr_SetString(PyExc_TypeError, "MultiDict keys should be either str " "or subclasses of str"); return NULL; } static inline PyObject * ci_key_to_str(PyObject *key) { PyObject *ret; PyTypeObject *type = Py_TYPE(key); if (type == &istr_type) { ret = ((istrobject*)key)->canonical; Py_INCREF(ret); return ret; } if (PyUnicode_Check(key)) { return PyObject_CallMethodNoArgs(key, multidict_str_lower); } PyErr_SetString(PyExc_TypeError, "CIMultiDict keys should be either str " "or subclasses of str"); return NULL; } static inline pair_t * pair_list_get(pair_list_t *list, Py_ssize_t i) { pair_t *item = list->pairs + i; return item; } static inline int pair_list_grow(pair_list_t *list) { // Grow by one element if needed Py_ssize_t new_capacity; pair_t *new_pairs; if (list->size < list->capacity) { return 0; } if (list->pairs == list->buffer) { new_pairs = PyMem_New(pair_t, MIN_CAPACITY); memcpy(new_pairs, list->buffer, (size_t)list->capacity * sizeof(pair_t)); list->pairs = new_pairs; list->capacity = MIN_CAPACITY; return 0; } else { new_capacity = list->capacity + CAPACITY_STEP; new_pairs = PyMem_Resize(list->pairs, pair_t, (size_t)new_capacity); if (NULL == new_pairs) { // Resizing error return -1; } list->pairs = new_pairs; list->capacity = new_capacity; return 0; } } static inline int pair_list_shrink(pair_list_t *list) { // Shrink by one element if needed. // Optimization is applied to prevent jitter // (grow-shrink-grow-shrink on adding-removing the single element // when the buffer is full). // To prevent this, the buffer is resized if the size is less than the capacity // by 2*CAPACITY_STEP factor. // The switch back to embedded buffer is never performed for both reasons: // the code simplicity and the jitter prevention. 
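/* Note on the "identity" used throughout this file: it is the canonical text
   a key is matched by -- produced by key_to_str() for MultiDict and by
   ci_key_to_str() (which lower-cases via str.lower) for CIMultiDict; istr
   objects carry their canonical form pre-computed.  Each pair also caches the
   identity's hash so lookups can skip the string comparison on mismatches. */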
pair_t *new_pairs; Py_ssize_t new_capacity; if (list->capacity - list->size < 2 * CAPACITY_STEP) { return 0; } new_capacity = list->capacity - CAPACITY_STEP; if (new_capacity < MIN_CAPACITY) { return 0; } new_pairs = PyMem_Resize(list->pairs, pair_t, (size_t)new_capacity); if (NULL == new_pairs) { // Resizing error return -1; } list->pairs = new_pairs; list->capacity = new_capacity; return 0; } static inline int _pair_list_init(pair_list_t *list, bool calc_ci_identity) { list->calc_ci_indentity = calc_ci_identity; list->pairs = list->buffer; list->capacity = EMBEDDED_CAPACITY; list->size = 0; list->version = NEXT_VERSION(); return 0; } static inline int pair_list_init(pair_list_t *list) { return _pair_list_init(list, /* calc_ci_identity = */ false); } static inline int ci_pair_list_init(pair_list_t *list) { return _pair_list_init(list, /* calc_ci_identity = */ true); } static inline PyObject * pair_list_calc_identity(pair_list_t *list, PyObject *key) { if (list->calc_ci_indentity) return ci_key_to_str(key); return key_to_str(key); } static inline void pair_list_dealloc(pair_list_t *list) { pair_t *pair; Py_ssize_t pos; for (pos = 0; pos < list->size; pos++) { pair = pair_list_get(list, pos); Py_XDECREF(pair->identity); Py_XDECREF(pair->key); Py_XDECREF(pair->value); } /* Strictly speaking, resetting size and capacity and assigning pairs to buffer is not necessary. Do it to consistency and idemotency. The cleanup doesn't hurt performance. !!! !!! The buffer deletion is crucial though. !!! */ list->size = 0; if (list->pairs != list->buffer) { PyMem_Del(list->pairs); list->pairs = list->buffer; list->capacity = EMBEDDED_CAPACITY; } } static inline Py_ssize_t pair_list_len(pair_list_t *list) { return list->size; } static inline int _pair_list_add_with_hash(pair_list_t *list, PyObject *identity, PyObject *key, PyObject *value, Py_hash_t hash) { pair_t *pair; if (pair_list_grow(list) < 0) { return -1; } pair = pair_list_get(list, list->size); Py_INCREF(identity); pair->identity = identity; Py_INCREF(key); pair->key = key; Py_INCREF(value); pair->value = value; pair->hash = hash; list->version = NEXT_VERSION(); list->size += 1; return 0; } static inline int pair_list_add(pair_list_t *list, PyObject *key, PyObject *value) { Py_hash_t hash; PyObject *identity = NULL; int ret; identity = pair_list_calc_identity(list, key); if (identity == NULL) { goto fail; } hash = PyObject_Hash(identity); if (hash == -1) { goto fail; } ret = _pair_list_add_with_hash(list, identity, key, value, hash); Py_DECREF(identity); return ret; fail: Py_XDECREF(identity); return -1; } static inline int pair_list_del_at(pair_list_t *list, Py_ssize_t pos) { // return 1 on success, -1 on failure Py_ssize_t tail; pair_t *pair; pair = pair_list_get(list, pos); Py_DECREF(pair->identity); Py_DECREF(pair->key); Py_DECREF(pair->value); list->size -= 1; list->version = NEXT_VERSION(); if (list->size == pos) { // remove from tail, no need to shift body return 0; } tail = list->size - pos; // TODO: raise an error if tail < 0 memmove((void *)pair_list_get(list, pos), (void *)pair_list_get(list, pos + 1), sizeof(pair_t) * (size_t)tail); return pair_list_shrink(list); } static inline int _pair_list_drop_tail(pair_list_t *list, PyObject *identity, Py_hash_t hash, Py_ssize_t pos) { // return 1 if deleted, 0 if not found pair_t *pair; int ret; int found = 0; if (pos >= list->size) { return 0; } for (; pos < list->size; pos++) { pair = pair_list_get(list, pos); if (pair->hash != hash) { continue; } ret = str_cmp(pair->identity, identity); if 
(ret > 0) { if (pair_list_del_at(list, pos) < 0) { return -1; } found = 1; pos--; } else if (ret == -1) { return -1; } } return found; } static inline int _pair_list_del_hash(pair_list_t *list, PyObject *identity, PyObject *key, Py_hash_t hash) { int ret = _pair_list_drop_tail(list, identity, hash, 0); if (ret < 0) { return -1; } else if (ret == 0) { PyErr_SetObject(PyExc_KeyError, key); return -1; } else { list->version = NEXT_VERSION(); return 0; } } static inline int pair_list_del(pair_list_t *list, PyObject *key) { PyObject *identity = NULL; Py_hash_t hash; int ret; identity = pair_list_calc_identity(list, key); if (identity == NULL) { goto fail; } hash = PyObject_Hash(identity); if (hash == -1) { goto fail; } ret = _pair_list_del_hash(list, identity, key, hash); Py_DECREF(identity); return ret; fail: Py_XDECREF(identity); return -1; } static inline uint64_t pair_list_version(pair_list_t *list) { return list->version; } static inline int _pair_list_next(pair_list_t *list, Py_ssize_t *ppos, PyObject **pidentity, PyObject **pkey, PyObject **pvalue, Py_hash_t *phash) { pair_t *pair; if (*ppos >= list->size) { return 0; } pair = pair_list_get(list, *ppos); if (pidentity) { *pidentity = pair->identity; } if (pkey) { *pkey = pair->key; } if (pvalue) { *pvalue = pair->value; } if (phash) { *phash = pair->hash; } *ppos += 1; return 1; } static inline int pair_list_next(pair_list_t *list, Py_ssize_t *ppos, PyObject **pidentity, PyObject **pkey, PyObject **pvalue) { Py_hash_t hash; return _pair_list_next(list, ppos, pidentity, pkey, pvalue, &hash); } static inline int pair_list_contains(pair_list_t *list, PyObject *key) { Py_hash_t hash1, hash2; Py_ssize_t pos = 0; PyObject *ident = NULL; PyObject *identity = NULL; int tmp; if (!PyUnicode_Check(key)) { return 0; } ident = pair_list_calc_identity(list, key); if (ident == NULL) { goto fail; } hash1 = PyObject_Hash(ident); if (hash1 == -1) { goto fail; } while (_pair_list_next(list, &pos, &identity, NULL, NULL, &hash2)) { if (hash1 != hash2) { continue; } tmp = str_cmp(ident, identity); if (tmp > 0) { Py_DECREF(ident); return 1; } else if (tmp < 0) { goto fail; } } Py_DECREF(ident); return 0; fail: Py_XDECREF(ident); return -1; } static inline PyObject * pair_list_get_one(pair_list_t *list, PyObject *key) { Py_hash_t hash1, hash2; Py_ssize_t pos = 0; PyObject *ident = NULL; PyObject *identity = NULL; PyObject *value = NULL; int tmp; ident = pair_list_calc_identity(list, key); if (ident == NULL) { goto fail; } hash1 = PyObject_Hash(ident); if (hash1 == -1) { goto fail; } while (_pair_list_next(list, &pos, &identity, NULL, &value, &hash2)) { if (hash1 != hash2) { continue; } tmp = str_cmp(ident, identity); if (tmp > 0) { Py_INCREF(value); Py_DECREF(ident); return value; } else if (tmp < 0) { goto fail; } } Py_DECREF(ident); PyErr_SetObject(PyExc_KeyError, key); return NULL; fail: Py_XDECREF(ident); return NULL; } static inline PyObject * pair_list_get_all(pair_list_t *list, PyObject *key) { Py_hash_t hash1, hash2; Py_ssize_t pos = 0; PyObject *ident = NULL; PyObject *identity = NULL; PyObject *value = NULL; PyObject *res = NULL; int tmp; ident = pair_list_calc_identity(list, key); if (ident == NULL) { goto fail; } hash1 = PyObject_Hash(ident); if (hash1 == -1) { goto fail; } while (_pair_list_next(list, &pos, &identity, NULL, &value, &hash2)) { if (hash1 != hash2) { continue; } tmp = str_cmp(ident, identity); if (tmp > 0) { if (res == NULL) { res = PyList_New(1); if (res == NULL) { goto fail; } if (PyList_SetItem(res, 0, value) < 0) { goto fail; } 
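                    /* PyList_SetItem() steals a reference, but `value` is only
                       borrowed from the pair list, so grant the new list an
                       owned reference here. */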
Py_INCREF(value); } else if (PyList_Append(res, value) < 0) { goto fail; } } else if (tmp < 0) { goto fail; } } if (res == NULL) { PyErr_SetObject(PyExc_KeyError, key); } Py_DECREF(ident); return res; fail: Py_XDECREF(ident); Py_XDECREF(res); return NULL; } static inline PyObject * pair_list_set_default(pair_list_t *list, PyObject *key, PyObject *value) { Py_hash_t hash1, hash2; Py_ssize_t pos = 0; PyObject *ident = NULL; PyObject *identity = NULL; PyObject *value2 = NULL; int tmp; ident = pair_list_calc_identity(list, key); if (ident == NULL) { goto fail; } hash1 = PyObject_Hash(ident); if (hash1 == -1) { goto fail; } while (_pair_list_next(list, &pos, &identity, NULL, &value2, &hash2)) { if (hash1 != hash2) { continue; } tmp = str_cmp(ident, identity); if (tmp > 0) { Py_INCREF(value2); Py_DECREF(ident); return value2; } else if (tmp < 0) { goto fail; } } if (_pair_list_add_with_hash(list, ident, key, value, hash1) < 0) { goto fail; } Py_INCREF(value); Py_DECREF(ident); return value; fail: Py_XDECREF(ident); return NULL; } static inline PyObject * pair_list_pop_one(pair_list_t *list, PyObject *key) { pair_t *pair; Py_hash_t hash; Py_ssize_t pos; PyObject *value = NULL; int tmp; PyObject *ident = NULL; ident = pair_list_calc_identity(list, key); if (ident == NULL) { goto fail; } hash = PyObject_Hash(ident); if (hash == -1) { goto fail; } for (pos=0; pos < list->size; pos++) { pair = pair_list_get(list, pos); if (pair->hash != hash) { continue; } tmp = str_cmp(ident, pair->identity); if (tmp > 0) { value = pair->value; Py_INCREF(value); if (pair_list_del_at(list, pos) < 0) { goto fail; } Py_DECREF(ident); return value; } else if (tmp < 0) { goto fail; } } PyErr_SetObject(PyExc_KeyError, key); goto fail; fail: Py_XDECREF(value); Py_XDECREF(ident); return NULL; } static inline PyObject * pair_list_pop_all(pair_list_t *list, PyObject *key) { Py_hash_t hash; Py_ssize_t pos; pair_t *pair; int tmp; PyObject *res = NULL; PyObject *ident = NULL; ident = pair_list_calc_identity(list, key); if (ident == NULL) { goto fail; } hash = PyObject_Hash(ident); if (hash == -1) { goto fail; } if (list->size == 0) { PyErr_SetObject(PyExc_KeyError, ident); goto fail; } for (pos = list->size - 1; pos >= 0; pos--) { pair = pair_list_get(list, pos); if (hash != pair->hash) { continue; } tmp = str_cmp(ident, pair->identity); if (tmp > 0) { if (res == NULL) { res = PyList_New(1); if (res == NULL) { goto fail; } if (PyList_SetItem(res, 0, pair->value) < 0) { goto fail; } Py_INCREF(pair->value); } else if (PyList_Append(res, pair->value) < 0) { goto fail; } if (pair_list_del_at(list, pos) < 0) { goto fail; } } else if (tmp < 0) { goto fail; } } if (res == NULL) { PyErr_SetObject(PyExc_KeyError, key); } else if (PyList_Reverse(res) < 0) { goto fail; } Py_DECREF(ident); return res; fail: Py_XDECREF(ident); Py_XDECREF(res); return NULL; } static inline PyObject * pair_list_pop_item(pair_list_t *list) { PyObject *ret; pair_t *pair; if (list->size == 0) { PyErr_SetString(PyExc_KeyError, "empty multidict"); return NULL; } pair = pair_list_get(list, 0); ret = PyTuple_Pack(2, pair->key, pair->value); if (ret == NULL) { return NULL; } if (pair_list_del_at(list, 0) < 0) { Py_DECREF(ret); return NULL; } return ret; } static inline int pair_list_replace(pair_list_t *list, PyObject * key, PyObject *value) { pair_t *pair; Py_ssize_t pos; int tmp; int found = 0; PyObject *identity = NULL; Py_hash_t hash; identity = pair_list_calc_identity(list, key); if (identity == NULL) { goto fail; } hash = PyObject_Hash(identity); if (hash == -1) 
{ goto fail; } for (pos = 0; pos < list->size; pos++) { pair = pair_list_get(list, pos); if (hash != pair->hash) { continue; } tmp = str_cmp(identity, pair->identity); if (tmp > 0) { found = 1; Py_INCREF(key); Py_DECREF(pair->key); pair->key = key; Py_INCREF(value); Py_DECREF(pair->value); pair->value = value; break; } else if (tmp < 0) { goto fail; } } if (!found) { if (_pair_list_add_with_hash(list, identity, key, value, hash) < 0) { goto fail; } Py_DECREF(identity); return 0; } else { list->version = NEXT_VERSION(); if (_pair_list_drop_tail(list, identity, hash, pos+1) < 0) { goto fail; } Py_DECREF(identity); return 0; } fail: Py_XDECREF(identity); return -1; } static inline int _dict_set_number(PyObject *dict, PyObject *key, Py_ssize_t num) { PyObject *tmp = PyLong_FromSsize_t(num); if (tmp == NULL) { return -1; } if (PyDict_SetItem(dict, key, tmp) < 0) { Py_DECREF(tmp); return -1; } return 0; } static inline int _pair_list_post_update(pair_list_t *list, PyObject* used_keys, Py_ssize_t pos) { pair_t *pair; PyObject *tmp; Py_ssize_t num; for (; pos < list->size; pos++) { pair = pair_list_get(list, pos); int status = PyDict_GetItemRef(used_keys, pair->identity, &tmp); if (status == -1) { // exception set return -1; } else if (status == 0) { // not found continue; } num = PyLong_AsSsize_t(tmp); Py_DECREF(tmp); if (num == -1) { if (!PyErr_Occurred()) { PyErr_SetString(PyExc_RuntimeError, "invalid internal state"); } return -1; } if (pos >= num) { // del self[pos] if (pair_list_del_at(list, pos) < 0) { return -1; } pos--; } } list->version = NEXT_VERSION(); return 0; } // TODO: need refactoring function name static inline int _pair_list_update(pair_list_t *list, PyObject *key, PyObject *value, PyObject *used_keys, PyObject *identity, Py_hash_t hash) { PyObject *item = NULL; pair_t *pair = NULL; Py_ssize_t pos; int found; int ident_cmp_res; int status = PyDict_GetItemRef(used_keys, identity, &item); if (status == -1) { // exception set return -1; } else if (status == 0) { // not found pos = 0; } else { pos = PyLong_AsSsize_t(item); Py_DECREF(item); if (pos == -1) { if (!PyErr_Occurred()) { PyErr_SetString(PyExc_RuntimeError, "invalid internal state"); } return -1; } } found = 0; for (; pos < list->size; pos++) { pair = pair_list_get(list, pos); if (pair->hash != hash) { continue; } ident_cmp_res = str_cmp(pair->identity, identity); if (ident_cmp_res > 0) { Py_INCREF(key); Py_DECREF(pair->key); pair->key = key; Py_INCREF(value); Py_DECREF(pair->value); pair->value = value; if (_dict_set_number(used_keys, pair->identity, pos + 1) < 0) { return -1; } found = 1; break; } else if (ident_cmp_res < 0) { return -1; } } if (!found) { if (_pair_list_add_with_hash(list, identity, key, value, hash) < 0) { return -1; } if (_dict_set_number(used_keys, identity, list->size) < 0) { return -1; } } return 0; } static inline int pair_list_update(pair_list_t *list, pair_list_t *other) { PyObject *used_keys = NULL; pair_t *pair = NULL; Py_ssize_t pos; if (other->size == 0) { return 0; } used_keys = PyDict_New(); if (used_keys == NULL) { return -1; } for (pos = 0; pos < other->size; pos++) { pair = pair_list_get(other, pos); if (_pair_list_update(list, pair->key, pair->value, used_keys, pair->identity, pair->hash) < 0) { goto fail; } } if (_pair_list_post_update(list, used_keys, 0) < 0) { goto fail; } Py_DECREF(used_keys); return 0; fail: Py_XDECREF(used_keys); return -1; } static inline int pair_list_update_from_seq(pair_list_t *list, PyObject *seq) { PyObject *it = NULL; // iter(seq) PyObject *fast = NULL; // 
item as a 2-tuple or 2-list PyObject *item = NULL; // seq[i] PyObject *used_keys = NULL; // dict() PyObject *key = NULL; PyObject *value = NULL; PyObject *identity = NULL; Py_hash_t hash; Py_ssize_t i; Py_ssize_t n; it = PyObject_GetIter(seq); if (it == NULL) { return -1; } used_keys = PyDict_New(); if (used_keys == NULL) { goto fail_1; } for (i = 0; ; ++i) { // i - index into seq of current element fast = NULL; item = PyIter_Next(it); if (item == NULL) { if (PyErr_Occurred()) { goto fail_1; } break; } // Convert item to sequence, and verify length 2. #ifdef Py_GIL_DISABLED if (!PySequence_Check(item)) { #else fast = PySequence_Fast(item, ""); if (fast == NULL) { if (PyErr_ExceptionMatches(PyExc_TypeError)) { #endif PyErr_Format(PyExc_TypeError, "multidict cannot convert sequence element #%zd" " to a sequence", i); #ifndef Py_GIL_DISABLED } #endif goto fail_1; } #ifdef Py_GIL_DISABLED n = PySequence_Size(item); #else n = PySequence_Fast_GET_SIZE(fast); #endif if (n != 2) { PyErr_Format(PyExc_ValueError, "multidict update sequence element #%zd " "has length %zd; 2 is required", i, n); goto fail_1; } #ifdef Py_GIL_DISABLED key = PySequence_ITEM(item, 0); if (key == NULL) { PyErr_Format(PyExc_ValueError, "multidict update sequence element #%zd's " "key could not be fetched", i); goto fail_1; } value = PySequence_ITEM(item, 1); if (value == NULL) { PyErr_Format(PyExc_ValueError, "multidict update sequence element #%zd's " "value could not be fetched", i); goto fail_1; } #else key = PySequence_Fast_GET_ITEM(fast, 0); value = PySequence_Fast_GET_ITEM(fast, 1); Py_INCREF(key); Py_INCREF(value); #endif identity = pair_list_calc_identity(list, key); if (identity == NULL) { goto fail_1; } hash = PyObject_Hash(identity); if (hash == -1) { goto fail_1; } if (_pair_list_update(list, key, value, used_keys, identity, hash) < 0) { goto fail_1; } Py_DECREF(key); Py_DECREF(value); #ifndef Py_GIL_DISABLED Py_DECREF(fast); #endif Py_DECREF(item); Py_DECREF(identity); } if (_pair_list_post_update(list, used_keys, 0) < 0) { goto fail_2; } Py_DECREF(it); Py_DECREF(used_keys); return 0; fail_1: Py_XDECREF(key); Py_XDECREF(value); Py_XDECREF(fast); Py_XDECREF(item); Py_XDECREF(identity); fail_2: Py_XDECREF(it); Py_XDECREF(used_keys); return -1; } static inline int pair_list_eq_to_mapping(pair_list_t *list, PyObject *other) { PyObject *key = NULL; PyObject *avalue = NULL; PyObject *bvalue; Py_ssize_t pos, other_len; int eq; if (!PyMapping_Check(other)) { PyErr_Format(PyExc_TypeError, "other argument must be a mapping, not %s", Py_TYPE(other)->tp_name); return -1; } other_len = PyMapping_Size(other); if (other_len < 0) { return -1; } if (pair_list_len(list) != other_len) { return 0; } pos = 0; while (pair_list_next(list, &pos, NULL, &key, &avalue)) { bvalue = PyObject_GetItem(other, key); if (bvalue == NULL) { if (PyErr_ExceptionMatches(PyExc_KeyError)) { PyErr_Clear(); return 0; } return -1; } eq = PyObject_RichCompareBool(avalue, bvalue, Py_EQ); Py_DECREF(bvalue); if (eq <= 0) { return eq; } } return 1; } /***********************************************************************/ static inline int pair_list_traverse(pair_list_t *list, visitproc visit, void *arg) { pair_t *pair = NULL; Py_ssize_t pos; for (pos = 0; pos < list->size; pos++) { pair = pair_list_get(list, pos); // Don't need traverse the identity: it is a terminal Py_VISIT(pair->key); Py_VISIT(pair->value); } return 0; } static inline int pair_list_clear(pair_list_t *list) { pair_t *pair = NULL; Py_ssize_t pos; if (list->size == 0) { return 0; } 
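    /* Bump the version first so live iterators fail fast, then drop every
       owned reference and, if a heap buffer was allocated, release it and
       fall back to the embedded buffer. */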
list->version = NEXT_VERSION(); for (pos = 0; pos < list->size; pos++) { pair = pair_list_get(list, pos); Py_CLEAR(pair->key); Py_CLEAR(pair->identity); Py_CLEAR(pair->value); } list->size = 0; if (list->pairs != list->buffer) { PyMem_Del(list->pairs); list->pairs = list->buffer; } return 0; } #ifdef __cplusplus } #endif #endif ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/multidict/_multilib/pythoncapi_compat.h0000644000175100001660000007175714766036355023030 0ustar00runnerdocker// Header file providing new C API functions to old Python versions. // // File distributed under the Zero Clause BSD (0BSD) license. // Copyright Contributors to the pythoncapi_compat project. // // Homepage: // https://github.com/python/pythoncapi_compat // // Latest version: // https://raw.githubusercontent.com/python/pythoncapi_compat/master/pythoncapi_compat.h // // The vendored version comes from commit: // https://raw.githubusercontent.com/python/pythoncapi-compat/2d18aecd7b2f549d38a13e27b682ea4966f37bd8/pythoncapi_compat.h // // SPDX-License-Identifier: 0BSD #ifndef PYTHONCAPI_COMPAT #define PYTHONCAPI_COMPAT #ifdef __cplusplus extern "C" { #endif #include // Python 3.11.0b4 added PyFrame_Back() to Python.h #if PY_VERSION_HEX < 0x030b00B4 && !defined(PYPY_VERSION) # include "frameobject.h" // PyFrameObject, PyFrame_GetBack() #endif #ifndef _Py_CAST # define _Py_CAST(type, expr) ((type)(expr)) #endif // Static inline functions should use _Py_NULL rather than using directly NULL // to prevent C++ compiler warnings. On C23 and newer and on C++11 and newer, // _Py_NULL is defined as nullptr. #if (defined (__STDC_VERSION__) && __STDC_VERSION__ > 201710L) \ || (defined(__cplusplus) && __cplusplus >= 201103) # define _Py_NULL nullptr #else # define _Py_NULL NULL #endif // Cast argument to PyObject* type. #ifndef _PyObject_CAST # define _PyObject_CAST(op) _Py_CAST(PyObject*, op) #endif // bpo-42262 added Py_NewRef() to Python 3.10.0a3 #if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_NewRef) static inline PyObject* _Py_NewRef(PyObject *obj) { Py_INCREF(obj); return obj; } #define Py_NewRef(obj) _Py_NewRef(_PyObject_CAST(obj)) #endif // bpo-42262 added Py_XNewRef() to Python 3.10.0a3 #if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_XNewRef) static inline PyObject* _Py_XNewRef(PyObject *obj) { Py_XINCREF(obj); return obj; } #define Py_XNewRef(obj) _Py_XNewRef(_PyObject_CAST(obj)) #endif // bpo-43753 added Py_Is(), Py_IsNone(), Py_IsTrue() and Py_IsFalse() // to Python 3.10.0b1. 
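/* The remainder of this vendored header follows the same pattern: each block
   backports one newer CPython C-API function or macro for older CPython and
   PyPy builds, guarded by PY_VERSION_HEX / PYPY_VERSION checks, so the
   extension code above can use the modern spellings unconditionally. */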
#if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_Is) # define Py_Is(x, y) ((x) == (y)) #endif #if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_IsNone) # define Py_IsNone(x) Py_Is(x, Py_None) #endif #if (PY_VERSION_HEX < 0x030A00B1 || defined(PYPY_VERSION)) && !defined(Py_IsTrue) # define Py_IsTrue(x) Py_Is(x, Py_True) #endif #if (PY_VERSION_HEX < 0x030A00B1 || defined(PYPY_VERSION)) && !defined(Py_IsFalse) # define Py_IsFalse(x) Py_Is(x, Py_False) #endif #if defined(PYPY_VERSION) static inline PyCodeObject* PyFrame_GetCode(PyFrameObject *frame) { assert(frame != _Py_NULL); assert(frame->f_code != _Py_NULL); return _Py_CAST(PyCodeObject*, Py_NewRef(frame->f_code)); } #endif static inline PyCodeObject* _PyFrame_GetCodeBorrow(PyFrameObject *frame) { PyCodeObject *code = PyFrame_GetCode(frame); Py_DECREF(code); return code; } #if !defined(PYPY_VERSION) static inline PyFrameObject* _PyFrame_GetBackBorrow(PyFrameObject *frame) { PyFrameObject *back = PyFrame_GetBack(frame); Py_XDECREF(back); return back; } #endif // bpo-40421 added PyFrame_GetLocals() to Python 3.11.0a7 #if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) static inline PyObject* PyFrame_GetLocals(PyFrameObject *frame) { if (PyFrame_FastToLocalsWithError(frame) < 0) { return NULL; } return Py_NewRef(frame->f_locals); } #endif // bpo-40421 added PyFrame_GetGlobals() to Python 3.11.0a7 #if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) static inline PyObject* PyFrame_GetGlobals(PyFrameObject *frame) { return Py_NewRef(frame->f_globals); } #endif // bpo-40421 added PyFrame_GetBuiltins() to Python 3.11.0a7 #if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) static inline PyObject* PyFrame_GetBuiltins(PyFrameObject *frame) { return Py_NewRef(frame->f_builtins); } #endif // bpo-40421 added PyFrame_GetLasti() to Python 3.11.0b1 #if PY_VERSION_HEX < 0x030B00B1 && !defined(PYPY_VERSION) static inline int PyFrame_GetLasti(PyFrameObject *frame) { #if PY_VERSION_HEX >= 0x030A00A7 // bpo-27129: Since Python 3.10.0a7, f_lasti is an instruction offset, // not a bytes offset anymore. Python uses 16-bit "wordcode" (2 bytes) // instructions. 
if (frame->f_lasti < 0) { return -1; } return frame->f_lasti * 2; #else return frame->f_lasti; #endif } #endif // gh-91248 added PyFrame_GetVar() to Python 3.12.0a2 #if PY_VERSION_HEX < 0x030C00A2 && !defined(PYPY_VERSION) static inline PyObject* PyFrame_GetVar(PyFrameObject *frame, PyObject *name) { PyObject *locals, *value; locals = PyFrame_GetLocals(frame); if (locals == NULL) { return NULL; } value = PyDict_GetItemWithError(locals, name); Py_DECREF(locals); if (value == NULL) { if (PyErr_Occurred()) { return NULL; } PyErr_Format(PyExc_NameError, "variable %R does not exist", name); return NULL; } return Py_NewRef(value); } #endif // gh-91248 added PyFrame_GetVarString() to Python 3.12.0a2 #if PY_VERSION_HEX < 0x030C00A2 && !defined(PYPY_VERSION) static inline PyObject* PyFrame_GetVarString(PyFrameObject *frame, const char *name) { PyObject *name_obj, *value; name_obj = PyUnicode_FromString(name); if (name_obj == NULL) { return NULL; } value = PyFrame_GetVar(frame, name_obj); Py_DECREF(name_obj); return value; } #endif #if defined(PYPY_VERSION) static inline PyInterpreterState * PyThreadState_GetInterpreter(PyThreadState *tstate) { assert(tstate != _Py_NULL); return tstate->interp; } #endif #if !defined(PYPY_VERSION) static inline PyFrameObject* _PyThreadState_GetFrameBorrow(PyThreadState *tstate) { PyFrameObject *frame = PyThreadState_GetFrame(tstate); Py_XDECREF(frame); return frame; } #endif #if defined(PYPY_VERSION) static inline PyInterpreterState* PyInterpreterState_Get(void) { PyThreadState *tstate; PyInterpreterState *interp; tstate = PyThreadState_GET(); if (tstate == _Py_NULL) { Py_FatalError("GIL released (tstate is NULL)"); } interp = tstate->interp; if (interp == _Py_NULL) { Py_FatalError("no current interpreter"); } return interp; } #endif // bpo-43760 added PyThreadState_EnterTracing() to Python 3.11.0a2 #if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) static inline void PyThreadState_EnterTracing(PyThreadState *tstate) { tstate->tracing++; #if PY_VERSION_HEX >= 0x030A00A1 tstate->cframe->use_tracing = 0; #else tstate->use_tracing = 0; #endif } #endif // bpo-43760 added PyThreadState_LeaveTracing() to Python 3.11.0a2 #if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) static inline void PyThreadState_LeaveTracing(PyThreadState *tstate) { int use_tracing = (tstate->c_tracefunc != _Py_NULL || tstate->c_profilefunc != _Py_NULL); tstate->tracing--; #if PY_VERSION_HEX >= 0x030A00A1 tstate->cframe->use_tracing = use_tracing; #else tstate->use_tracing = use_tracing; #endif } #endif // bpo-1635741 added PyModule_AddObjectRef() to Python 3.10.0a3 #if PY_VERSION_HEX < 0x030A00A3 static inline int PyModule_AddObjectRef(PyObject *module, const char *name, PyObject *value) { int res; if (!value && !PyErr_Occurred()) { // PyModule_AddObject() raises TypeError in this case PyErr_SetString(PyExc_SystemError, "PyModule_AddObjectRef() must be called " "with an exception raised if value is NULL"); return -1; } Py_XINCREF(value); res = PyModule_AddObject(module, name, value); if (res < 0) { Py_XDECREF(value); } return res; } #endif // bpo-46906 added PyFloat_Pack2() and PyFloat_Unpack2() to Python 3.11a7. // Python 3.11a2 moved _PyFloat_Pack2() and _PyFloat_Unpack2() to the internal // C API: Python 3.11a2-3.11a6 versions are not supported. 
#if PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION) static inline int PyFloat_Pack2(double x, char *p, int le) { return _PyFloat_Pack2(x, (unsigned char*)p, le); } static inline double PyFloat_Unpack2(const char *p, int le) { return _PyFloat_Unpack2((const unsigned char *)p, le); } #endif // bpo-46906 added PyFloat_Pack4(), PyFloat_Pack8(), PyFloat_Unpack4() and // PyFloat_Unpack8() to Python 3.11a7. // Python 3.11a2 moved _PyFloat_Pack4(), _PyFloat_Pack8(), _PyFloat_Unpack4() // and _PyFloat_Unpack8() to the internal C API: Python 3.11a2-3.11a6 versions // are not supported. #if PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION) static inline int PyFloat_Pack4(double x, char *p, int le) { return _PyFloat_Pack4(x, (unsigned char*)p, le); } static inline int PyFloat_Pack8(double x, char *p, int le) { return _PyFloat_Pack8(x, (unsigned char*)p, le); } static inline double PyFloat_Unpack4(const char *p, int le) { return _PyFloat_Unpack4((const unsigned char *)p, le); } static inline double PyFloat_Unpack8(const char *p, int le) { return _PyFloat_Unpack8((const unsigned char *)p, le); } #endif // gh-92154 added PyCode_GetCode() to Python 3.11.0b1 #if PY_VERSION_HEX < 0x030B00B1 && !defined(PYPY_VERSION) static inline PyObject* PyCode_GetCode(PyCodeObject *code) { return Py_NewRef(code->co_code); } #endif // gh-95008 added PyCode_GetVarnames() to Python 3.11.0rc1 #if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION) static inline PyObject* PyCode_GetVarnames(PyCodeObject *code) { return Py_NewRef(code->co_varnames); } #endif // gh-95008 added PyCode_GetFreevars() to Python 3.11.0rc1 #if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION) static inline PyObject* PyCode_GetFreevars(PyCodeObject *code) { return Py_NewRef(code->co_freevars); } #endif // gh-95008 added PyCode_GetCellvars() to Python 3.11.0rc1 #if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION) static inline PyObject* PyCode_GetCellvars(PyCodeObject *code) { return Py_NewRef(code->co_cellvars); } #endif // gh-105922 added PyImport_AddModuleRef() to Python 3.13.0a1 #if PY_VERSION_HEX < 0x030D00A0 static inline PyObject* PyImport_AddModuleRef(const char *name) { return Py_XNewRef(PyImport_AddModule(name)); } #endif // gh-105927 added PyWeakref_GetRef() to Python 3.13.0a1 #if PY_VERSION_HEX < 0x030D0000 static inline int PyWeakref_GetRef(PyObject *ref, PyObject **pobj) { PyObject *obj; if (ref != NULL && !PyWeakref_Check(ref)) { *pobj = NULL; PyErr_SetString(PyExc_TypeError, "expected a weakref"); return -1; } obj = PyWeakref_GetObject(ref); if (obj == NULL) { // SystemError if ref is NULL *pobj = NULL; return -1; } if (obj == Py_None) { *pobj = NULL; return 0; } *pobj = Py_NewRef(obj); return (*pobj != NULL); } #endif // gh-106521 added PyObject_GetOptionalAttr() and // PyObject_GetOptionalAttrString() to Python 3.13.0a1 #if PY_VERSION_HEX < 0x030D00A1 static inline int PyObject_GetOptionalAttr(PyObject *obj, PyObject *attr_name, PyObject **result) { return _PyObject_LookupAttr(obj, attr_name, result); } static inline int PyObject_GetOptionalAttrString(PyObject *obj, const char *attr_name, PyObject **result) { PyObject *name_obj; int rc; name_obj = PyUnicode_FromString(attr_name); if (name_obj == NULL) { *result = NULL; return -1; } rc = PyObject_GetOptionalAttr(obj, name_obj, result); Py_DECREF(name_obj); return rc; } #endif // gh-106307 added PyObject_GetOptionalAttr() and // PyMapping_GetOptionalItemString() to Python 3.13.0a1 #if PY_VERSION_HEX < 0x030D00A1 static inline int PyMapping_GetOptionalItem(PyObject 
*obj, PyObject *key, PyObject **result) { *result = PyObject_GetItem(obj, key); if (*result) { return 1; } if (!PyErr_ExceptionMatches(PyExc_KeyError)) { return -1; } PyErr_Clear(); return 0; } static inline int PyMapping_GetOptionalItemString(PyObject *obj, const char *key, PyObject **result) { PyObject *key_obj; int rc; key_obj = PyUnicode_FromString(key); if (key_obj == NULL) { *result = NULL; return -1; } rc = PyMapping_GetOptionalItem(obj, key_obj, result); Py_DECREF(key_obj); return rc; } #endif // gh-108511 added PyMapping_HasKeyWithError() and // PyMapping_HasKeyStringWithError() to Python 3.13.0a1 #if PY_VERSION_HEX < 0x030D00A1 static inline int PyMapping_HasKeyWithError(PyObject *obj, PyObject *key) { PyObject *res; int rc = PyMapping_GetOptionalItem(obj, key, &res); Py_XDECREF(res); return rc; } static inline int PyMapping_HasKeyStringWithError(PyObject *obj, const char *key) { PyObject *res; int rc = PyMapping_GetOptionalItemString(obj, key, &res); Py_XDECREF(res); return rc; } #endif // gh-108511 added PyObject_HasAttrWithError() and // PyObject_HasAttrStringWithError() to Python 3.13.0a1 #if PY_VERSION_HEX < 0x030D00A1 static inline int PyObject_HasAttrWithError(PyObject *obj, PyObject *attr) { PyObject *res; int rc = PyObject_GetOptionalAttr(obj, attr, &res); Py_XDECREF(res); return rc; } static inline int PyObject_HasAttrStringWithError(PyObject *obj, const char *attr) { PyObject *res; int rc = PyObject_GetOptionalAttrString(obj, attr, &res); Py_XDECREF(res); return rc; } #endif // gh-106004 added PyDict_GetItemRef() and PyDict_GetItemStringRef() // to Python 3.13.0a1 #if PY_VERSION_HEX < 0x030D00A1 static inline int PyDict_GetItemRef(PyObject *mp, PyObject *key, PyObject **result) { PyObject *item = PyDict_GetItemWithError(mp, key); if (item != NULL) { *result = Py_NewRef(item); return 1; // found } if (!PyErr_Occurred()) { *result = NULL; return 0; // not found } *result = NULL; return -1; } static inline int PyDict_GetItemStringRef(PyObject *mp, const char *key, PyObject **result) { int res; PyObject *key_obj = PyUnicode_FromString(key); if (key_obj == NULL) { *result = NULL; return -1; } res = PyDict_GetItemRef(mp, key_obj, result); Py_DECREF(key_obj); return res; } #endif // gh-106307 added PyModule_Add() to Python 3.13.0a1 #if PY_VERSION_HEX < 0x030D00A1 static inline int PyModule_Add(PyObject *mod, const char *name, PyObject *value) { int res = PyModule_AddObjectRef(mod, name, value); Py_XDECREF(value); return res; } #endif // gh-108014 added Py_IsFinalizing() to Python 3.13.0a1 // bpo-1856 added _Py_Finalizing to Python 3.2.1b1. // _Py_IsFinalizing() was added to PyPy 7.3.0. 
#if (PY_VERSION_HEX < 0x030D00A1) \ && (!defined(PYPY_VERSION_NUM) || PYPY_VERSION_NUM >= 0x7030000) static inline int Py_IsFinalizing(void) { return _Py_IsFinalizing(); } #endif // gh-108323 added PyDict_ContainsString() to Python 3.13.0a1 #if PY_VERSION_HEX < 0x030D00A1 static inline int PyDict_ContainsString(PyObject *op, const char *key) { PyObject *key_obj = PyUnicode_FromString(key); if (key_obj == NULL) { return -1; } int res = PyDict_Contains(op, key_obj); Py_DECREF(key_obj); return res; } #endif // gh-108445 added PyLong_AsInt() to Python 3.13.0a1 #if PY_VERSION_HEX < 0x030D00A1 static inline int PyLong_AsInt(PyObject *obj) { #ifdef PYPY_VERSION long value = PyLong_AsLong(obj); if (value == -1 && PyErr_Occurred()) { return -1; } if (value < (long)INT_MIN || (long)INT_MAX < value) { PyErr_SetString(PyExc_OverflowError, "Python int too large to convert to C int"); return -1; } return (int)value; #else return _PyLong_AsInt(obj); #endif } #endif // gh-107073 added PyObject_VisitManagedDict() to Python 3.13.0a1 #if PY_VERSION_HEX < 0x030D00A1 static inline int PyObject_VisitManagedDict(PyObject *obj, visitproc visit, void *arg) { PyObject **dict = _PyObject_GetDictPtr(obj); if (*dict == NULL) { return -1; } Py_VISIT(*dict); return 0; } static inline void PyObject_ClearManagedDict(PyObject *obj) { PyObject **dict = _PyObject_GetDictPtr(obj); if (*dict == NULL) { return; } Py_CLEAR(*dict); } #endif // gh-108867 added PyThreadState_GetUnchecked() to Python 3.13.0a1. #if PY_VERSION_HEX < 0x030D00A1 static inline PyThreadState* PyThreadState_GetUnchecked(void) { return _PyThreadState_UncheckedGet(); } #endif // gh-110289 added PyUnicode_EqualToUTF8() and PyUnicode_EqualToUTF8AndSize() // to Python 3.13.0a1 #if PY_VERSION_HEX < 0x030D00A1 static inline int PyUnicode_EqualToUTF8AndSize(PyObject *unicode, const char *str, Py_ssize_t str_len) { Py_ssize_t len; const void *utf8; PyObject *exc_type, *exc_value, *exc_tb; int res; // API cannot report errors so save/restore the exception PyErr_Fetch(&exc_type, &exc_value, &exc_tb); if (PyUnicode_IS_ASCII(unicode)) { utf8 = PyUnicode_DATA(unicode); len = PyUnicode_GET_LENGTH(unicode); } else { utf8 = PyUnicode_AsUTF8AndSize(unicode, &len); if (utf8 == NULL) { // Memory allocation failure. The API cannot report error, // so ignore the exception and return 0. res = 0; goto done; } } if (len != str_len) { res = 0; goto done; } res = (memcmp(utf8, str, (size_t)len) == 0); done: PyErr_Restore(exc_type, exc_value, exc_tb); return res; } static inline int PyUnicode_EqualToUTF8(PyObject *unicode, const char *str) { return PyUnicode_EqualToUTF8AndSize(unicode, str, (Py_ssize_t)strlen(str)); } #endif // gh-111138 added PyList_Extend() and PyList_Clear() to Python 3.13.0a2 #if PY_VERSION_HEX < 0x030D00A2 static inline int PyList_Extend(PyObject *list, PyObject *iterable) { return PyList_SetSlice(list, PY_SSIZE_T_MAX, PY_SSIZE_T_MAX, iterable); } static inline int PyList_Clear(PyObject *list) { return PyList_SetSlice(list, 0, PY_SSIZE_T_MAX, NULL); } #endif // gh-111262 added PyDict_Pop() and PyDict_PopString() to Python 3.13.0a2 #if PY_VERSION_HEX < 0x030D00A2 static inline int PyDict_Pop(PyObject *dict, PyObject *key, PyObject **result) { PyObject *value; if (!PyDict_Check(dict)) { PyErr_BadInternalCall(); if (result) { *result = NULL; } return -1; } // Python 3.13.0a1 removed _PyDict_Pop(). 
#if defined(PYPY_VERSION) || PY_VERSION_HEX >= 0x030D0000 value = PyObject_CallMethod(dict, "pop", "O", key); #else value = _PyDict_Pop(dict, key, NULL); #endif if (value == NULL) { if (result) { *result = NULL; } if (PyErr_Occurred() && !PyErr_ExceptionMatches(PyExc_KeyError)) { return -1; } PyErr_Clear(); return 0; } if (result) { *result = value; } else { Py_DECREF(value); } return 1; } static inline int PyDict_PopString(PyObject *dict, const char *key, PyObject **result) { PyObject *key_obj = PyUnicode_FromString(key); if (key_obj == NULL) { if (result != NULL) { *result = NULL; } return -1; } int res = PyDict_Pop(dict, key_obj, result); Py_DECREF(key_obj); return res; } #endif // gh-111545 added Py_HashPointer() to Python 3.13.0a3 #if PY_VERSION_HEX < 0x030D00A3 static inline Py_hash_t Py_HashPointer(const void *ptr) { #if !defined(PYPY_VERSION) return _Py_HashPointer(ptr); #else return _Py_HashPointer(_Py_CAST(void*, ptr)); #endif } #endif // Python 3.13a4 added a PyTime API. #if PY_VERSION_HEX < 0x030D00A4 typedef _PyTime_t PyTime_t; #define PyTime_MIN _PyTime_MIN #define PyTime_MAX _PyTime_MAX static inline double PyTime_AsSecondsDouble(PyTime_t t) { return _PyTime_AsSecondsDouble(t); } static inline int PyTime_Monotonic(PyTime_t *result) { return _PyTime_GetMonotonicClockWithInfo(result, NULL); } static inline int PyTime_Time(PyTime_t *result) { return _PyTime_GetSystemClockWithInfo(result, NULL); } static inline int PyTime_PerfCounter(PyTime_t *result) { #if !defined(PYPY_VERSION) return _PyTime_GetPerfCounterWithInfo(result, NULL); #else // Call time.perf_counter_ns() and convert Python int object to PyTime_t. // Cache time.perf_counter_ns() function for best performance. static PyObject *func = NULL; if (func == NULL) { PyObject *mod = PyImport_ImportModule("time"); if (mod == NULL) { return -1; } func = PyObject_GetAttrString(mod, "perf_counter_ns"); Py_DECREF(mod); if (func == NULL) { return -1; } } PyObject *res = PyObject_CallNoArgs(func); if (res == NULL) { return -1; } long long value = PyLong_AsLongLong(res); Py_DECREF(res); if (value == -1 && PyErr_Occurred()) { return -1; } Py_BUILD_ASSERT(sizeof(value) >= sizeof(PyTime_t)); *result = (PyTime_t)value; return 0; #endif } #endif // gh-111389 added hash constants to Python 3.13.0a5. These constants were // added first as private macros to Python 3.4.0b1 and PyPy 7.3.9. 
#if (!defined(PyHASH_BITS) \ && (!defined(PYPY_VERSION) \ || (defined(PYPY_VERSION) && PYPY_VERSION_NUM >= 0x07090000))) # define PyHASH_BITS _PyHASH_BITS # define PyHASH_MODULUS _PyHASH_MODULUS # define PyHASH_INF _PyHASH_INF # define PyHASH_IMAG _PyHASH_IMAG #endif // gh-111545 added Py_GetConstant() and Py_GetConstantBorrowed() // to Python 3.13.0a6 #if PY_VERSION_HEX < 0x030D00A6 && !defined(Py_CONSTANT_NONE) #define Py_CONSTANT_NONE 0 #define Py_CONSTANT_FALSE 1 #define Py_CONSTANT_TRUE 2 #define Py_CONSTANT_ELLIPSIS 3 #define Py_CONSTANT_NOT_IMPLEMENTED 4 #define Py_CONSTANT_ZERO 5 #define Py_CONSTANT_ONE 6 #define Py_CONSTANT_EMPTY_STR 7 #define Py_CONSTANT_EMPTY_BYTES 8 #define Py_CONSTANT_EMPTY_TUPLE 9 static inline PyObject* Py_GetConstant(unsigned int constant_id) { static PyObject* constants[Py_CONSTANT_EMPTY_TUPLE + 1] = {NULL}; if (constants[Py_CONSTANT_NONE] == NULL) { constants[Py_CONSTANT_NONE] = Py_None; constants[Py_CONSTANT_FALSE] = Py_False; constants[Py_CONSTANT_TRUE] = Py_True; constants[Py_CONSTANT_ELLIPSIS] = Py_Ellipsis; constants[Py_CONSTANT_NOT_IMPLEMENTED] = Py_NotImplemented; constants[Py_CONSTANT_ZERO] = PyLong_FromLong(0); if (constants[Py_CONSTANT_ZERO] == NULL) { goto fatal_error; } constants[Py_CONSTANT_ONE] = PyLong_FromLong(1); if (constants[Py_CONSTANT_ONE] == NULL) { goto fatal_error; } constants[Py_CONSTANT_EMPTY_STR] = PyUnicode_FromStringAndSize("", 0); if (constants[Py_CONSTANT_EMPTY_STR] == NULL) { goto fatal_error; } constants[Py_CONSTANT_EMPTY_BYTES] = PyBytes_FromStringAndSize("", 0); if (constants[Py_CONSTANT_EMPTY_BYTES] == NULL) { goto fatal_error; } constants[Py_CONSTANT_EMPTY_TUPLE] = PyTuple_New(0); if (constants[Py_CONSTANT_EMPTY_TUPLE] == NULL) { goto fatal_error; } // goto dance to avoid compiler warnings about Py_FatalError() goto init_done; fatal_error: // This case should never happen Py_FatalError("Py_GetConstant() failed to get constants"); } init_done: if (constant_id <= Py_CONSTANT_EMPTY_TUPLE) { return Py_NewRef(constants[constant_id]); } else { PyErr_BadInternalCall(); return NULL; } } static inline PyObject* Py_GetConstantBorrowed(unsigned int constant_id) { PyObject *obj = Py_GetConstant(constant_id); Py_XDECREF(obj); return obj; } #endif // gh-114329 added PyList_GetItemRef() to Python 3.13.0a4 #if PY_VERSION_HEX < 0x030D00A4 static inline PyObject * PyList_GetItemRef(PyObject *op, Py_ssize_t index) { PyObject *item = PyList_GetItem(op, index); Py_XINCREF(item); return item; } #endif // PyDict_SetDefaultRef() was added to Python 3.13.0a4 #if PY_VERSION_HEX < 0x030D00A4 static inline int PyDict_SetDefaultRef(PyObject *d, PyObject *key, PyObject *default_value, PyObject **result) { PyObject *value; if (PyDict_GetItemRef(d, key, &value) < 0) { // get error if (result) { *result = NULL; } return -1; } if (value != NULL) { // present if (result) { *result = value; } else { Py_DECREF(value); } return 1; } // missing: set the item if (PyDict_SetItem(d, key, default_value) < 0) { // set error if (result) { *result = NULL; } return -1; } if (result) { *result = Py_NewRef(default_value); } return 0; } #endif #if PY_VERSION_HEX < 0x030D00B3 # define Py_BEGIN_CRITICAL_SECTION(op) { # define Py_END_CRITICAL_SECTION() } # define Py_BEGIN_CRITICAL_SECTION2(a, b) { # define Py_END_CRITICAL_SECTION2() } #endif #if PY_VERSION_HEX < 0x030E0000 && !defined(PYPY_VERSION) typedef struct PyUnicodeWriter PyUnicodeWriter; static inline void PyUnicodeWriter_Discard(PyUnicodeWriter *writer) { _PyUnicodeWriter_Dealloc((_PyUnicodeWriter*)writer);
PyMem_Free(writer); } static inline PyUnicodeWriter* PyUnicodeWriter_Create(Py_ssize_t length) { if (length < 0) { PyErr_SetString(PyExc_ValueError, "length must be positive"); return NULL; } const size_t size = sizeof(_PyUnicodeWriter); PyUnicodeWriter *pub_writer = (PyUnicodeWriter *)PyMem_Malloc(size); if (pub_writer == _Py_NULL) { PyErr_NoMemory(); return _Py_NULL; } _PyUnicodeWriter *writer = (_PyUnicodeWriter *)pub_writer; _PyUnicodeWriter_Init(writer); if (_PyUnicodeWriter_Prepare(writer, length, 127) < 0) { PyUnicodeWriter_Discard(pub_writer); return NULL; } writer->overallocate = 1; return pub_writer; } static inline PyObject* PyUnicodeWriter_Finish(PyUnicodeWriter *writer) { PyObject *str = _PyUnicodeWriter_Finish((_PyUnicodeWriter*)writer); assert(((_PyUnicodeWriter*)writer)->buffer == NULL); PyMem_Free(writer); return str; } static inline int PyUnicodeWriter_WriteChar(PyUnicodeWriter *writer, Py_UCS4 ch) { if (ch > 0x10ffff) { PyErr_SetString(PyExc_ValueError, "character must be in range(0x110000)"); return -1; } return _PyUnicodeWriter_WriteChar((_PyUnicodeWriter*)writer, ch); } static inline int PyUnicodeWriter_WriteStr(PyUnicodeWriter *writer, PyObject *obj) { PyObject *str = PyObject_Str(obj); if (str == NULL) { return -1; } int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str); Py_DECREF(str); return res; } static inline int PyUnicodeWriter_WriteRepr(PyUnicodeWriter *writer, PyObject *obj) { PyObject *str = PyObject_Repr(obj); if (str == NULL) { return -1; } int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str); Py_DECREF(str); return res; } static inline int PyUnicodeWriter_WriteUTF8(PyUnicodeWriter *writer, const char *str, Py_ssize_t size) { if (size < 0) { size = (Py_ssize_t)strlen(str); } PyObject *str_obj = PyUnicode_FromStringAndSize(str, size); if (str_obj == _Py_NULL) { return -1; } int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj); Py_DECREF(str_obj); return res; } static inline int PyUnicodeWriter_WriteWideChar(PyUnicodeWriter *writer, const wchar_t *str, Py_ssize_t size) { if (size < 0) { size = (Py_ssize_t)wcslen(str); } PyObject *str_obj = PyUnicode_FromWideChar(str, size); if (str_obj == _Py_NULL) { return -1; } int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj); Py_DECREF(str_obj); return res; } static inline int PyUnicodeWriter_WriteSubstring(PyUnicodeWriter *writer, PyObject *str, Py_ssize_t start, Py_ssize_t end) { if (!PyUnicode_Check(str)) { PyErr_Format(PyExc_TypeError, "expect str, not %T", str); return -1; } if (start < 0 || start > end) { PyErr_Format(PyExc_ValueError, "invalid start argument"); return -1; } if (end > PyUnicode_GET_LENGTH(str)) { PyErr_Format(PyExc_ValueError, "invalid end argument"); return -1; } return _PyUnicodeWriter_WriteSubstring((_PyUnicodeWriter*)writer, str, start, end); } static inline int PyUnicodeWriter_Format(PyUnicodeWriter *writer, const char *format, ...) 
{ va_list vargs; va_start(vargs, format); PyObject *str = PyUnicode_FromFormatV(format, vargs); va_end(vargs); if (str == _Py_NULL) { return -1; } int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str); Py_DECREF(str); return res; } #endif // PY_VERSION_HEX < 0x030E0000 // gh-116560 added PyLong_GetSign() to Python 3.14.0a0 #if PY_VERSION_HEX < 0x030E00A0 static inline int PyLong_GetSign(PyObject *obj, int *sign) { if (!PyLong_Check(obj)) { PyErr_Format(PyExc_TypeError, "expect int, got %s", Py_TYPE(obj)->tp_name); return -1; } *sign = _PyLong_Sign(obj); return 0; } #endif #ifdef __cplusplus } #endif #endif // PYTHONCAPI_COMPAT ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/multidict/_multilib/views.h0000644000175100001660000003044514766036355020431 0ustar00runnerdocker#ifndef _MULTIDICT_VIEWS_H #define _MULTIDICT_VIEWS_H #ifdef __cplusplus extern "C" { #endif static PyTypeObject multidict_itemsview_type; static PyTypeObject multidict_valuesview_type; static PyTypeObject multidict_keysview_type; static PyObject *viewbaseset_richcmp_func; static PyObject *viewbaseset_and_func; static PyObject *viewbaseset_or_func; static PyObject *viewbaseset_sub_func; static PyObject *viewbaseset_xor_func; static PyObject *abc_itemsview_register_func; static PyObject *abc_keysview_register_func; static PyObject *abc_valuesview_register_func; static PyObject *itemsview_isdisjoint_func; static PyObject *itemsview_repr_func; static PyObject *keysview_repr_func; static PyObject *keysview_isdisjoint_func; static PyObject *valuesview_repr_func; typedef struct { PyObject_HEAD PyObject *md; } _Multidict_ViewObject; /********** Base **********/ static inline void _init_view(_Multidict_ViewObject *self, PyObject *md) { Py_INCREF(md); self->md = md; } static inline void multidict_view_dealloc(_Multidict_ViewObject *self) { PyObject_GC_UnTrack(self); Py_XDECREF(self->md); PyObject_GC_Del(self); } static inline int multidict_view_traverse(_Multidict_ViewObject *self, visitproc visit, void *arg) { Py_VISIT(self->md); return 0; } static inline int multidict_view_clear(_Multidict_ViewObject *self) { Py_CLEAR(self->md); return 0; } static inline Py_ssize_t multidict_view_len(_Multidict_ViewObject *self) { return pair_list_len(&((MultiDictObject*)self->md)->pairs); } static inline PyObject * multidict_view_richcompare(PyObject *self, PyObject *other, int op) { PyObject *ret; PyObject *op_obj = PyLong_FromLong(op); if (op_obj == NULL) { return NULL; } ret = PyObject_CallFunctionObjArgs( viewbaseset_richcmp_func, self, other, op_obj, NULL); Py_DECREF(op_obj); return ret; } static inline PyObject * multidict_view_and(PyObject *self, PyObject *other) { return PyObject_CallFunctionObjArgs( viewbaseset_and_func, self, other, NULL); } static inline PyObject * multidict_view_or(PyObject *self, PyObject *other) { return PyObject_CallFunctionObjArgs( viewbaseset_or_func, self, other, NULL); } static inline PyObject * multidict_view_sub(PyObject *self, PyObject *other) { return PyObject_CallFunctionObjArgs( viewbaseset_sub_func, self, other, NULL); } static inline PyObject * multidict_view_xor(PyObject *self, PyObject *other) { return PyObject_CallFunctionObjArgs( viewbaseset_xor_func, self, other, NULL); } static PyNumberMethods multidict_view_as_number = { .nb_subtract = (binaryfunc)multidict_view_sub, .nb_and = (binaryfunc)multidict_view_and, .nb_xor = (binaryfunc)multidict_view_xor, .nb_or = (binaryfunc)multidict_view_or, }; /********** Items **********/ 
static inline PyObject * multidict_itemsview_new(PyObject *md) { _Multidict_ViewObject *mv = PyObject_GC_New( _Multidict_ViewObject, &multidict_itemsview_type); if (mv == NULL) { return NULL; } _init_view(mv, md); PyObject_GC_Track(mv); return (PyObject *)mv; } static inline PyObject * multidict_itemsview_iter(_Multidict_ViewObject *self) { return multidict_items_iter_new((MultiDictObject*)self->md); } static inline PyObject * multidict_itemsview_repr(_Multidict_ViewObject *self) { return PyObject_CallFunctionObjArgs( itemsview_repr_func, self, NULL); } static inline PyObject * multidict_itemsview_isdisjoint(_Multidict_ViewObject *self, PyObject *other) { return PyObject_CallFunctionObjArgs( itemsview_isdisjoint_func, self, other, NULL); } PyDoc_STRVAR(itemsview_isdisjoint_doc, "Return True if two sets have a null intersection."); static PyMethodDef multidict_itemsview_methods[] = { { "isdisjoint", (PyCFunction)multidict_itemsview_isdisjoint, METH_O, itemsview_isdisjoint_doc }, { NULL, NULL } /* sentinel */ }; static inline int multidict_itemsview_contains(_Multidict_ViewObject *self, PyObject *obj) { PyObject *akey = NULL, *aval = NULL, *bkey = NULL, *bval = NULL, *iter = NULL, *item = NULL; int ret1, ret2; if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 2) { return 0; } bkey = PyTuple_GET_ITEM(obj, 0); bval = PyTuple_GET_ITEM(obj, 1); iter = multidict_itemsview_iter(self); if (iter == NULL) { return 0; } while ((item = PyIter_Next(iter)) != NULL) { akey = PyTuple_GET_ITEM(item, 0); aval = PyTuple_GET_ITEM(item, 1); ret1 = PyObject_RichCompareBool(akey, bkey, Py_EQ); if (ret1 < 0) { Py_DECREF(iter); Py_DECREF(item); return -1; } ret2 = PyObject_RichCompareBool(aval, bval, Py_EQ); if (ret2 < 0) { Py_DECREF(iter); Py_DECREF(item); return -1; } if (ret1 > 0 && ret2 > 0) { Py_DECREF(iter); Py_DECREF(item); return 1; } Py_DECREF(item); } Py_DECREF(iter); if (PyErr_Occurred()) { return -1; } return 0; } static PySequenceMethods multidict_itemsview_as_sequence = { .sq_length = (lenfunc)multidict_view_len, .sq_contains = (objobjproc)multidict_itemsview_contains, }; static PyTypeObject multidict_itemsview_type = { PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) "multidict._multidict._ItemsView", /* tp_name */ sizeof(_Multidict_ViewObject), /* tp_basicsize */ .tp_dealloc = (destructor)multidict_view_dealloc, .tp_repr = (reprfunc)multidict_itemsview_repr, .tp_as_number = &multidict_view_as_number, .tp_as_sequence = &multidict_itemsview_as_sequence, .tp_getattro = PyObject_GenericGetAttr, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, .tp_traverse = (traverseproc)multidict_view_traverse, .tp_clear = (inquiry)multidict_view_clear, .tp_richcompare = multidict_view_richcompare, .tp_iter = (getiterfunc)multidict_itemsview_iter, .tp_methods = multidict_itemsview_methods, }; /********** Keys **********/ static inline PyObject * multidict_keysview_new(PyObject *md) { _Multidict_ViewObject *mv = PyObject_GC_New( _Multidict_ViewObject, &multidict_keysview_type); if (mv == NULL) { return NULL; } _init_view(mv, md); PyObject_GC_Track(mv); return (PyObject *)mv; } static inline PyObject * multidict_keysview_iter(_Multidict_ViewObject *self) { return multidict_keys_iter_new(((MultiDictObject*)self->md)); } static inline PyObject * multidict_keysview_repr(_Multidict_ViewObject *self) { return PyObject_CallFunctionObjArgs( keysview_repr_func, self, NULL); } static inline PyObject * multidict_keysview_isdisjoint(_Multidict_ViewObject *self, PyObject *other) { return PyObject_CallFunctionObjArgs( 
keysview_isdisjoint_func, self, other, NULL); } PyDoc_STRVAR(keysview_isdisjoint_doc, "Return True if two sets have a null intersection."); static PyMethodDef multidict_keysview_methods[] = { { "isdisjoint", (PyCFunction)multidict_keysview_isdisjoint, METH_O, keysview_isdisjoint_doc }, { NULL, NULL } /* sentinel */ }; static inline int multidict_keysview_contains(_Multidict_ViewObject *self, PyObject *key) { return pair_list_contains(&((MultiDictObject*)self->md)->pairs, key); } static PySequenceMethods multidict_keysview_as_sequence = { .sq_length = (lenfunc)multidict_view_len, .sq_contains = (objobjproc)multidict_keysview_contains, }; static PyTypeObject multidict_keysview_type = { PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) "multidict._multidict._KeysView", /* tp_name */ sizeof(_Multidict_ViewObject), /* tp_basicsize */ .tp_dealloc = (destructor)multidict_view_dealloc, .tp_repr = (reprfunc)multidict_keysview_repr, .tp_as_number = &multidict_view_as_number, .tp_as_sequence = &multidict_keysview_as_sequence, .tp_getattro = PyObject_GenericGetAttr, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, .tp_traverse = (traverseproc)multidict_view_traverse, .tp_clear = (inquiry)multidict_view_clear, .tp_richcompare = multidict_view_richcompare, .tp_iter = (getiterfunc)multidict_keysview_iter, .tp_methods = multidict_keysview_methods, }; /********** Values **********/ static inline PyObject * multidict_valuesview_new(PyObject *md) { _Multidict_ViewObject *mv = PyObject_GC_New( _Multidict_ViewObject, &multidict_valuesview_type); if (mv == NULL) { return NULL; } _init_view(mv, md); PyObject_GC_Track(mv); return (PyObject *)mv; } static inline PyObject * multidict_valuesview_iter(_Multidict_ViewObject *self) { return multidict_values_iter_new(((MultiDictObject*)self->md)); } static inline PyObject * multidict_valuesview_repr(_Multidict_ViewObject *self) { return PyObject_CallFunctionObjArgs( valuesview_repr_func, self, NULL); } static PySequenceMethods multidict_valuesview_as_sequence = { .sq_length = (lenfunc)multidict_view_len, }; static PyTypeObject multidict_valuesview_type = { PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) "multidict._multidict._ValuesView", /* tp_name */ sizeof(_Multidict_ViewObject), /* tp_basicsize */ .tp_dealloc = (destructor)multidict_view_dealloc, .tp_repr = (reprfunc)multidict_valuesview_repr, .tp_as_sequence = &multidict_valuesview_as_sequence, .tp_getattro = PyObject_GenericGetAttr, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, .tp_traverse = (traverseproc)multidict_view_traverse, .tp_clear = (inquiry)multidict_view_clear, .tp_iter = (getiterfunc)multidict_valuesview_iter, }; static inline int multidict_views_init(void) { PyObject *reg_func_call_result = NULL; PyObject *module = PyImport_ImportModule("multidict._multidict_base"); if (module == NULL) { goto fail; } #define GET_MOD_ATTR(VAR, NAME) \ VAR = PyObject_GetAttrString(module, NAME); \ if (VAR == NULL) { \ goto fail; \ } GET_MOD_ATTR(viewbaseset_richcmp_func, "_viewbaseset_richcmp"); GET_MOD_ATTR(viewbaseset_and_func, "_viewbaseset_and"); GET_MOD_ATTR(viewbaseset_or_func, "_viewbaseset_or"); GET_MOD_ATTR(viewbaseset_sub_func, "_viewbaseset_sub"); GET_MOD_ATTR(viewbaseset_xor_func, "_viewbaseset_xor"); GET_MOD_ATTR(abc_itemsview_register_func, "_abc_itemsview_register"); GET_MOD_ATTR(abc_keysview_register_func, "_abc_keysview_register"); GET_MOD_ATTR(abc_valuesview_register_func, "_abc_valuesview_register"); GET_MOD_ATTR(itemsview_isdisjoint_func, "_itemsview_isdisjoint"); 
GET_MOD_ATTR(itemsview_repr_func, "_itemsview_repr"); GET_MOD_ATTR(keysview_repr_func, "_keysview_repr"); GET_MOD_ATTR(keysview_isdisjoint_func, "_keysview_isdisjoint"); GET_MOD_ATTR(valuesview_repr_func, "_valuesview_repr"); if (PyType_Ready(&multidict_itemsview_type) < 0 || PyType_Ready(&multidict_valuesview_type) < 0 || PyType_Ready(&multidict_keysview_type) < 0) { goto fail; } // abc.ItemsView.register(_ItemsView) reg_func_call_result = PyObject_CallFunctionObjArgs( abc_itemsview_register_func, (PyObject*)&multidict_itemsview_type, NULL); if (reg_func_call_result == NULL) { goto fail; } Py_DECREF(reg_func_call_result); // abc.KeysView.register(_KeysView) reg_func_call_result = PyObject_CallFunctionObjArgs( abc_keysview_register_func, (PyObject*)&multidict_keysview_type, NULL); if (reg_func_call_result == NULL) { goto fail; } Py_DECREF(reg_func_call_result); // abc.ValuesView.register(_ValuesView) reg_func_call_result = PyObject_CallFunctionObjArgs( abc_valuesview_register_func, (PyObject*)&multidict_valuesview_type, NULL); if (reg_func_call_result == NULL) { goto fail; } Py_DECREF(reg_func_call_result); Py_DECREF(module); return 0; fail: Py_CLEAR(module); return -1; #undef GET_MOD_ATTR } #ifdef __cplusplus } #endif #endif ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/multidict/py.typed0000644000175100001660000000001714766036355016632 0ustar00runnerdockerPEP-561 marker.././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1742224624.172563 multidict-6.2.0/multidict.egg-info/0000755000175100001660000000000014766036360016623 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224624.0 multidict-6.2.0/multidict.egg-info/PKG-INFO0000644000175100001660000001147714766036360017726 0ustar00runnerdockerMetadata-Version: 2.2 Name: multidict Version: 6.2.0 Summary: multidict implementation Home-page: https://github.com/aio-libs/multidict Author: Andrew Svetlov Author-email: andrew.svetlov@gmail.com License: Apache 2 Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org Project-URL: CI: GitHub, https://github.com/aio-libs/multidict/actions Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/multidict Project-URL: Docs: Changelog, https://multidict.aio-libs.org/en/latest/changes/ Project-URL: Docs: RTD, https://multidict.aio-libs.org Project-URL: GitHub: issues, https://github.com/aio-libs/multidict/issues Project-URL: GitHub: repo, https://github.com/aio-libs/multidict Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: Apache Software License Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Classifier: Programming Language :: Python :: 3.12 Classifier: Programming Language :: Python :: 3.13 Requires-Python: >=3.9 Description-Content-Type: text/x-rst License-File: LICENSE Requires-Dist: typing-extensions>=4.1.0; python_version < "3.11" ========= multidict ========= ..
image:: https://github.com/aio-libs/multidict/actions/workflows/ci-cd.yml/badge.svg :target: https://github.com/aio-libs/multidict/actions :alt: GitHub status for master branch .. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg :target: https://codecov.io/gh/aio-libs/multidict :alt: Coverage metrics .. image:: https://img.shields.io/pypi/v/multidict.svg :target: https://pypi.org/project/multidict :alt: PyPI .. image:: https://readthedocs.org/projects/multidict/badge/?version=latest :target: https://multidict.aio-libs.org :alt: Read The Docs build status badge .. image:: https://img.shields.io/pypi/pyversions/multidict.svg :target: https://pypi.org/project/multidict :alt: Python versions .. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat :target: https://matrix.to/#/%23aio-libs:matrix.org :alt: Matrix Room — #aio-libs:matrix.org .. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat :target: https://matrix.to/#/%23aio-libs-space:matrix.org :alt: Matrix Space — #aio-libs-space:matrix.org Multidict is a dict-like collection of *key-value pairs* where a key might occur more than once in the container. Introduction ------------ *HTTP Headers* and *URL query strings* require a specific data structure: a *multidict*. It behaves mostly like a regular ``dict`` but it may have several *values* for the same *key* and *preserves insertion ordering*. The *key* is ``str`` (or ``istr`` for case-insensitive dictionaries). ``multidict`` has four multidict classes: ``MultiDict``, ``MultiDictProxy``, ``CIMultiDict`` and ``CIMultiDictProxy``. Immutable proxies (``MultiDictProxy`` and ``CIMultiDictProxy``) provide a dynamic view of the proxied multidict; the view reflects changes in the underlying collection. They implement the ``collections.abc.Mapping`` interface. The regular mutable classes (``MultiDict`` and ``CIMultiDict``) implement ``collections.abc.MutableMapping`` and allow changing their own content. The *case-insensitive* classes (``CIMultiDict`` and ``CIMultiDictProxy``) assume the *keys* are case insensitive, e.g.:: >>> dct = CIMultiDict(key='val') >>> 'Key' in dct True >>> dct['Key'] 'val' *Keys* should be ``str`` or ``istr`` instances. The library has optional C extensions for speed. License ------- Apache 2 Library Installation -------------------- .. code-block:: bash $ pip install multidict The library is Python 3 only! PyPI contains binary wheels for Linux, Windows and macOS. If you want to install ``multidict`` on another operating system (or *Alpine Linux* inside a Docker container), the tarball will be used to compile the library from source. It requires a C compiler and the Python headers to be installed. To skip the compilation, please set the ``MULTIDICT_NO_EXTENSIONS`` environment variable, e.g.: .. code-block:: bash $ MULTIDICT_NO_EXTENSIONS=1 pip install multidict Please note that the pure-Python (uncompiled) version is about 20-50 times slower, depending on the usage scenario! Changelog --------- See the `RTD page <https://multidict.aio-libs.org/en/latest/changes/>`_. 
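Example
-------

A short illustrative sketch of the multi-value behaviour described above; it relies on the ``add()``, ``getall()`` and ``getone()`` methods from the public API, which are not shown elsewhere in this README::

    >>> from multidict import MultiDict, MultiDictProxy
    >>> d = MultiDict([('a', 1)])
    >>> d.add('a', 2)              # the same key is stored twice
    >>> d.getall('a')              # all values for the key, in insertion order
    [1, 2]
    >>> d.getone('a')              # the first value for the key
    1
    >>> proxy = MultiDictProxy(d)  # read-only dynamic view of ``d``
    >>> d.add('a', 3)
    >>> proxy.getall('a')          # the proxy reflects the change
    [1, 2, 3]

``MultiDictProxy`` is handy when a multidict has to be exposed to callers that must not mutate it. 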
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224624.0 multidict-6.2.0/multidict.egg-info/SOURCES.txt0000644000175100001660000000434514766036360020515 0ustar00runnerdocker.coveragerc CHANGES.rst LICENSE MANIFEST.in Makefile README.rst pyproject.toml pytest.ini setup.cfg setup.py CHANGES/.TEMPLATE.rst CHANGES/.gitignore CHANGES/README.rst docs/Makefile docs/benchmark.rst docs/changes.rst docs/conf.py docs/index.rst docs/make.bat docs/multidict.rst docs/spelling_wordlist.txt multidict/__init__.py multidict/_abc.py multidict/_compat.py multidict/_multidict.c multidict/_multidict_base.py multidict/_multidict_py.py multidict/py.typed multidict.egg-info/PKG-INFO multidict.egg-info/SOURCES.txt multidict.egg-info/dependency_links.txt multidict.egg-info/requires.txt multidict.egg-info/top_level.txt multidict/_multilib/defs.h multidict/_multilib/dict.h multidict/_multilib/istr.h multidict/_multilib/iter.h multidict/_multilib/pair_list.h multidict/_multilib/pythoncapi_compat.h multidict/_multilib/views.h requirements/ci.txt requirements/dev.txt requirements/doc-spelling.txt requirements/doc.txt requirements/lint.txt requirements/pytest.txt requirements/towncrier.txt requirements/wheel.txt tests/cimultidict-c-extension.pickle.0 tests/cimultidict-c-extension.pickle.1 tests/cimultidict-c-extension.pickle.2 tests/cimultidict-c-extension.pickle.3 tests/cimultidict-c-extension.pickle.4 tests/cimultidict-c-extension.pickle.5 tests/cimultidict-pure-python.pickle.0 tests/cimultidict-pure-python.pickle.1 tests/cimultidict-pure-python.pickle.2 tests/cimultidict-pure-python.pickle.3 tests/cimultidict-pure-python.pickle.4 tests/cimultidict-pure-python.pickle.5 tests/conftest.py tests/gen_pickles.py tests/multidict-c-extension.pickle.0 tests/multidict-c-extension.pickle.1 tests/multidict-c-extension.pickle.2 tests/multidict-c-extension.pickle.3 tests/multidict-c-extension.pickle.4 tests/multidict-c-extension.pickle.5 tests/multidict-pure-python.pickle.0 tests/multidict-pure-python.pickle.1 tests/multidict-pure-python.pickle.2 tests/multidict-pure-python.pickle.3 tests/multidict-pure-python.pickle.4 tests/multidict-pure-python.pickle.5 tests/test_abc.py tests/test_circular_imports.py tests/test_copy.py tests/test_guard.py tests/test_istr.py tests/test_multidict.py tests/test_multidict_benchmarks.py tests/test_mutable_multidict.py tests/test_mypy.py tests/test_pickle.py tests/test_types.py tests/test_update.py tests/test_version.py././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224624.0 multidict-6.2.0/multidict.egg-info/dependency_links.txt0000644000175100001660000000000114766036360022671 0ustar00runnerdocker ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224624.0 multidict-6.2.0/multidict.egg-info/requires.txt0000644000175100001660000000006514766036360021224 0ustar00runnerdocker [:python_version < "3.11"] typing-extensions>=4.1.0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224624.0 multidict-6.2.0/multidict.egg-info/top_level.txt0000644000175100001660000000001214766036360021346 0ustar00runnerdockermultidict ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/pyproject.toml0000644000175100001660000000064714766036355016062 0ustar00runnerdocker[build-system] requires = ["setuptools >= 40"] build-backend = "setuptools.build_meta" [tool.cibuildwheel] test-requires = "-r requirements/pytest.txt" test-command = 
"pytest {project}/tests" # don't build PyPy wheels, install from source instead skip = "pp*" free-threaded-support = true [tool.cibuildwheel.linux] before-all = "yum install -y libffi-devel || apk add --upgrade libffi-dev || apt-get install libffi-dev" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/pytest.ini0000644000175100001660000000326414766036355015175 0ustar00runnerdocker[pytest] addopts = # `pytest-xdist`: # --numprocesses=auto # NOTE: the plugin disabled because it's slower with so few tests # --numprocesses=0 # Show 10 slowest invocations: --durations=10 # Report all the things == -rxXs: -ra # Show values of the local vars in errors/tracebacks: --showlocals # Autocollect and invoke the doctests from all modules: # https://docs.pytest.org/en/stable/doctest.html --doctest-modules # Pre-load the `pytest-cov` plugin early: -p pytest_cov # `pytest-cov`: --cov --cov-config=.coveragerc --cov-context=test # Fail on config parsing warnings: # --strict-config # Fail on non-existing markers: # * Deprecated since v6.2.0 but may be reintroduced later covering a # broader scope: # --strict # * Exists since v4.5.0 (advised to be used instead of `--strict`): --strict-markers doctest_optionflags = ALLOW_UNICODE ELLIPSIS # Marks tests with an empty parameterset as xfail(run=False) empty_parameter_set_mark = xfail faulthandler_timeout = 30 filterwarnings = error # https://docs.pytest.org/en/stable/usage.html#creating-junitxml-format-files junit_duration_report = call # xunit1 contains more metadata than xunit2 so it's better for CI UIs: junit_family = xunit1 junit_logging = all junit_log_passing_tests = true junit_suite_name = multidict_test_suite # A mapping of markers to their descriptions allowed in strict mode: markers = minversion = 3.8.2 # Optimize pytest's lookup by restricting potentially deep dir tree scan: norecursedirs = build dist docs multidict.egg-info requirements venv virtualenv .cache .eggs .git .github .tox *.egg testpaths = tests/ xfail_strict = true ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742224624.1655629 multidict-6.2.0/requirements/0000755000175100001660000000000014766036360015656 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/requirements/ci.txt0000644000175100001660000000002314766036355017011 0ustar00runnerdocker-e . 
-r pytest.txt ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/requirements/dev.txt0000644000175100001660000000011014766036355017171 0ustar00runnerdocker-r ci.txt -r lint.txt -r towncrier.txt -r doc.txt pyperformance==1.11.0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/requirements/doc-spelling.txt0000644000175100001660000000014414766036355021002 0ustar00runnerdocker-r doc.txt sphinxcontrib-spelling==8.0.1; platform_system!="Windows" # We only use it in Travis CI ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/requirements/doc.txt0000644000175100001660000000014614766036355017171 0ustar00runnerdocker-r towncrier.txt sphinx==8.2.3 pyenchant==3.2.2 sphinxcontrib-spelling==8.0.1 sphinxcontrib-towncrier ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/requirements/lint.txt0000644000175100001660000000011114766036355017362 0ustar00runnerdocker-r ci.txt -r towncrier.txt black==25.1.0 isort==6.0.1 pre-commit==4.1.0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/requirements/pytest.txt0000644000175100001660000000006714766036355017756 0ustar00runnerdockerpytest==8.3.5 pytest-codspeed==3.2.0 pytest-cov==6.0.0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/requirements/towncrier.txt0000644000175100001660000000002314766036355020432 0ustar00runnerdockertowncrier==23.11.0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/requirements/wheel.txt0000644000175100001660000000000614766036355017523 0ustar00runnerdocker-e . 
././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1742224624.173563 multidict-6.2.0/setup.cfg0000644000175100001660000000324514766036360014760 0ustar00runnerdocker[bdist_wheel] universal = 0 [metadata] name = multidict version = attr: multidict.__version__ url = https://github.com/aio-libs/multidict project_urls = Chat: Matrix = https://matrix.to/#/#aio-libs:matrix.org Chat: Matrix Space = https://matrix.to/#/#aio-libs-space:matrix.org CI: GitHub = https://github.com/aio-libs/multidict/actions Code of Conduct = https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md Coverage: codecov = https://codecov.io/github/aio-libs/multidict Docs: Changelog = https://multidict.aio-libs.org/en/latest/changes/ Docs: RTD = https://multidict.aio-libs.org GitHub: issues = https://github.com/aio-libs/multidict/issues GitHub: repo = https://github.com/aio-libs/multidict description = multidict implementation long_description = file: README.rst long_description_content_type = text/x-rst author = Andrew Svetlov author_email = andrew.svetlov@gmail.com license = Apache 2 license_files = LICENSE classifiers = Development Status :: 5 - Production/Stable Intended Audience :: Developers License :: OSI Approved :: Apache Software License Programming Language :: Python Programming Language :: Python :: 3 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Python :: 3.12 Programming Language :: Python :: 3.13 [options] python_requires = >= 3.9 install_requires = typing-extensions >= 4.1.0; python_version < '3.11' packages = multidict [isort] multi_line_output = 3 include_trailing_comma = True force_grid_wrap = 0 use_parentheses = True known_first_party = multidict known_third_party = pytest [egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/setup.py0000644000175100001660000000162714766036355014657 0ustar00runnerdockerimport os import platform import sys from setuptools import Extension, setup NO_EXTENSIONS = bool(os.environ.get("MULTIDICT_NO_EXTENSIONS")) if sys.implementation.name != "cpython": NO_EXTENSIONS = True CFLAGS = ["-O2"] # CFLAGS = ['-g'] if platform.system() != "Windows": CFLAGS.extend( [ "-std=c99", "-Wall", "-Wsign-compare", "-Wconversion", "-fno-strict-aliasing", "-pedantic", ] ) extensions = [ Extension( "multidict._multidict", ["multidict/_multidict.c"], extra_compile_args=CFLAGS, ), ] if not NO_EXTENSIONS: print("*********************") print("* Accelerated build *") print("*********************") setup(ext_modules=extensions) else: print("*********************") print("* Pure Python build *") print("*********************") setup() ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1742224624.172563 multidict-6.2.0/tests/0000755000175100001660000000000014766036360014275 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/cimultidict-c-extension.pickle.00000644000175100001660000000012114766036355022366 0ustar00runnerdockercmultidict._multidict CIMultiDict p0 ((lp1 (Va p2 L1L tp3 a(g2 L2L tp4 atp5 Rp6 .././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/cimultidict-c-extension.pickle.10000644000175100001660000000010714766036355022373 0ustar00runnerdockercmultidict._multidict CIMultiDict 
q(]q((XaqKtq(hKtqetqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/cimultidict-c-extension.pickle.20000644000175100001660000000010614766036355022373 0ustar00runnerdocker€cmultidict._multidict CIMultiDict q]q(XaqK†qhK†qe…qRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/cimultidict-c-extension.pickle.30000644000175100001660000000010614766036355022374 0ustar00runnerdocker€cmultidict._multidict CIMultiDict q]q(XaqK†qhK†qe…qRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/cimultidict-c-extension.pickle.40000644000175100001660000000011114766036355022371 0ustar00runnerdocker€•>Œmultidict._multidict”Œ CIMultiDict”“”]”(Œa”K†”hK†”e…”R”.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/cimultidict-c-extension.pickle.50000644000175100001660000000011114766036355022372 0ustar00runnerdocker€•>Œmultidict._multidict”Œ CIMultiDict”“”]”(Œa”K†”hK†”e…”R”.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/cimultidict-pure-python.pickle.00000644000175100001660000000012414766036355022427 0ustar00runnerdockercmultidict._multidict_py CIMultiDict p0 ((lp1 (Va p2 L1L tp3 a(g2 L2L tp4 atp5 Rp6 .././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/cimultidict-pure-python.pickle.10000644000175100001660000000011214766036355022425 0ustar00runnerdockercmultidict._multidict_py CIMultiDict q(]q((XaqKtq(hKtqetqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/cimultidict-pure-python.pickle.20000644000175100001660000000011114766036355022425 0ustar00runnerdocker€cmultidict._multidict_py CIMultiDict q]q(XaqK†qhK†qe…qRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/cimultidict-pure-python.pickle.30000644000175100001660000000011114766036355022426 0ustar00runnerdocker€cmultidict._multidict_py CIMultiDict q]q(XaqK†qhK†qe…qRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/cimultidict-pure-python.pickle.40000644000175100001660000000011414766036355022432 0ustar00runnerdocker€•AŒmultidict._multidict_py”Œ CIMultiDict”“”]”(Œa”K†”hK†”e…”R”.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/cimultidict-pure-python.pickle.50000644000175100001660000000011414766036355022433 0ustar00runnerdocker€•AŒmultidict._multidict_py”Œ CIMultiDict”“”]”(Œa”K†”hK†”e…”R”.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/conftest.py0000644000175100001660000001535714766036355016513 0ustar00runnerdockerfrom __future__ import annotations import argparse import pickle from dataclasses import dataclass from functools import cached_property from importlib import import_module from types import ModuleType from typing import Callable, Type, Union import pytest from multidict import ( CIMultiDict, MultiDict, MultiDictProxy, MultiMapping, MutableMultiMapping, ) C_EXT_MARK = pytest.mark.c_extension @dataclass(frozen=True) class MultidictImplementation: """A facade for accessing importable multidict module variants. 
An instance essentially represents a c-extension or a pure-python module. The actual underlying module is accessed dynamically through a property and is cached. It also has a text tag depending on what variant it is, and a string representation suitable for use in Pytest's test IDs via parametrization. """ is_pure_python: bool """A flag showing whether this is a pure-python module or a C-extension.""" @cached_property def tag(self) -> str: """Return a text representation of the pure-python attribute.""" return "pure-python" if self.is_pure_python else "c-extension" @cached_property def imported_module(self) -> ModuleType: """Return a loaded importable containing a multidict variant.""" importable_module = "_multidict_py" if self.is_pure_python else "_multidict" return import_module(f"multidict.{importable_module}") def __str__(self) -> str: """Render the implementation facade instance as a string.""" return f"{self.tag}-module" @pytest.fixture( scope="session", params=( pytest.param( MultidictImplementation(is_pure_python=False), marks=C_EXT_MARK, ), MultidictImplementation(is_pure_python=True), ), ids=str, ) def multidict_implementation(request: pytest.FixtureRequest) -> MultidictImplementation: """Return a multidict variant facade.""" return request.param # type: ignore[no-any-return] @pytest.fixture(scope="session") def multidict_module( multidict_implementation: MultidictImplementation, ) -> ModuleType: """Return a pre-imported module containing a multidict variant.""" return multidict_implementation.imported_module @pytest.fixture( scope="session", params=("MultiDict", "CIMultiDict"), ids=("case-sensitive", "case-insensitive"), ) def any_multidict_class_name(request: pytest.FixtureRequest) -> str: """Return a class name of a mutable multidict implementation.""" return request.param # type: ignore[no-any-return] @pytest.fixture(scope="session") def any_multidict_class( any_multidict_class_name: str, multidict_module: ModuleType, ) -> Type[MutableMultiMapping[str]]: """Return a class object of a mutable multidict implementation.""" return getattr(multidict_module, any_multidict_class_name) # type: ignore[no-any-return] @pytest.fixture(scope="session") def case_sensitive_multidict_class( multidict_module: ModuleType, ) -> Type[MultiDict[str]]: """Return a case-sensitive mutable multidict class.""" return multidict_module.MultiDict # type: ignore[no-any-return] @pytest.fixture(scope="session") def case_insensitive_multidict_class( multidict_module: ModuleType, ) -> Type[CIMultiDict[str]]: """Return a case-insensitive mutable multidict class.""" return multidict_module.CIMultiDict # type: ignore[no-any-return] @pytest.fixture(scope="session") def case_insensitive_str_class(multidict_module: ModuleType) -> Type[str]: """Return a case-insensitive string class.""" return multidict_module.istr # type: ignore[no-any-return] @pytest.fixture(scope="session") def any_multidict_proxy_class_name(any_multidict_class_name: str) -> str: """Return a class name of an immutable multidict implementation.""" return f"{any_multidict_class_name}Proxy" @pytest.fixture(scope="session") def any_multidict_proxy_class( any_multidict_proxy_class_name: str, multidict_module: ModuleType, ) -> Type[MultiMapping[str]]: """Return an immutable multidict implementation class object.""" return getattr(multidict_module, any_multidict_proxy_class_name) # type: ignore[no-any-return] @pytest.fixture(scope="session") def case_sensitive_multidict_proxy_class( multidict_module: ModuleType, ) -> Type[MutableMultiMapping[str]]: 
"""Return a case-sensitive immutable multidict class.""" return multidict_module.MultiDictProxy # type: ignore[no-any-return] @pytest.fixture(scope="session") def case_insensitive_multidict_proxy_class( multidict_module: ModuleType, ) -> Type[MutableMultiMapping[str]]: """Return a case-insensitive immutable multidict class.""" return multidict_module.CIMultiDictProxy # type: ignore[no-any-return] @pytest.fixture(scope="session") def multidict_getversion_callable( multidict_module: ModuleType, ) -> Callable[[Union[MultiDict[object], MultiDictProxy[object]]], int]: """Return a ``getversion()`` function for current implementation.""" return multidict_module.getversion # type: ignore[no-any-return] def pytest_addoption( parser: pytest.Parser, pluginmanager: pytest.PytestPluginManager, ) -> None: """Define a new ``--c-extensions`` flag. This lets the callers deselect tests executed against the C-extension version of the ``multidict`` implementation. """ del pluginmanager parser.addoption( "--c-extensions", # disabled with `--no-c-extensions` action=argparse.BooleanOptionalAction, default=True, dest="c_extensions", help="Test C-extensions (on by default)", ) def pytest_collection_modifyitems( session: pytest.Session, config: pytest.Config, items: list[pytest.Item], ) -> None: """Deselect tests against C-extensions when requested via CLI.""" test_c_extensions = config.getoption("--c-extensions") is True if test_c_extensions: return selected_tests: list[pytest.Item] = [] deselected_tests: list[pytest.Item] = [] for item in items: c_ext = item.get_closest_marker(C_EXT_MARK.name) is not None target_items_list = deselected_tests if c_ext else selected_tests target_items_list.append(item) config.hook.pytest_deselected(items=deselected_tests) items[:] = selected_tests def pytest_configure(config: pytest.Config) -> None: """Declare the C-extension marker in config.""" config.addinivalue_line( "markers", f"{C_EXT_MARK.name}: tests running against the C-extension implementation.", ) def pytest_generate_tests(metafunc: pytest.Metafunc) -> None: if "pickle_protocol" in metafunc.fixturenames: metafunc.parametrize( "pickle_protocol", list(range(pickle.HIGHEST_PROTOCOL + 1)), scope="session" ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/gen_pickles.py0000644000175100001660000000165314766036355017143 0ustar00runnerdockerimport pickle from importlib import import_module from pathlib import Path from typing import Union from multidict import CIMultiDict, MultiDict TESTS_DIR = Path(__file__).parent.resolve() _MD_Classes = Union[type[MultiDict[int]], type[CIMultiDict[int]]] def write(tag: str, cls: _MD_Classes, proto: int) -> None: d = cls([("a", 1), ("a", 2)]) file_basename = f"{cls.__name__.lower()}-{tag}" with (TESTS_DIR / f"{file_basename}.pickle.{proto}").open("wb") as f: pickle.dump(d, f, proto) def generate() -> None: _impl_map = { "c-extension": "_multidict", "pure-python": "_multidict_py", } for proto in range(pickle.HIGHEST_PROTOCOL + 1): for tag, impl_name in _impl_map.items(): impl = import_module(f"multidict.{impl_name}") for cls in impl.CIMultiDict, impl.MultiDict: write(tag, cls, proto) if __name__ == "__main__": generate() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/multidict-c-extension.pickle.00000644000175100001660000000011714766036355022057 0ustar00runnerdockercmultidict._multidict MultiDict p0 ((lp1 (Va p2 L1L tp3 a(g2 L2L tp4 atp5 Rp6 
.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/multidict-c-extension.pickle.10000644000175100001660000000010514766036355022055 0ustar00runnerdockercmultidict._multidict MultiDict q(]q((XaqKtq(hKtqetqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/multidict-c-extension.pickle.20000644000175100001660000000010414766036355022055 0ustar00runnerdocker€cmultidict._multidict MultiDict q]q(XaqK†qhK†qe…qRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/multidict-c-extension.pickle.30000644000175100001660000000010414766036355022056 0ustar00runnerdocker€cmultidict._multidict MultiDict q]q(XaqK†qhK†qe…qRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/multidict-c-extension.pickle.40000644000175100001660000000010714766036355022062 0ustar00runnerdocker€•<Œmultidict._multidict”Œ MultiDict”“”]”(Œa”K†”hK†”e…”R”.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/multidict-c-extension.pickle.50000644000175100001660000000010714766036355022063 0ustar00runnerdocker€•<Œmultidict._multidict”Œ MultiDict”“”]”(Œa”K†”hK†”e…”R”.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/multidict-pure-python.pickle.00000644000175100001660000000012214766036355022111 0ustar00runnerdockercmultidict._multidict_py MultiDict p0 ((lp1 (Va p2 L1L tp3 a(g2 L2L tp4 atp5 Rp6 .././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/multidict-pure-python.pickle.10000644000175100001660000000011014766036355022107 0ustar00runnerdockercmultidict._multidict_py MultiDict q(]q((XaqKtq(hKtqetqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/multidict-pure-python.pickle.20000644000175100001660000000010714766036355022116 0ustar00runnerdocker€cmultidict._multidict_py MultiDict q]q(XaqK†qhK†qe…qRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/multidict-pure-python.pickle.30000644000175100001660000000010714766036355022117 0ustar00runnerdocker€cmultidict._multidict_py MultiDict q]q(XaqK†qhK†qe…qRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/multidict-pure-python.pickle.40000644000175100001660000000011214766036355022114 0ustar00runnerdocker€•?Œmultidict._multidict_py”Œ MultiDict”“”]”(Œa”K†”hK†”e…”R”.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/multidict-pure-python.pickle.50000644000175100001660000000011214766036355022115 0ustar00runnerdocker€•?Œmultidict._multidict_py”Œ MultiDict”“”]”(Œa”K†”hK†”e…”R”.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/test_abc.py0000644000175100001660000000166214766036355016444 0ustar00runnerdockerfrom collections.abc import Mapping, MutableMapping from multidict import ( MultiDict, MultiDictProxy, MultiMapping, MutableMultiMapping, ) def test_abc_inheritance() -> None: assert issubclass(MultiMapping, Mapping) assert not issubclass(MultiMapping, MutableMapping) assert issubclass(MutableMultiMapping, Mapping) assert 
issubclass(MutableMultiMapping, MutableMapping) def test_multidict_inheritance(any_multidict_class: type[MultiDict[str]]) -> None: assert issubclass(any_multidict_class, MultiMapping) assert issubclass(any_multidict_class, MutableMultiMapping) def test_proxy_inheritance( any_multidict_proxy_class: type[MultiDictProxy[str]], ) -> None: assert issubclass(any_multidict_proxy_class, MultiMapping) assert not issubclass(any_multidict_proxy_class, MutableMultiMapping) def test_generic_type_in_runtime() -> None: MultiMapping[str] MutableMultiMapping[str] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/test_circular_imports.py0000644000175100001660000000610214766036355021272 0ustar00runnerdocker"""Tests for circular imports in all local packages and modules. This ensures all internal packages can be imported right away without any need to import some other module before doing so. This module is based on the idea that pytest uses for self-testing: * https://github.com/sanitizers/octomachinery/blob/be18b54/tests/circular_imports_test.py # noqa: E501 * https://github.com/pytest-dev/pytest/blob/d18c75b/testing/test_meta.py * https://twitter.com/codewithanthony/status/1229445110510735361 """ from __future__ import annotations import os import pkgutil import subprocess import sys from itertools import chain from pathlib import Path from types import ModuleType from typing import Generator import pytest import multidict def _find_all_importables(pkg: ModuleType) -> list[str]: """Find all importables in the project. Return them in order. """ return sorted( set( chain.from_iterable( _discover_path_importables(Path(p), pkg.__name__) for p in pkg.__path__ ), ), ) def _discover_path_importables( pkg_pth: Path, pkg_name: str, ) -> Generator[str, None, None]: """Yield all importables under a given path and package.""" yield pkg_name for dir_path, _d, file_names in os.walk(pkg_pth): pkg_dir_path = Path(dir_path) if pkg_dir_path.parts[-1] == "__pycache__": continue if all(Path(_).suffix != ".py" for _ in file_names): continue rel_pt = pkg_dir_path.relative_to(pkg_pth) pkg_pref = ".".join((pkg_name,) + rel_pt.parts) yield from ( pkg_path for _, pkg_path, _ in pkgutil.walk_packages( (str(pkg_dir_path),), prefix=f"{pkg_pref}.", ) ) @pytest.fixture(params=_find_all_importables(multidict)) def import_path(request: pytest.FixtureRequest) -> str: """Return an importable from the multidict package.""" importable_module: str = request.param if importable_module == "multidict._multidict": request.applymarker(pytest.mark.c_extension) return importable_module def test_no_warnings(import_path: str) -> None: """Verify that importing modules and packages doesn't explode. This is seeking for any import errors including ones caused by circular imports. 
""" imp_cmd = ( # fmt: off sys.executable, "-I", "-W", "error", "-c", f"import {import_path!s}", # fmt: on ) subprocess.check_call(imp_cmd) @pytest.mark.c_extension def test_c_extension_preferred_by_default(monkeypatch: pytest.MonkeyPatch) -> None: """Verify that the C-extension is exposed by default.""" monkeypatch.delenv("MULTIDICT_NO_EXTENSIONS", raising=False) imp_cmd = ( # fmt: off sys.executable, "-I", "-W", "error", "-c", "import multidict; raise SystemExit(int(" "multidict.istr.__module__ != 'multidict._multidict' " "or multidict.USE_EXTENSIONS is not True))", # fmt: on ) subprocess.check_call(imp_cmd) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/test_copy.py0000644000175100001660000000223114766036355016662 0ustar00runnerdockerimport copy from typing import Union from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy _MD_Classes = Union[type[MultiDict[int]], type[CIMultiDict[int]]] _MDP_Classes = Union[type[MultiDictProxy[int]], type[CIMultiDictProxy[int]]] def test_copy(any_multidict_class: _MD_Classes) -> None: d = any_multidict_class() d["foo"] = 6 d2 = d.copy() d2["foo"] = 7 assert d["foo"] == 6 assert d2["foo"] == 7 def test_copy_proxy( any_multidict_class: _MD_Classes, any_multidict_proxy_class: _MDP_Classes ) -> None: d = any_multidict_class() d["foo"] = 6 p = any_multidict_proxy_class(d) d2 = p.copy() d2["foo"] = 7 assert d["foo"] == 6 assert p["foo"] == 6 assert d2["foo"] == 7 def test_copy_std_copy(any_multidict_class: _MD_Classes) -> None: d = any_multidict_class() d["foo"] = 6 d2 = copy.copy(d) d2["foo"] = 7 assert d["foo"] == 6 assert d2["foo"] == 7 def test_ci_multidict_clone(any_multidict_class: _MD_Classes) -> None: d = any_multidict_class(foo=6) d2 = any_multidict_class(d) d2["foo"] = 7 assert d["foo"] == 6 assert d2["foo"] == 7 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/test_guard.py0000644000175100001660000000141314766036355017013 0ustar00runnerdockerimport pytest from multidict import MultiDict def test_guard_items( case_sensitive_multidict_class: type[MultiDict[str]], ) -> None: md = case_sensitive_multidict_class({"a": "b"}) it = iter(md.items()) md["a"] = "c" with pytest.raises(RuntimeError): next(it) def test_guard_keys( case_sensitive_multidict_class: type[MultiDict[str]], ) -> None: md = case_sensitive_multidict_class({"a": "b"}) it = iter(md.keys()) md["a"] = "c" with pytest.raises(RuntimeError): next(it) def test_guard_values( case_sensitive_multidict_class: type[MultiDict[str]], ) -> None: md = case_sensitive_multidict_class({"a": "b"}) it = iter(md.values()) md["a"] = "c" with pytest.raises(RuntimeError): next(it) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/test_istr.py0000644000175100001660000000360314766036355016675 0ustar00runnerdockerimport gc import sys from typing import Callable, Type import pytest IMPLEMENTATION = getattr(sys, "implementation") # to suppress mypy error def test_ctor(case_insensitive_str_class: Type[str]) -> None: s = case_insensitive_str_class() assert "" == s def test_ctor_str(case_insensitive_str_class: Type[str]) -> None: s = case_insensitive_str_class("aBcD") assert "aBcD" == s def test_ctor_istr(case_insensitive_str_class: Type[str]) -> None: s = case_insensitive_str_class("A") s2 = case_insensitive_str_class(s) assert "A" == s assert s == s2 def 
test_ctor_buffer(case_insensitive_str_class: Type[str]) -> None: s = case_insensitive_str_class(b"aBc") assert "b'aBc'" == s def test_ctor_repr(case_insensitive_str_class: Type[str]) -> None: s = case_insensitive_str_class(None) assert "None" == s def test_str(case_insensitive_str_class: Type[str]) -> None: s = case_insensitive_str_class("aBcD") s1 = str(s) assert s1 == "aBcD" assert type(s1) is str def test_eq(case_insensitive_str_class: Type[str]) -> None: s1 = "Abc" s2 = case_insensitive_str_class(s1) assert s1 == s2 @pytest.fixture def create_istrs(case_insensitive_str_class: Type[str]) -> Callable[[], None]: """Make a callable populating memory with a few ``istr`` objects.""" def _create_strs() -> None: case_insensitive_str_class("foobarbaz") istr2 = case_insensitive_str_class() case_insensitive_str_class(istr2) return _create_strs @pytest.mark.skipif( IMPLEMENTATION.name != "cpython", reason="PyPy has different GC implementation", ) def test_leak(create_istrs: Callable[[], None]) -> None: gc.collect() cnt = len(gc.get_objects()) for _ in range(10000): create_istrs() gc.collect() cnt2 = len(gc.get_objects()) assert abs(cnt - cnt2) < 50 # on PyPy these numbers are not equal ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/test_multidict.py0000644000175100001660000006252114766036355017716 0ustar00runnerdockerfrom __future__ import annotations import gc import operator import sys import weakref from collections import deque from collections.abc import Callable, Iterable, Iterator, KeysView, Mapping from types import ModuleType from typing import Union, cast import pytest import multidict from multidict import ( CIMultiDict, MultiDict, MultiDictProxy, MultiMapping, MutableMultiMapping, ) def chained_callable( module: ModuleType, callables: Iterable[str], ) -> Callable[..., MultiMapping[int | str] | MutableMultiMapping[int | str]]: """ Return callable that will get and call all given objects in module in exact order. 
""" def chained_call( *args: object, **kwargs: object, ) -> MultiMapping[int | str] | MutableMultiMapping[int | str]: nonlocal callables callable_chain = (getattr(module, name) for name in callables) first_callable = next(callable_chain) value = first_callable(*args, **kwargs) for element in callable_chain: value = element(value) return cast( Union[ MultiMapping[Union[int, str]], MutableMultiMapping[Union[int, str]], ], value, ) return chained_call @pytest.fixture def cls( # type: ignore[misc] request: pytest.FixtureRequest, multidict_module: ModuleType, ) -> Callable[..., MultiMapping[int | str] | MutableMultiMapping[int | str]]: """Make a callable from multidict module, requested by name.""" return chained_callable(multidict_module, request.param) def test_exposed_names(any_multidict_class_name: str) -> None: assert any_multidict_class_name in multidict.__all__ @pytest.mark.parametrize( ("cls", "key_cls"), ( (("MultiDict",), str), ( ("MultiDict", "MultiDictProxy"), str, ), ), indirect=["cls"], ) def test__iter__types( cls: type[MultiDict[Union[str, int]]], key_cls: type[str], ) -> None: d = cls([("key", "one"), ("key2", "two"), ("key", 3)]) for i in d: assert type(i) is key_cls, (type(i), key_cls) def test_proxy_copy( any_multidict_class: type[MultiDict[str]], any_multidict_proxy_class: type[MultiDictProxy[str]], ) -> None: d1 = any_multidict_class(key="value", a="b") p1 = any_multidict_proxy_class(d1) d2 = p1.copy() assert d1 == d2 assert d1 is not d2 def test_multidict_subclassing( any_multidict_class: type[MultiDict[str]], ) -> None: class DummyMultidict(any_multidict_class): # type: ignore[valid-type,misc] pass def test_multidict_proxy_subclassing( any_multidict_proxy_class: type[MultiDictProxy[str]], ) -> None: class DummyMultidictProxy( any_multidict_proxy_class, # type: ignore[valid-type,misc] ): pass class BaseMultiDictTest: def test_instantiate__empty(self, cls: type[MutableMultiMapping[str]]) -> None: d = cls() empty: Mapping[str, str] = {} assert d == empty assert len(d) == 0 assert list(d.keys()) == [] assert list(d.values()) == [] assert list(d.items()) == [] assert cls() != list() # type: ignore[comparison-overlap] with pytest.raises(TypeError, match=r"3 were given"): cls(("key1", "value1"), ("key2", "value2")) # type: ignore[call-arg] # noqa: E501 @pytest.mark.parametrize("arg0", ([("key", "value1")], {"key": "value1"})) def test_instantiate__from_arg0( self, cls: type[MultiDict[str]], arg0: Union[list[tuple[str, str]], dict[str, str]], ) -> None: d = cls(arg0) assert d == {"key": "value1"} assert len(d) == 1 assert list(d.keys()) == ["key"] assert list(d.values()) == ["value1"] assert list(d.items()) == [("key", "value1")] def test_instantiate__with_kwargs( self, cls: type[MultiDict[str]], ) -> None: d = cls([("key", "value1")], key2="value2") assert d == {"key": "value1", "key2": "value2"} assert len(d) == 2 assert sorted(d.keys()) == ["key", "key2"] assert sorted(d.values()) == ["value1", "value2"] assert sorted(d.items()) == [("key", "value1"), ("key2", "value2")] def test_instantiate__from_generator( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]] ) -> None: d = cls((str(i), i) for i in range(2)) assert d == {"0": 0, "1": 1} assert len(d) == 2 assert sorted(d.keys()) == ["0", "1"] assert sorted(d.values()) == [0, 1] assert sorted(d.items()) == [("0", 0), ("1", 1)] def test_instantiate__from_list_of_lists( self, cls: type[MutableMultiMapping[str]], ) -> None: # Should work at runtime, but won't type check. 
d = cls([["key", "value1"]]) # type: ignore[call-arg] assert d == {"key": "value1"} def test_instantiate__from_list_of_custom_pairs( self, cls: type[MultiDict[str]], ) -> None: class Pair: def __len__(self) -> int: return 2 def __getitem__(self, pos: int) -> str: return ("key", "value1")[pos] # Works at runtime, but won't type check. d = cls([Pair()]) # type: ignore[list-item] assert d == {"key": "value1"} def test_getone(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")], key="value2") assert d.getone("key") == "value1" assert d.get("key") == "value1" assert d["key"] == "value1" with pytest.raises(KeyError, match="key2"): d["key2"] with pytest.raises(KeyError, match="key2"): d.getone("key2") assert d.getone("key2", "default") == "default" def test_call_with_kwargs(self, cls: type[MultiDict[str]]) -> None: d = cls([("present", "value")]) assert d.getall(default="missing", key="notfound") == "missing" def test__iter__( self, cls: Union[ type[MultiDict[Union[str, int]]], type[CIMultiDict[Union[str, int]]], ], ) -> None: d = cls([("key", "one"), ("key2", "two"), ("key", 3)]) assert list(d) == ["key", "key2", "key"] def test__contains( self, cls: Union[ type[MultiDict[Union[str, int]]], type[CIMultiDict[Union[str, int]]], ], ) -> None: d = cls([("key", "one"), ("key2", "two"), ("key", 3)]) assert list(d) == ["key", "key2", "key"] assert "key" in d assert "key2" in d assert "foo" not in d assert 42 not in d # type: ignore[comparison-overlap] def test_keys__contains( self, cls: Union[ type[MultiDict[Union[str, int]]], type[CIMultiDict[Union[str, int]]], ], ) -> None: d = cls([("key", "one"), ("key2", "two"), ("key", 3)]) assert list(d.keys()) == ["key", "key2", "key"] assert "key" in d.keys() assert "key2" in d.keys() assert "foo" not in d.keys() assert 42 not in d.keys() # type: ignore[comparison-overlap] def test_values__contains( self, cls: Union[ type[MultiDict[Union[str, int]]], type[CIMultiDict[Union[str, int]]], ], ) -> None: d = cls([("key", "one"), ("key", "two"), ("key", 3)]) assert list(d.values()) == ["one", "two", 3] assert "one" in d.values() assert "two" in d.values() assert 3 in d.values() assert "foo" not in d.values() def test_items__contains( self, cls: Union[ type[MultiDict[Union[str, int]]], type[CIMultiDict[Union[str, int]]], ], ) -> None: d = cls([("key", "one"), ("key", "two"), ("key", 3)]) assert list(d.items()) == [("key", "one"), ("key", "two"), ("key", 3)] assert ("key", "one") in d.items() assert ("key", "two") in d.items() assert ("key", 3) in d.items() assert ("foo", "bar") not in d.items() assert (42, 3) not in d.items() # type: ignore[comparison-overlap] assert 42 not in d.items() # type: ignore[comparison-overlap] def test_cannot_create_from_unaccepted( self, cls: type[MutableMultiMapping[str]], ) -> None: with pytest.raises(TypeError): cls([(1, 2, 3)]) # type: ignore[call-arg] def test_keys_is_set_less(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert d.keys() < {"key", "key2"} @pytest.mark.parametrize( ("contents", "expected"), ( ([("key", "value1")], True), ([("key", "value1"), ("key2", "value2")], True), ([("key", "value1"), ("key2", "value2"), ("key3", "value3")], False), ([("key", "value1"), ("key3", "value3")], False), ), ) def test_keys_is_set_less_equal( self, cls: type[MultiDict[str]], contents: list[tuple[str, str]], expected: bool, ) -> None: d = cls(contents) result = d.keys() <= {"key", "key2"} assert result is expected def test_keys_is_set_equal(self, cls: type[MultiDict[str]]) -> None: d = 
cls([("key", "value1")]) assert d.keys() == {"key"} def test_items_is_set_equal(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert d.items() == {("key", "value1")} def test_keys_is_set_greater(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) assert d.keys() > {"key"} @pytest.mark.parametrize( ("set_", "expected"), ( ({"key"}, True), ({"key", "key2"}, True), ({"key", "key2", "key3"}, False), ({"key3"}, False), ), ) def test_keys_is_set_greater_equal( self, cls: type[MultiDict[str]], set_: set[str], expected: bool ) -> None: d = cls([("key", "value1"), ("key2", "value2")]) result = d.keys() >= set_ assert result is expected def test_keys_less_than_not_implemented(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) sentinel_operation_result = object() class RightOperand: def __gt__(self, other: KeysView[str]) -> object: assert isinstance(other, KeysView) return sentinel_operation_result assert (d.keys() < RightOperand()) is sentinel_operation_result def test_keys_less_than_or_equal_not_implemented( self, cls: type[MultiDict[str]] ) -> None: d = cls([("key", "value1")]) sentinel_operation_result = object() class RightOperand: def __ge__(self, other: KeysView[str]) -> object: assert isinstance(other, KeysView) return sentinel_operation_result assert (d.keys() <= RightOperand()) is sentinel_operation_result def test_keys_greater_than_not_implemented(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) sentinel_operation_result = object() class RightOperand: def __lt__(self, other: KeysView[str]) -> object: assert isinstance(other, KeysView) return sentinel_operation_result assert (d.keys() > RightOperand()) is sentinel_operation_result def test_keys_greater_than_or_equal_not_implemented( self, cls: type[MultiDict[str]] ) -> None: d = cls([("key", "value1")]) sentinel_operation_result = object() class RightOperand: def __le__(self, other: KeysView[str]) -> object: assert isinstance(other, KeysView) return sentinel_operation_result assert (d.keys() >= RightOperand()) is sentinel_operation_result def test_keys_is_set_not_equal(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert d.keys() != {"key2"} def test_keys_not_equal_unrelated_type(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert d.keys() != "other" # type: ignore[comparison-overlap] def test_eq(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert {"key": "value1"} == d def test_eq2(self, cls: type[MultiDict[str]]) -> None: d1 = cls([("key", "value1")]) d2 = cls([("key2", "value1")]) assert d1 != d2 def test_eq3(self, cls: type[MultiDict[str]]) -> None: d1 = cls([("key", "value1")]) d2 = cls() assert d1 != d2 def test_eq_other_mapping_contains_more_keys( self, cls: type[MultiDict[str]], ) -> None: d1 = cls(foo="bar") d2 = dict(foo="bar", bar="baz") assert d1 != d2 def test_eq_bad_mapping_len( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]] ) -> None: class BadMapping(Mapping[str, int]): def __getitem__(self, key: str) -> int: return 1 # pragma: no cover # `len()` fails earlier def __iter__(self) -> Iterator[str]: yield "a" # pragma: no cover # `len()` fails earlier def __len__(self) -> int: return 1 // 0 d1 = cls(a=1) d2 = BadMapping() with pytest.raises(ZeroDivisionError): d1 == d2 def test_eq_bad_mapping_getitem( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]], ) -> None: class BadMapping(Mapping[str, int]): def __getitem__(self, 
key: str) -> int: return 1 // 0 def __iter__(self) -> Iterator[str]: yield "a" # pragma: no cover # foreign objects no iterated def __len__(self) -> int: return 1 d1 = cls(a=1) d2 = BadMapping() with pytest.raises(ZeroDivisionError): d1 == d2 def test_ne(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert d != {"key": "another_value"} def test_and(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert {"key"} == d.keys() & {"key", "key2"} def test_and2(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert {"key"} == {"key", "key2"} & d.keys() def test_bitwise_and_not_implemented(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) sentinel_operation_result = object() class RightOperand: def __rand__(self, other: KeysView[str]) -> object: assert isinstance(other, KeysView) return sentinel_operation_result assert d.keys() & RightOperand() is sentinel_operation_result def test_bitwise_and_iterable_not_set(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert {"key"} == d.keys() & ["key", "key2"] def test_or(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert {"key", "key2"} == d.keys() | {"key2"} def test_or2(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert {"key", "key2"} == {"key2"} | d.keys() def test_bitwise_or_not_implemented(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) sentinel_operation_result = object() class RightOperand: def __ror__(self, other: KeysView[str]) -> object: assert isinstance(other, KeysView) return sentinel_operation_result assert d.keys() | RightOperand() is sentinel_operation_result def test_bitwise_or_iterable_not_set(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert {"key", "key2"} == d.keys() | ["key2"] def test_sub(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) assert {"key"} == d.keys() - {"key2"} def test_sub2(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) assert {"key3"} == {"key", "key2", "key3"} - d.keys() def test_sub_not_implemented(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) sentinel_operation_result = object() class RightOperand: def __rsub__(self, other: KeysView[str]) -> object: assert isinstance(other, KeysView) return sentinel_operation_result assert d.keys() - RightOperand() is sentinel_operation_result def test_sub_iterable_not_set(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) assert {"key"} == d.keys() - ["key2"] def test_xor(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) assert {"key", "key3"} == d.keys() ^ {"key2", "key3"} def test_xor2(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) assert {"key", "key3"} == {"key2", "key3"} ^ d.keys() def test_xor_not_implemented(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) sentinel_operation_result = object() class RightOperand: def __rxor__(self, other: KeysView[str]) -> object: assert isinstance(other, KeysView) return sentinel_operation_result assert d.keys() ^ RightOperand() is sentinel_operation_result def test_xor_iterable_not_set(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) assert {"key", "key3"} == d.keys() ^ ["key2", "key3"] 
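# --- Editorial note: illustrative sketch, not part of the archived test module.
# The set-algebra tests above assert that the view returned by MultiDict.keys()
# behaves like a collections.abc.KeysView and accepts plain iterables, not only
# sets, on either side of the &, |, - and ^ operators.  The standalone snippet
# below restates that behaviour against the public multidict API directly (no
# test fixtures involved); each assertion mirrors one of the tests above.
from multidict import MultiDict

_md_example: MultiDict[str] = MultiDict([("key", "value1"), ("key2", "value2")])
# Intersection, union, difference and symmetric difference all accept lists.
assert _md_example.keys() & ["key", "missing"] == {"key"}
assert _md_example.keys() | ["key3"] == {"key", "key2", "key3"}
assert _md_example.keys() - ["key2"] == {"key"}
assert _md_example.keys() ^ ["key2", "key3"] == {"key", "key3"}
# --- End of editorial note. -------------------------------------------------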
@pytest.mark.parametrize( ("key", "value", "expected"), (("key2", "v", True), ("key", "value1", False)), ) def test_isdisjoint( self, cls: type[MultiDict[str]], key: str, value: str, expected: bool ) -> None: d = cls([("key", "value1")]) assert d.items().isdisjoint({(key, value)}) is expected assert d.keys().isdisjoint({key}) is expected def test_repr_aiohttp_issue_410(self, cls: type[MutableMultiMapping[str]]) -> None: d = cls() try: raise Exception pytest.fail("Should never happen") # pragma: no cover except Exception as e: repr(d) assert sys.exc_info()[1] == e # noqa: PT017 @pytest.mark.parametrize( "op", (operator.or_, operator.and_, operator.sub, operator.xor), ) @pytest.mark.parametrize("other", ({"other"},)) def test_op_issue_aiohttp_issue_410( self, cls: type[MultiDict[str]], op: Callable[[object, object], object], other: set[str], ) -> None: d = cls([("key", "value")]) try: raise Exception pytest.fail("Should never happen") # pragma: no cover except Exception as e: op(d.keys(), other) assert sys.exc_info()[1] == e # noqa: PT017 def test_weakref(self, cls: type[MutableMultiMapping[str]]) -> None: called = False def cb(wr: object) -> None: nonlocal called called = True d = cls() wr = weakref.ref(d, cb) del d gc.collect() assert called del wr def test_iter_length_hint_keys( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]], ) -> None: md = cls(a=1, b=2) it = iter(md.keys()) assert it.__length_hint__() == 2 # type: ignore[attr-defined] def test_iter_length_hint_items( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]], ) -> None: md = cls(a=1, b=2) it = iter(md.items()) assert it.__length_hint__() == 2 # type: ignore[attr-defined] def test_iter_length_hint_values( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]], ) -> None: md = cls(a=1, b=2) it = iter(md.values()) assert it.__length_hint__() == 2 def test_ctor_list_arg_and_kwds( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]], ) -> None: arg = [("a", 1)] obj = cls(arg, b=2) assert list(obj.items()) == [("a", 1), ("b", 2)] assert arg == [("a", 1)] def test_ctor_tuple_arg_and_kwds( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]], ) -> None: arg = (("a", 1),) obj = cls(arg, b=2) assert list(obj.items()) == [("a", 1), ("b", 2)] assert arg == (("a", 1),) def test_ctor_deque_arg_and_kwds( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]], ) -> None: arg = deque([("a", 1)]) obj = cls(arg, b=2) assert list(obj.items()) == [("a", 1), ("b", 2)] assert arg == deque([("a", 1)]) class TestMultiDict(BaseMultiDictTest): @pytest.fixture( params=[ ("MultiDict",), ("MultiDict", "MultiDictProxy"), ], ) def cls( # type: ignore[misc] self, request: pytest.FixtureRequest, multidict_module: ModuleType, ) -> Callable[..., MultiMapping[int | str] | MutableMultiMapping[int | str]]: """Make a case-sensitive multidict class/proxy constructor.""" return chained_callable(multidict_module, request.param) def test__repr__(self, cls: type[MultiDict[str]]) -> None: d = cls() _cls = type(d) assert str(d) == "<%s()>" % _cls.__name__ d = cls([("key", "one"), ("key", "two")]) assert str(d) == "<%s('key': 'one', 'key': 'two')>" % _cls.__name__ def test_getall(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")], key="value2") assert d != {"key": "value1"} assert len(d) == 2 assert d.getall("key") == ["value1", "value2"] with pytest.raises(KeyError, match="some_key"): d.getall("some_key") default = object() assert d.getall("some_key", default) is default def 
test_preserve_stable_ordering( self, cls: type[MultiDict[Union[str, int]]], ) -> None: d = cls([("a", 1), ("b", "2"), ("a", 3)]) s = "&".join("{}={}".format(k, v) for k, v in d.items()) assert s == "a=1&b=2&a=3" def test_get(self, cls: type[MultiDict[int]]) -> None: d = cls([("a", 1), ("a", 2)]) assert d["a"] == 1 def test_items__repr__(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")], key="value2") expected = "_ItemsView('key': 'value1', 'key': 'value2')" assert repr(d.items()) == expected def test_keys__repr__(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")], key="value2") assert repr(d.keys()) == "_KeysView('key', 'key')" def test_values__repr__(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")], key="value2") assert repr(d.values()) == "_ValuesView('value1', 'value2')" class TestCIMultiDict(BaseMultiDictTest): @pytest.fixture( params=[ ("CIMultiDict",), ("CIMultiDict", "CIMultiDictProxy"), ], ) def cls( # type: ignore[misc] self, request: pytest.FixtureRequest, multidict_module: ModuleType, ) -> Callable[..., MultiMapping[int | str] | MutableMultiMapping[int | str]]: """Make a case-insensitive multidict class/proxy constructor.""" return chained_callable(multidict_module, request.param) def test_basics(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "value1")], KEY="value2") assert d.getone("key") == "value1" assert d.get("key") == "value1" assert d.get("key2", "val") == "val" assert d["key"] == "value1" assert "key" in d with pytest.raises(KeyError, match="key2"): d["key2"] with pytest.raises(KeyError, match="key2"): d.getone("key2") def test_getall(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "value1")], KEY="value2") assert not d == {"KEY": "value1"} assert len(d) == 2 assert d.getall("key") == ["value1", "value2"] with pytest.raises(KeyError, match="some_key"): d.getall("some_key") def test_get(self, cls: type[CIMultiDict[int]]) -> None: d = cls([("A", 1), ("a", 2)]) assert 1 == d["a"] def test__repr__(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "value1")], key="value2") _cls = type(d) expected = "<%s('KEY': 'value1', 'key': 'value2')>" % _cls.__name__ assert str(d) == expected def test_items__repr__(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "value1")], key="value2") expected = "_ItemsView('KEY': 'value1', 'key': 'value2')" assert repr(d.items()) == expected def test_keys__repr__(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "value1")], key="value2") assert repr(d.keys()) == "_KeysView('KEY', 'key')" def test_values__repr__(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "value1")], key="value2") assert repr(d.values()) == "_ValuesView('value1', 'value2')" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/test_multidict_benchmarks.py0000644000175100001660000002452314766036355022113 0ustar00runnerdocker"""codspeed benchmarks for multidict.""" from typing import Dict, Union from pytest_codspeed import BenchmarkFixture from multidict import CIMultiDict, MultiDict, istr # Note that this benchmark should not be refactored to use pytest.mark.parametrize # since each benchmark name should be unique. 
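# --- Editorial note: illustrative sketch, not part of the archived benchmark
# module.  Every benchmark below follows the same shape: setup (building the
# list of keys and, where mutation is involved, a base mapping that gets copied
# inside the closure) happens outside the measured code, while the work to be
# timed lives in an inner ``_run`` closure decorated with the pytest-codspeed
# ``benchmark`` fixture, so only the closure body is instrumented.  The
# function below is a hypothetical extra benchmark written in that same style;
# the name ``test_multidict_getone_str`` is invented for this sketch and does
# not exist in the archive.
from pytest_codspeed import BenchmarkFixture  # mirrors the module's own import
from multidict import MultiDict  # mirrors the module's own import


def test_multidict_getone_str(benchmark: BenchmarkFixture) -> None:
    # Setup is done once, outside the measured closure.
    md: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100))
    items = [str(i) for i in range(100)]

    @benchmark
    def _run() -> None:
        # Only this loop is measured by the benchmark fixture.
        for i in items:
            md.getone(i)
# --- End of editorial note. -------------------------------------------------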
_SENTINEL = object() def test_multidict_insert_str(benchmark: BenchmarkFixture) -> None: md: MultiDict[str] = MultiDict() items = [str(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md[i] = i def test_cimultidict_insert_str(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[str] = CIMultiDict() items = [str(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md[i] = i def test_cimultidict_insert_istr(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[istr] = CIMultiDict() items = [istr(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md[i] = i def test_multidict_add_str(benchmark: BenchmarkFixture) -> None: md: MultiDict[str] = MultiDict() items = [str(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md.add(i, i) def test_cimultidict_add_str(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[str] = CIMultiDict() items = [str(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md.add(i, i) def test_cimultidict_add_istr(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[istr] = CIMultiDict() items = [istr(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md.add(i, i) def test_multidict_pop_str(benchmark: BenchmarkFixture) -> None: md_base: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100)] @benchmark def _run() -> None: md = md_base.copy() for i in items: md.pop(i) def test_cimultidict_pop_str(benchmark: BenchmarkFixture) -> None: md_base: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100)] @benchmark def _run() -> None: md = md_base.copy() for i in items: md.pop(i) def test_cimultidict_pop_istr(benchmark: BenchmarkFixture) -> None: md_base: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100)) items = [istr(i) for i in range(100)] @benchmark def _run() -> None: md = md_base.copy() for i in items: md.pop(i) def test_multidict_popitem_str(benchmark: BenchmarkFixture) -> None: md_base: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100)) @benchmark def _run() -> None: md = md_base.copy() for _ in range(100): md.popitem() def test_cimultidict_popitem_str(benchmark: BenchmarkFixture) -> None: md_base: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100)) @benchmark def _run() -> None: md = md_base.copy() for _ in range(100): md.popitem() def test_multidict_clear_str(benchmark: BenchmarkFixture) -> None: md: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100)) @benchmark def _run() -> None: md.clear() def test_cimultidict_clear_str(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100)) @benchmark def _run() -> None: md.clear() def test_multidict_update_str(benchmark: BenchmarkFixture) -> None: md: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100)) items = {str(i): str(i) for i in range(100, 200)} @benchmark def _run() -> None: md.update(items) def test_cimultidict_update_str(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100)) items = {str(i): str(i) for i in range(100, 200)} @benchmark def _run() -> None: md.update(items) def test_cimultidict_update_istr(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100)) items: Dict[Union[str, istr], istr] = {istr(i): istr(i) for i in range(100, 200)} @benchmark def _run() -> None: 
md.update(items) def test_multidict_extend_str(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100)) items = {str(i): str(i) for i in range(200)} @benchmark def _run() -> None: md.extend(items) def test_cimultidict_extend_str(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100)) items = {str(i): str(i) for i in range(200)} @benchmark def _run() -> None: md.extend(items) def test_cimultidict_extend_istr(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100)) items = {istr(i): istr(i) for i in range(200)} @benchmark def _run() -> None: md.extend(items) def test_multidict_delitem_str(benchmark: BenchmarkFixture) -> None: md_base: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100)] @benchmark def _run() -> None: md = md_base.copy() for i in items: del md[i] def test_cimultidict_delitem_str(benchmark: BenchmarkFixture) -> None: md_base: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100)] @benchmark def _run() -> None: md = md_base.copy() for i in items: del md[i] def test_cimultidict_delitem_istr(benchmark: BenchmarkFixture) -> None: md_base: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100)) items = [istr(i) for i in range(100)] @benchmark def _run() -> None: md = md_base.copy() for i in items: del md[i] def test_multidict_getall_str_hit(benchmark: BenchmarkFixture) -> None: md: MultiDict[str] = MultiDict(("all", str(i)) for i in range(100)) @benchmark def _run() -> None: md.getall("all") def test_cimultidict_getall_str_hit(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[str] = CIMultiDict(("all", str(i)) for i in range(100)) @benchmark def _run() -> None: md.getall("all") def test_cimultidict_getall_istr_hit(benchmark: BenchmarkFixture) -> None: all_istr = istr("all") md: CIMultiDict[istr] = CIMultiDict((all_istr, istr(i)) for i in range(100)) @benchmark def _run() -> None: md.getall(all_istr) def test_multidict_fetch(benchmark: BenchmarkFixture) -> None: md: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md[i] def test_cimultidict_fetch_str(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md[i] def test_cimultidict_fetch_istr(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100)) items = [istr(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md[i] def test_multidict_get_hit(benchmark: BenchmarkFixture) -> None: md: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md.get(i) def test_multidict_get_miss(benchmark: BenchmarkFixture) -> None: md: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100, 200)] @benchmark def _run() -> None: for i in items: md.get(i) def test_cimultidict_get_hit(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md.get(i) def test_cimultidict_get_miss(benchmark: 
BenchmarkFixture) -> None: md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100, 200)] @benchmark def _run() -> None: for i in items: md.get(i) def test_cimultidict_get_istr_hit(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100)) items = [istr(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md.get(i) def test_cimultidict_get_istr_miss(benchmark: BenchmarkFixture) -> None: md: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100)) items = [istr(i) for i in range(100, 200)] @benchmark def _run() -> None: for i in items: md.get(i) def test_cimultidict_get_hit_with_default( benchmark: BenchmarkFixture, ) -> None: md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md.get(i, _SENTINEL) def test_cimultidict_get_miss_with_default( benchmark: BenchmarkFixture, ) -> None: md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100, 200)] @benchmark def _run() -> None: for i in items: md.get(i, _SENTINEL) def test_cimultidict_get_istr_hit_with_default( benchmark: BenchmarkFixture, ) -> None: md: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100)) items = [istr(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md.get(i, _SENTINEL) def test_cimultidict_get_istr_with_default_miss( benchmark: BenchmarkFixture, ) -> None: md: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100)) items = [istr(i) for i in range(100, 200)] @benchmark def _run() -> None: for i in items: md.get(i, _SENTINEL) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/test_mutable_multidict.py0000644000175100001660000004636414766036355021436 0ustar00runnerdockerimport string import sys from typing import Union import pytest from multidict import CIMultiDict, CIMultiDictProxy, MultiDictProxy, istr class TestMutableMultiDict: def test_copy( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d1 = case_sensitive_multidict_class(key="value", a="b") d2 = d1.copy() assert d1 == d2 assert d1 is not d2 def test__repr__( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class() assert str(d) == "<%s()>" % case_sensitive_multidict_class.__name__ d = case_sensitive_multidict_class([("key", "one"), ("key", "two")]) expected = ( f"<{case_sensitive_multidict_class.__name__}" "('key': 'one', 'key': 'two')>" ) assert str(d) == expected def test_getall( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class([("key", "value1")], key="value2") assert len(d) == 2 assert d.getall("key") == ["value1", "value2"] with pytest.raises(KeyError, match="some_key"): d.getall("some_key") default = object() assert d.getall("some_key", default) is default def test_add( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class() assert d == {} d["key"] = "one" assert d == {"key": "one"} assert d.getall("key") == ["one"] d["key"] = "two" assert d == {"key": "two"} assert d.getall("key") == ["two"] d.add("key", "one") assert 2 == len(d) assert d.getall("key") == ["two", "one"] d.add("foo", "bar") assert 3 == len(d) assert d.getall("foo") == ["bar"] def test_extend( self, 
case_sensitive_multidict_class: type[CIMultiDict[Union[str, int]]], ) -> None: d = case_sensitive_multidict_class() assert d == {} d.extend([("key", "one"), ("key", "two")], key=3, foo="bar") assert d != {"key": "one", "foo": "bar"} assert 4 == len(d) itms = d.items() # we can't guarantee order of kwargs assert ("key", "one") in itms assert ("key", "two") in itms assert ("key", 3) in itms assert ("foo", "bar") in itms other = case_sensitive_multidict_class(bar="baz") assert other == {"bar": "baz"} d.extend(other) assert ("bar", "baz") in d.items() d.extend({"foo": "moo"}) assert ("foo", "moo") in d.items() d.extend() assert 6 == len(d) with pytest.raises(TypeError): d.extend("foo", "bar") # type: ignore[arg-type, call-arg] def test_extend_from_proxy( self, case_sensitive_multidict_class: type[CIMultiDict[str]], case_sensitive_multidict_proxy_class: type[MultiDictProxy[str]], ) -> None: d = case_sensitive_multidict_class([("a", "a"), ("b", "b")]) proxy = case_sensitive_multidict_proxy_class(d) d2 = case_sensitive_multidict_class() d2.extend(proxy) assert [("a", "a"), ("b", "b")] == list(d2.items()) def test_clear( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class([("key", "one")], key="two", foo="bar") d.clear() assert d == {} assert list(d.items()) == [] def test_del( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class([("key", "one"), ("key", "two")], foo="bar") assert list(d.keys()) == ["key", "key", "foo"] del d["key"] assert d == {"foo": "bar"} assert list(d.items()) == [("foo", "bar")] with pytest.raises(KeyError, match="key"): del d["key"] def test_set_default( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class([("key", "one"), ("key", "two")], foo="bar") assert "one" == d.setdefault("key", "three") assert "three" == d.setdefault("otherkey", "three") assert "otherkey" in d assert "three" == d["otherkey"] def test_popitem( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class() d.add("key", "val1") d.add("key", "val2") assert ("key", "val1") == d.popitem() assert [("key", "val2")] == list(d.items()) def test_popitem_empty_multidict( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class() with pytest.raises(KeyError): d.popitem() def test_pop( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class() d.add("key", "val1") d.add("key", "val2") assert "val1" == d.pop("key") assert {"key": "val2"} == d def test_pop2( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class() d.add("key", "val1") d.add("key2", "val2") d.add("key", "val3") assert "val1" == d.pop("key") assert [("key2", "val2"), ("key", "val3")] == list(d.items()) def test_pop_default( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class(other="val") assert "default" == d.pop("key", "default") assert "other" in d def test_pop_raises( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class(other="val") with pytest.raises(KeyError, match="key"): d.pop("key") assert "other" in d def test_replacement_order( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class() d.add("key1", "val1") d.add("key2", 
"val2") d.add("key1", "val3") d.add("key2", "val4") d["key1"] = "val" expected = [("key1", "val"), ("key2", "val2"), ("key2", "val4")] assert expected == list(d.items()) def test_nonstr_key( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class() with pytest.raises(TypeError): d[1] = "val" # type: ignore[index] def test_istr_key( self, case_sensitive_multidict_class: type[CIMultiDict[str]], case_insensitive_str_class: type[str], ) -> None: d = case_sensitive_multidict_class() d[case_insensitive_str_class("1")] = "val" assert type(list(d.keys())[0]) is case_insensitive_str_class def test_str_derived_key( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: class A(str): pass d = case_sensitive_multidict_class() d[A("1")] = "val" assert type(list(d.keys())[0]) is A def test_istr_key_add( self, case_sensitive_multidict_class: type[CIMultiDict[str]], case_insensitive_str_class: type[str], ) -> None: d = case_sensitive_multidict_class() d.add(case_insensitive_str_class("1"), "val") assert type(list(d.keys())[0]) is case_insensitive_str_class def test_str_derived_key_add( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: class A(str): pass d = case_sensitive_multidict_class() d.add(A("1"), "val") assert type(list(d.keys())[0]) is A def test_popall( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class() d.add("key1", "val1") d.add("key2", "val2") d.add("key1", "val3") ret = d.popall("key1") assert ["val1", "val3"] == ret assert {"key2": "val2"} == d def test_popall_default( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class() assert "val" == d.popall("key", "val") def test_popall_key_error( self, case_sensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_sensitive_multidict_class() with pytest.raises(KeyError, match="key"): d.popall("key") def test_large_multidict_resizing( self, case_sensitive_multidict_class: type[CIMultiDict[int]], ) -> None: SIZE = 1024 d = case_sensitive_multidict_class() for i in range(SIZE): d["key" + str(i)] = i for i in range(SIZE - 1): del d["key" + str(i)] assert {"key" + str(SIZE - 1): SIZE - 1} == d class TestCIMutableMultiDict: def test_getall( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_insensitive_multidict_class([("KEY", "value1")], KEY="value2") assert d != {"KEY": "value1"} assert len(d) == 2 assert d.getall("key") == ["value1", "value2"] with pytest.raises(KeyError, match="some_key"): d.getall("some_key") def test_ctor( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_insensitive_multidict_class(k1="v1") assert "v1" == d["K1"] assert ("k1", "v1") in d.items() def test_setitem( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_insensitive_multidict_class() d["k1"] = "v1" assert "v1" == d["K1"] assert ("k1", "v1") in d.items() def test_delitem( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_insensitive_multidict_class() d["k1"] = "v1" assert "K1" in d del d["k1"] assert "K1" not in d def test_copy( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d1 = case_insensitive_multidict_class(key="KEY", a="b") d2 = d1.copy() assert d1 == d2 assert d1.items() == d2.items() assert d1 is not d2 def test__repr__( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> 
None: d = case_insensitive_multidict_class() assert str(d) == "<%s()>" % case_insensitive_multidict_class.__name__ d = case_insensitive_multidict_class([("KEY", "one"), ("KEY", "two")]) expected = ( f"<{case_insensitive_multidict_class.__name__}" "('KEY': 'one', 'KEY': 'two')>" ) assert str(d) == expected def test_add( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_insensitive_multidict_class() assert d == {} d["KEY"] = "one" assert ("KEY", "one") in d.items() assert d == case_insensitive_multidict_class({"Key": "one"}) assert d.getall("key") == ["one"] d["KEY"] = "two" assert ("KEY", "two") in d.items() assert d == case_insensitive_multidict_class({"Key": "two"}) assert d.getall("key") == ["two"] d.add("KEY", "one") assert ("KEY", "one") in d.items() assert 2 == len(d) assert d.getall("key") == ["two", "one"] d.add("FOO", "bar") assert ("FOO", "bar") in d.items() assert 3 == len(d) assert d.getall("foo") == ["bar"] d.add(key="test", value="test") assert ("test", "test") in d.items() assert 4 == len(d) assert d.getall("test") == ["test"] def test_extend( self, case_insensitive_multidict_class: type[CIMultiDict[Union[str, int]]], ) -> None: d = case_insensitive_multidict_class() assert d == {} d.extend([("KEY", "one"), ("key", "two")], key=3, foo="bar") assert 4 == len(d) itms = d.items() # we can't guarantee order of kwargs assert ("KEY", "one") in itms assert ("key", "two") in itms assert ("key", 3) in itms assert ("foo", "bar") in itms other = case_insensitive_multidict_class(Bar="baz") assert other == {"Bar": "baz"} d.extend(other) assert ("Bar", "baz") in d.items() assert "bar" in d d.extend({"Foo": "moo"}) assert ("Foo", "moo") in d.items() assert "foo" in d d.extend() assert 6 == len(d) with pytest.raises(TypeError): d.extend("foo", "bar") # type: ignore[arg-type, call-arg] def test_extend_from_proxy( self, case_insensitive_multidict_class: type[CIMultiDict[str]], case_insensitive_multidict_proxy_class: type[CIMultiDictProxy[str]], ) -> None: d = case_insensitive_multidict_class([("a", "a"), ("b", "b")]) proxy = case_insensitive_multidict_proxy_class(d) d2 = case_insensitive_multidict_class() d2.extend(proxy) assert [("a", "a"), ("b", "b")] == list(d2.items()) def test_clear( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_insensitive_multidict_class([("KEY", "one")], key="two", foo="bar") d.clear() assert d == {} assert list(d.items()) == [] def test_del( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_insensitive_multidict_class( [("KEY", "one"), ("key", "two")], foo="bar", ) del d["key"] assert d == {"foo": "bar"} assert list(d.items()) == [("foo", "bar")] with pytest.raises(KeyError, match="key"): del d["key"] def test_set_default( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_insensitive_multidict_class( [("KEY", "one"), ("key", "two")], foo="bar", ) assert "one" == d.setdefault("key", "three") assert "three" == d.setdefault("otherkey", "three") assert "otherkey" in d assert ("otherkey", "three") in d.items() assert "three" == d["OTHERKEY"] def test_popitem( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_insensitive_multidict_class() d.add("KEY", "val1") d.add("key", "val2") pair = d.popitem() assert ("KEY", "val1") == pair assert isinstance(pair[0], str) assert [("key", "val2")] == list(d.items()) def test_popitem_empty_multidict( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) 
-> None: d = case_insensitive_multidict_class() with pytest.raises(KeyError): d.popitem() def test_pop( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_insensitive_multidict_class() d.add("KEY", "val1") d.add("key", "val2") assert "val1" == d.pop("KEY") assert {"key": "val2"} == d def test_pop_lowercase( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_insensitive_multidict_class() d.add("KEY", "val1") d.add("key", "val2") assert "val1" == d.pop("key") assert {"key": "val2"} == d def test_pop_default( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_insensitive_multidict_class(OTHER="val") assert "default" == d.pop("key", "default") assert "other" in d def test_pop_raises( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d = case_insensitive_multidict_class(OTHER="val") with pytest.raises(KeyError, match="KEY"): d.pop("KEY") assert "other" in d def test_extend_with_istr( self, case_insensitive_multidict_class: type[CIMultiDict[str]], case_insensitive_str_class: type[istr], ) -> None: us = case_insensitive_str_class("aBc") d = case_insensitive_multidict_class() d.extend([(us, "val")]) assert [("aBc", "val")] == list(d.items()) def test_copy_istr( self, case_insensitive_multidict_class: type[CIMultiDict[str]], case_insensitive_str_class: type[istr], ) -> None: d = case_insensitive_multidict_class({case_insensitive_str_class("Foo"): "bar"}) d2 = d.copy() assert d == d2 def test_eq( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: d1 = case_insensitive_multidict_class(Key="val") d2 = case_insensitive_multidict_class(KEY="val") assert d1 == d2 @pytest.mark.skipif( sys.implementation.name == "pypy", reason="getsizeof() is not implemented on PyPy", ) def test_sizeof( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: md = case_insensitive_multidict_class() s1 = sys.getsizeof(md) for i in string.ascii_lowercase: for j in string.ascii_uppercase: md[i + j] = i + j # multidict should be resized s2 = sys.getsizeof(md) assert s2 > s1 @pytest.mark.skipif( sys.implementation.name == "pypy", reason="getsizeof() is not implemented on PyPy", ) def test_min_sizeof( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: md = case_insensitive_multidict_class() assert sys.getsizeof(md) < 1024 def test_issue_620_items( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: # https://github.com/aio-libs/multidict/issues/620 d = case_insensitive_multidict_class({"a": "123, 456", "b": "789"}) before_mutation_items = d.items() d["c"] = "000" # This causes an error on pypy. list(before_mutation_items) def test_issue_620_keys( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: # https://github.com/aio-libs/multidict/issues/620 d = case_insensitive_multidict_class({"a": "123, 456", "b": "789"}) before_mutation_keys = d.keys() d["c"] = "000" # This causes an error on pypy. list(before_mutation_keys) def test_issue_620_values( self, case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: # https://github.com/aio-libs/multidict/issues/620 d = case_insensitive_multidict_class({"a": "123, 456", "b": "789"}) before_mutation_values = d.values() d["c"] = "000" # This causes an error on pypy. 
list(before_mutation_values) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/test_mypy.py0000644000175100001660000001637614766036355016715 0ustar00runnerdocker# This file doesn't use test parametrization because mypy knows nothing about it. # Concrete types are required import multidict def test_classes_not_abstract() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) multidict.MultiDictProxy(d1) multidict.CIMultiDictProxy(d2) def test_getitem() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) key = multidict.istr("a") assert d1["a"] == "b" assert d2["a"] == "b" assert d3["a"] == "b" assert d4["a"] == "b" assert d1[key] == "b" assert d2[key] == "b" assert d3[key] == "b" assert d4[key] == "b" def test_get() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) key = multidict.istr("a") assert d1.get("a") == "b" assert d2.get("a") == "b" assert d3.get("a") == "b" assert d4.get("a") == "b" assert d1.get(key) == "b" assert d2.get(key) == "b" assert d3.get(key) == "b" assert d4.get(key) == "b" def test_get_default() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) key = multidict.istr("b") assert d1.get("b", "d") == "d" assert d2.get("b", "d") == "d" assert d3.get("b", "d") == "d" assert d4.get("b", "d") == "d" assert d1.get(key, "d") == "d" assert d2.get(key, "d") == "d" assert d3.get(key, "d") == "d" assert d4.get(key, "d") == "d" def test_getone() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) key = multidict.istr("a") assert d1.getone("a") == "b" assert d2.getone("a") == "b" assert d3.getone("a") == "b" assert d4.getone("a") == "b" assert d1.getone(key) == "b" assert d2.getone(key) == "b" assert d3.getone(key) == "b" assert d4.getone(key) == "b" def test_getone_default() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) key = multidict.istr("b") assert d1.getone("b", 1) == 1 assert d2.getone("b", 1) == 1 assert d3.getone("b", 1) == 1 assert d4.getone("b", 1) == 1 assert d1.getone(key, 1) == 1 assert d2.getone(key, 1) == 1 assert d3.getone(key, 1) == 1 assert d4.getone(key, 1) == 1 def test_getall() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) key = multidict.istr("a") assert d1.getall("a") == ["b"] assert d2.getall("a") == ["b"] assert d3.getall("a") == ["b"] assert d4.getall("a") == ["b"] assert d1.getall(key) == ["b"] assert d2.getall(key) == ["b"] assert d3.getall(key) == ["b"] assert d4.getall(key) == ["b"] def test_getall_default() -> None: d1: multidict.MultiDict[str] = 
multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) key = multidict.istr("b") assert d1.getall("b", 1) == 1 assert d2.getall("b", 1) == 1 assert d3.getall("b", 1) == 1 assert d4.getall("b", 1) == 1 assert d1.getall(key, 1) == 1 assert d2.getall(key, 1) == 1 assert d3.getall(key, 1) == 1 assert d4.getall(key, 1) == 1 def test_copy() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) assert d1.copy() == d1 assert d2.copy() == d2 assert d3.copy() == d1 assert d4.copy() == d2 def test_iter() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) for i in d1: i.lower() # str-specific class for i in d2: i.lower() # str-specific class for i in d3: i.lower() # str-specific class for i in d4: i.lower() # str-specific class def test_setitem() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) key = multidict.istr("a") d1["a"] = "b" d2["a"] = "b" d1[key] = "b" d2[key] = "b" def test_delitem() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) del d1["a"] del d2["a"] key = multidict.istr("a") d3: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d4: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) del d3[key] del d4[key] def test_additem() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) key = multidict.istr("a") d1.add("a", "b") d2.add("a", "b") d1.add(key, "b") d2.add(key, "b") def test_extend_mapping() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) key = multidict.istr("a") d1.extend({"a": "b"}) d2.extend({"a": "b"}) d1.extend({key: "b"}) d2.extend({key: "b"}) def test_update_mapping() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) key = multidict.istr("a") d1.update({"a": "b"}) d2.update({"a": "b"}) d1.update({key: "b"}) d2.update({key: "b"}) def test_popone() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) assert d1.popone("a") == "b" assert d2.popone("a") == "b" key = multidict.istr("a") d1 = multidict.MultiDict({"a": "b"}) d2 = multidict.CIMultiDict({"a": "b"}) assert d1.popone(key) == "b" assert d2.popone(key) == "b" def test_popall() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) assert d1.popall("a") == ["b"] assert d2.popall("a") == ["b"] key = multidict.istr("a") d1 = multidict.MultiDict({"a": "b"}) d2 = multidict.CIMultiDict({"a": "b"}) assert d1.popall(key) == ["b"] assert d2.popall(key) == ["b"] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/test_pickle.py0000644000175100001660000000266614766036355017173 0ustar00runnerdockerimport pickle from pathlib 
import Path from typing import TYPE_CHECKING import pytest from multidict import MultiDict, MultiDictProxy if TYPE_CHECKING: from conftest import MultidictImplementation here = Path(__file__).resolve().parent def test_pickle( any_multidict_class: type[MultiDict[int]], pickle_protocol: int ) -> None: d = any_multidict_class([("a", 1), ("a", 2)]) pbytes = pickle.dumps(d, pickle_protocol) obj = pickle.loads(pbytes) assert d == obj assert isinstance(obj, any_multidict_class) def test_pickle_proxy( any_multidict_class: type[MultiDict[int]], any_multidict_proxy_class: type[MultiDictProxy[int]], ) -> None: d = any_multidict_class([("a", 1), ("a", 2)]) proxy = any_multidict_proxy_class(d) with pytest.raises(TypeError): pickle.dumps(proxy) def test_load_from_file( any_multidict_class: type[MultiDict[int]], multidict_implementation: "MultidictImplementation", pickle_protocol: int, ) -> None: multidict_class_name = any_multidict_class.__name__ pickle_file_basename = "-".join( ( multidict_class_name.lower(), multidict_implementation.tag, ) ) d = any_multidict_class([("a", 1), ("a", 2)]) fname = f"{pickle_file_basename}.pickle.{pickle_protocol}" p = here / fname with p.open("rb") as f: obj = pickle.load(f) assert d == obj assert isinstance(obj, any_multidict_class) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/test_types.py0000644000175100001660000000655414766036355017070 0ustar00runnerdockerimport types import pytest def test_proxies(multidict_module: types.ModuleType) -> None: assert issubclass( multidict_module.CIMultiDictProxy, multidict_module.MultiDictProxy, ) def test_dicts(multidict_module: types.ModuleType) -> None: assert issubclass(multidict_module.CIMultiDict, multidict_module.MultiDict) def test_proxy_not_inherited_from_dict(multidict_module: types.ModuleType) -> None: assert not issubclass(multidict_module.MultiDictProxy, multidict_module.MultiDict) def test_dict_not_inherited_from_proxy(multidict_module: types.ModuleType) -> None: assert not issubclass(multidict_module.MultiDict, multidict_module.MultiDictProxy) def test_multidict_proxy_copy_type(multidict_module: types.ModuleType) -> None: d = multidict_module.MultiDict(key="val") p = multidict_module.MultiDictProxy(d) assert isinstance(p.copy(), multidict_module.MultiDict) def test_cimultidict_proxy_copy_type(multidict_module: types.ModuleType) -> None: d = multidict_module.CIMultiDict(key="val") p = multidict_module.CIMultiDictProxy(d) assert isinstance(p.copy(), multidict_module.CIMultiDict) def test_create_multidict_proxy_from_nonmultidict( multidict_module: types.ModuleType, ) -> None: with pytest.raises(TypeError): multidict_module.MultiDictProxy({}) def test_create_multidict_proxy_from_cimultidict( multidict_module: types.ModuleType, ) -> None: d = multidict_module.CIMultiDict(key="val") p = multidict_module.MultiDictProxy(d) assert p == d def test_create_multidict_proxy_from_multidict_proxy_from_mdict( multidict_module: types.ModuleType, ) -> None: d = multidict_module.MultiDict(key="val") p = multidict_module.MultiDictProxy(d) assert p == d p2 = multidict_module.MultiDictProxy(p) assert p2 == p def test_create_cimultidict_proxy_from_cimultidict_proxy_from_ci( multidict_module: types.ModuleType, ) -> None: d = multidict_module.CIMultiDict(key="val") p = multidict_module.CIMultiDictProxy(d) assert p == d p2 = multidict_module.CIMultiDictProxy(p) assert p2 == p def test_create_cimultidict_proxy_from_nonmultidict( multidict_module: types.ModuleType, ) -> None: 
with pytest.raises( TypeError, match=( "ctor requires CIMultiDict or CIMultiDictProxy instance, " "not " ), ): multidict_module.CIMultiDictProxy({}) def test_create_ci_multidict_proxy_from_multidict( multidict_module: types.ModuleType, ) -> None: d = multidict_module.MultiDict(key="val") with pytest.raises( TypeError, match=( "ctor requires CIMultiDict or CIMultiDictProxy instance, " "not " ), ): multidict_module.CIMultiDictProxy(d) def test_generic_alias(multidict_module: types.ModuleType) -> None: assert multidict_module.MultiDict[int] == types.GenericAlias( multidict_module.MultiDict, (int,) ) assert multidict_module.MultiDictProxy[int] == types.GenericAlias( multidict_module.MultiDictProxy, (int,) ) assert multidict_module.CIMultiDict[int] == types.GenericAlias( multidict_module.CIMultiDict, (int,) ) assert multidict_module.CIMultiDictProxy[int] == types.GenericAlias( multidict_module.CIMultiDictProxy, (int,) ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/test_update.py0000644000175100001660000000774314766036355017207 0ustar00runnerdockerfrom collections import deque from typing import Union from multidict import CIMultiDict, MultiDict _MD_Classes = Union[type[MultiDict[int]], type[CIMultiDict[int]]] def test_update_replace(any_multidict_class: _MD_Classes) -> None: obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)]) obj2 = any_multidict_class([("a", 4), ("b", 5), ("a", 6)]) obj1.update(obj2) expected = [("a", 4), ("b", 5), ("a", 6), ("c", 10)] assert list(obj1.items()) == expected def test_update_append(any_multidict_class: _MD_Classes) -> None: obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)]) obj2 = any_multidict_class([("a", 4), ("a", 5), ("a", 6)]) obj1.update(obj2) expected = [("a", 4), ("b", 2), ("a", 5), ("c", 10), ("a", 6)] assert list(obj1.items()) == expected def test_update_remove(any_multidict_class: _MD_Classes) -> None: obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)]) obj2 = any_multidict_class([("a", 4)]) obj1.update(obj2) expected = [("a", 4), ("b", 2), ("c", 10)] assert list(obj1.items()) == expected def test_update_replace_seq(any_multidict_class: _MD_Classes) -> None: obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)]) obj2 = [("a", 4), ("b", 5), ("a", 6)] obj1.update(obj2) expected = [("a", 4), ("b", 5), ("a", 6), ("c", 10)] assert list(obj1.items()) == expected def test_update_replace_seq2(any_multidict_class: _MD_Classes) -> None: obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)]) obj1.update([("a", 4)], b=5, a=6) expected = [("a", 4), ("b", 5), ("a", 6), ("c", 10)] assert list(obj1.items()) == expected def test_update_append_seq(any_multidict_class: _MD_Classes) -> None: obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)]) obj2 = [("a", 4), ("a", 5), ("a", 6)] obj1.update(obj2) expected = [("a", 4), ("b", 2), ("a", 5), ("c", 10), ("a", 6)] assert list(obj1.items()) == expected def test_update_remove_seq(any_multidict_class: _MD_Classes) -> None: obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)]) obj2 = [("a", 4)] obj1.update(obj2) expected = [("a", 4), ("b", 2), ("c", 10)] assert list(obj1.items()) == expected def test_update_md(case_sensitive_multidict_class: type[CIMultiDict[str]]) -> None: d = case_sensitive_multidict_class() d.add("key", "val1") d.add("key", "val2") d.add("key2", "val3") d.update(key="val") assert [("key", "val"), ("key2", "val3")] == 
list(d.items()) def test_update_istr_ci_md( case_insensitive_multidict_class: type[CIMultiDict[str]], case_insensitive_str_class: type[str], ) -> None: d = case_insensitive_multidict_class() d.add(case_insensitive_str_class("KEY"), "val1") d.add("key", "val2") d.add("key2", "val3") d.update({case_insensitive_str_class("key"): "val"}) assert [("key", "val"), ("key2", "val3")] == list(d.items()) def test_update_ci_md(case_insensitive_multidict_class: type[CIMultiDict[str]]) -> None: d = case_insensitive_multidict_class() d.add("KEY", "val1") d.add("key", "val2") d.add("key2", "val3") d.update(Key="val") assert [("Key", "val"), ("key2", "val3")] == list(d.items()) def test_update_list_arg_and_kwds(any_multidict_class: _MD_Classes) -> None: obj = any_multidict_class() arg = [("a", 1)] obj.update(arg, b=2) assert list(obj.items()) == [("a", 1), ("b", 2)] assert arg == [("a", 1)] def test_update_tuple_arg_and_kwds(any_multidict_class: _MD_Classes) -> None: obj = any_multidict_class() arg = (("a", 1),) obj.update(arg, b=2) assert list(obj.items()) == [("a", 1), ("b", 2)] assert arg == (("a", 1),) def test_update_deque_arg_and_kwds(any_multidict_class: _MD_Classes) -> None: obj = any_multidict_class() arg = deque([("a", 1)]) obj.update(arg, b=2) assert list(obj.items()) == [("a", 1), ("b", 2)] assert arg == deque([("a", 1)]) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1742224621.0 multidict-6.2.0/tests/test_version.py0000644000175100001660000001430614766036355017403 0ustar00runnerdockerfrom collections.abc import Callable from typing import TypeVar, Union import pytest from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy _T = TypeVar("_T") _MD_Types = Union[ MultiDict[_T], CIMultiDict[_T], MultiDictProxy[_T], CIMultiDictProxy[_T] ] GetVersion = Callable[[_MD_Types[_T]], int] def test_getversion_bad_param(multidict_getversion_callable: GetVersion[str]) -> None: with pytest.raises(TypeError): multidict_getversion_callable(1) # type: ignore[arg-type] def test_ctor( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m1 = any_multidict_class() v1 = multidict_getversion_callable(m1) m2 = any_multidict_class() v2 = multidict_getversion_callable(m2) assert v1 != v2 def test_add( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() v = multidict_getversion_callable(m) m.add("key", "val") assert multidict_getversion_callable(m) > v def test_delitem( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v = multidict_getversion_callable(m) del m["key"] assert multidict_getversion_callable(m) > v def test_delitem_not_found( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v = multidict_getversion_callable(m) with pytest.raises(KeyError): del m["notfound"] assert multidict_getversion_callable(m) == v def test_setitem( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v = multidict_getversion_callable(m) m["key"] = "val2" assert multidict_getversion_callable(m) > v def test_setitem_not_found( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v 
= multidict_getversion_callable(m) m["notfound"] = "val2" assert multidict_getversion_callable(m) > v def test_clear( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v = multidict_getversion_callable(m) m.clear() assert multidict_getversion_callable(m) > v def test_setdefault( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v = multidict_getversion_callable(m) m.setdefault("key2", "val2") assert multidict_getversion_callable(m) > v def test_popone( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v = multidict_getversion_callable(m) m.popone("key") assert multidict_getversion_callable(m) > v def test_popone_default( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v = multidict_getversion_callable(m) m.popone("key2", "default") assert multidict_getversion_callable(m) == v def test_popone_key_error( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v = multidict_getversion_callable(m) with pytest.raises(KeyError): m.popone("key2") assert multidict_getversion_callable(m) == v def test_pop( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v = multidict_getversion_callable(m) m.pop("key") assert multidict_getversion_callable(m) > v def test_pop_default( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v = multidict_getversion_callable(m) m.pop("key2", "default") assert multidict_getversion_callable(m) == v def test_pop_key_error( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v = multidict_getversion_callable(m) with pytest.raises(KeyError): m.pop("key2") assert multidict_getversion_callable(m) == v def test_popall( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v = multidict_getversion_callable(m) m.popall("key") assert multidict_getversion_callable(m) > v def test_popall_default( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v = multidict_getversion_callable(m) m.popall("key2", "default") assert multidict_getversion_callable(m) == v def test_popall_key_error( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v = multidict_getversion_callable(m) with pytest.raises(KeyError): m.popall("key2") assert multidict_getversion_callable(m) == v def test_popitem( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: m = any_multidict_class() m.add("key", "val") v = multidict_getversion_callable(m) m.popitem() assert multidict_getversion_callable(m) > v def test_popitem_key_error( any_multidict_class: type[MultiDict[str]], multidict_getversion_callable: GetVersion[str], ) -> None: 
m = any_multidict_class() v = multidict_getversion_callable(m) with pytest.raises(KeyError): m.popitem() assert multidict_getversion_callable(m) == v
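
The test modules above all exercise the same small public surface of ``multidict``. As a quick orientation aid, here is a minimal standalone sketch (illustrative only, not part of the shipped test suite) of the behaviours those tests rely on: case-insensitive lookup with preserved key casing, ``add()`` versus item assignment, read-only proxies, and the ``getversion()`` change counter::

    from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, getversion, istr

    d: CIMultiDict[str] = CIMultiDict()
    d["KEY"] = "one"     # item assignment replaces all existing values for the key
    d.add("key", "two")  # add() appends another value under the same (case-folded) key
    assert d.getall("Key") == ["one", "two"]  # lookups ignore case...
    assert list(d.keys()) == ["KEY", "key"]   # ...but the original key casing is kept
    assert d[istr("kEy")] == "one"            # istr keys mix freely with plain str keys

    # A proxy is a read-only view over a live multidict; copy() returns a mutable one.
    proxy = CIMultiDictProxy(d)
    assert proxy["key"] == "one"
    assert isinstance(proxy.copy(), CIMultiDict)

    # getversion() reports a counter that grows with every mutation of the mapping.
    m: MultiDict[str] = MultiDict(a="1")
    v = getversion(m)
    m.add("b", "2")
    assert getversion(m) > v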