././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744320002.8141801 multidict-6.4.3/0000755000175100001660000000000014776033003013130 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/.coveragerc0000644000175100001660000000136414776033000015252 0ustar00runnerdocker[html] show_contexts = true skip_covered = false [paths] _site-packages-to-src-mapping = . */lib/pypy*/site-packages */lib/python*/site-packages *\Lib\site-packages [report] exclude_also = if TYPE_CHECKING assert False : \.\.\.(\s*#.*)?$ ^ +\.\.\.$ # fail_under = 98.95 skip_covered = true skip_empty = true show_missing = true [run] branch = true cover_pylib = false # https://coverage.rtfd.io/en/latest/contexts.html#dynamic-contexts # dynamic_context = test_function # conflicts with `pytest-cov` if set here omit = setup.py # FIXME: migrate to a snapshotting plugin. See #922, #924 and #938. tests/gen_pickles.py parallel = true # plugins = # covdefaults relative_files = true source = . 
source_pkgs = multidict ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1744320002.79718 multidict-6.4.3/CHANGES/0000755000175100001660000000000014776033003014200 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/CHANGES/.TEMPLATE.rst0000644000175100001660000000467514776033000016234 0ustar00runnerdocker{# TOWNCRIER TEMPLATE #} *({{ versiondata.date }})* {% for section, _ in sections.items() %} {% set underline = underlines[0] %}{% if section %}{{section}} {{ underline * section|length }}{% set underline = underlines[1] %} {% endif %} {% if sections[section] %} {% for category, val in definitions.items() if category in sections[section]%} {{ definitions[category]['name'] }} {{ underline * definitions[category]['name']|length }} {% if definitions[category]['showcontent'] %} {% for text, change_note_refs in sections[section][category].items() %} - {{ text }} {{- '\n' * 2 -}} {#- NOTE: Replacing 'e' with 'f' is a hack that prevents Jinja's `int` NOTE: filter internal implementation from treating the input as an NOTE: infinite float when it looks like a scientific notation (with a NOTE: single 'e' char in between digits), raising an `OverflowError`, NOTE: subsequently. 'f' is still a hex letter so it won't affect the NOTE: check for whether it's a (short or long) commit hash or not. 
Ref: https://github.com/pallets/jinja/issues/1921 -#} {%- set pr_issue_numbers = change_note_refs | map('lower') | map('replace', 'e', 'f') | map('int', default=None) | select('integer') | map('string') | list -%} {%- set arbitrary_refs = [] -%} {%- set commit_refs = [] -%} {%- with -%} {%- set commit_ref_candidates = change_note_refs | reject('in', pr_issue_numbers) -%} {%- for cf in commit_ref_candidates -%} {%- if cf | length in (7, 8, 40) and cf | int(default=None, base=16) is not none -%} {%- set _ = commit_refs.append(cf) -%} {%- else -%} {%- set _ = arbitrary_refs.append(cf) -%} {%- endif -%} {%- endfor -%} {%- endwith -%} {% if pr_issue_numbers %} *Related issues and pull requests on GitHub:* :issue:`{{ pr_issue_numbers | join('`, :issue:`') }}`. {{- '\n' * 2 -}} {%- endif -%} {% if commit_refs %} *Related commits on GitHub:* :commit:`{{ commit_refs | join('`, :commit:`') }}`. {{- '\n' * 2 -}} {%- endif -%} {% if arbitrary_refs %} *Unlinked references:* {{ arbitrary_refs | join(', ') }}. {{- '\n' * 2 -}} {%- endif -%} {% endfor %} {% else %} - {{ sections[section][category]['']|join(', ') }} {% endif %} {% if sections[section][category]|length == 0 %} No significant changes. {% else %} {% endif %} {% endfor %} {% else %} No significant changes. 
{% endif %} {% endfor %} ---- {{ '\n' * 2 }} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/CHANGES/.gitignore0000644000175100001660000000057614776033000016175 0ustar00runnerdocker* !.TEMPLATE.rst !.gitignore !README.rst !*.bugfix !*.bugfix.rst !*.bugfix.*.rst !*.breaking !*.breaking.rst !*.breaking.*.rst !*.contrib !*.contrib.rst !*.contrib.*.rst !*.deprecation !*.deprecation.rst !*.deprecation.*.rst !*.doc !*.doc.rst !*.doc.*.rst !*.feature !*.feature.rst !*.feature.*.rst !*.misc !*.misc.rst !*.misc.*.rst !*.packaging !*.packaging.rst !*.packaging.*.rst ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/CHANGES/README.rst0000644000175100001660000001050614776033000015666 0ustar00runnerdocker.. _Adding change notes with your PRs: Adding change notes with your PRs ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ It is very important to maintain a log for news of how updating to the new version of the software will affect end-users. This is why we enforce collection of the change fragment files in pull requests as per `Towncrier philosophy`_. The idea is that when somebody makes a change, they must record the bits that would affect end-users, only including information that would be useful to them. Then, when the maintainers publish a new release, they'll automatically use these records to compose a change log for the respective version. It is important to understand that including unnecessary low-level implementation related details generates noise that is not particularly useful to the end-users most of the time. And so such details should be recorded in the Git history rather than a changelog. Alright! So how to add a news fragment? ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ``multidict`` uses `towncrier `_ for changelog management. To submit a change note about your PR, add a text file into the ``CHANGES/`` folder. 
It should contain an explanation of what applying this PR will change in the way end-users interact with the project. One sentence is usually enough but feel free to add as many details as you feel necessary for the users to understand what it means. **Use the past tense** for the text in your fragment because, combined with others, it will be a part of the "news digest" telling the readers **what changed** in a specific version of the library *since the previous version*. You should also use *reStructuredText* syntax for highlighting code (inline or block), linking parts of the docs or external sites. However, you do not need to reference the issue or PR numbers here as *towncrier* will automatically add a reference to all of the affected issues when rendering the news file. If you wish to sign your change, feel free to add ``-- by :user:`github-username``` at the end (replace ``github-username`` with your own!). Finally, name your file following the convention that Towncrier understands: it should start with the number of an issue or a PR followed by a dot, then add a patch type, like ``feature``, ``doc``, ``contrib`` etc., and add ``.rst`` as a suffix. If you need to add more than one fragment, you may add an optional sequence number (delimited with another period) between the type and the suffix. In general the name will follow ``..rst`` pattern, where the categories are: - ``bugfix``: A bug fix for something we deemed an improper undesired behavior that got corrected in the release to match pre-agreed expectations. - ``feature``: A new behavior, public APIs. That sort of stuff. - ``deprecation``: A declaration of future API removals and breaking changes in behavior. - ``breaking``: When something public gets removed in a breaking way. Could be deprecated in an earlier release. - ``doc``: Notable updates to the documentation structure or build process. - ``packaging``: Notes for downstreams about unobvious side effects and tooling. 
Changes in the test invocation considerations and runtime assumptions. - ``contrib``: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. - ``misc``: Changes that are hard to assign to any of the above categories. A pull request may have more than one of these components, for example a code change may introduce a new feature that deprecates an old feature, in which case two fragments should be added. It is not necessary to make a separate documentation fragment for documentation changes accompanying the relevant code changes. Examples for adding changelog entries to your Pull Requests ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File :file:`CHANGES/603.removal.1.rst`: .. code-block:: rst Dropped Python 3.5 support; Python 3.6 is the minimal supported Python version. File :file:`CHANGES/550.bugfix.rst`: .. code-block:: rst Started shipping Windows wheels for the x86 architecture. File :file:`CHANGES/553.feature.rst`: .. code-block:: rst Added support for ``GenericAliases`` (``MultiDict[str]``) under Python 3.9 and higher. .. _Towncrier philosophy: https://towncrier.readthedocs.io/en/stable/#philosophy ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/CHANGES.rst0000644000175100001660000004540614776033000014740 0ustar00runnerdocker========= Changelog ========= .. You should *NOT* be adding new change log entries to this file, this file is managed by towncrier. You *may* edit previous change logs to fix problems like typo corrections or such. To add a new change log entry, please see https://pip.pypa.io/en/latest/development/#adding-a-news-entry we named the news folder "changes". WARNING: Don't drop the next directive! .. towncrier release notes start 6.4.3 ===== *(2025-04-10)* Bug fixes --------- - Fixed building the library in debug mode. *Related issues and pull requests on GitHub:* :issue:`1144`. 
- Fixed custom ``PyType_GetModuleByDef()`` when non-heap type object was passed. *Related issues and pull requests on GitHub:* :issue:`1147`. Packaging updates and notes for downstreams ------------------------------------------- - Added the ability to build in debug mode by setting :envvar:`MULTIDICT_DEBUG_BUILD` in the environment -- by :user:`bdraco`. *Related issues and pull requests on GitHub:* :issue:`1145`. ---- 6.4.2 ===== *(2025-04-09)* Bug fixes --------- - Fixed a segmentation fault when creating subclassed :py:class:`~multidict.MultiDict` objects on Python < 3.11 -- by :user:`bdraco`. The problem first appeared in 6.4.0 *Related issues and pull requests on GitHub:* :issue:`1141`. ---- 6.4.1 ===== *(2025-04-09)* No significant changes. ---- 6.4.0 ===== *(2025-04-09)* Bug fixes --------- - Fixed a memory leak creating new :class:`~multidict.istr` objects -- by :user:`bdraco`. The leak was introduced in 6.3.0 *Related issues and pull requests on GitHub:* :issue:`1133`. - Fixed reference counting when calling :py:meth:`multidict.MultiDict.update` -- by :user:`bdraco`. The leak was introduced in 4.4.0 *Related issues and pull requests on GitHub:* :issue:`1135`. Features -------- - Switched C Extension to use heap types and the module state. *Related issues and pull requests on GitHub:* :issue:`1125`. - Started building armv7l wheels -- by :user:`bdraco`. *Related issues and pull requests on GitHub:* :issue:`1127`. ---- 6.3.2 ===== *(2025-04-03)* Bug fixes --------- - Resolved a memory leak by ensuring proper reference count decrementation -- by :user:`asvetlov` and :user:`bdraco`. *Related issues and pull requests on GitHub:* :issue:`1121`. ---- 6.3.1 ===== *(2025-04-01)* Bug fixes --------- - Fixed keys not becoming case-insensitive when :class:`multidict.CIMultiDict` is created by passing in a :class:`multidict.MultiDict` -- by :user:`bdraco`. *Related issues and pull requests on GitHub:* :issue:`1112`. 
- Fixed the pure Python version mutating the original :class:`multidict.MultiDict` when creating a new :class:`multidict.CIMultiDict` from an existing one when keyword arguments are also passed -- by :user:`bdraco`. *Related issues and pull requests on GitHub:* :issue:`1113`. - Prevented crashing with a segfault when :func:`repr` is called for recursive multidicts and their proxies and views. *Related issues and pull requests on GitHub:* :issue:`1115`. ---- 6.3.0 ===== *(2025-03-31)* Bug fixes --------- - Set operations for ``KeysView`` and ``ItemsView`` of case-insensitive multidicts and their proxies are processed in case-insensitive manner. *Related issues and pull requests on GitHub:* :issue:`965`. - Rewrote :class:`multidict.CIMultiDict` and it proxy to always return :class:`multidict.istr` keys. ``istr`` is derived from :class:`str`, thus the change is backward compatible. The performance boost is about 15% for some operations for C Extension, pure Python implementation have got a visible (15% - 230%) speedup as well. *Related issues and pull requests on GitHub:* :issue:`1097`. - Fixed a crash when extending a multidict from multidict proxy if C Extensions were used. *Related issues and pull requests on GitHub:* :issue:`1100`. Features -------- - Implemented a custom parser for ``METH_FASTCALL | METH_KEYWORDS`` protocol -- by :user:`asvetlov`. The patch re-enables fast call protocol in the :py:mod:`multidict` C Extension. Speedup is about 25%-30% for the library benchmarks for Python 3.12+. *Related issues and pull requests on GitHub:* :issue:`1070`. - The C-extension no longer pre-allocates a Python exception object in lookup-related methods of :py:class:`~multidict.MultiDict` when the passed-in *key* is not found but *default* value is provided. Namely, this affects :py:meth:`MultiDict.getone() `, :py:meth:`MultiDict.getall() `, :py:meth:`MultiDict.get() `, :py:meth:`MultiDict.pop() `, :py:meth:`MultiDict.popone() `, and :py:meth:`MultiDict.popall() `. 
Additionally, the :py:class:`~multidict.MultiDict` comparison with regular :py:class:`dict`\ ionaries is now about 60% faster on Python 3.13+ in the fallback-to-default case. *Related issues and pull requests on GitHub:* :issue:`1078`. - Implemented ``__repr__()`` for C Extension classes in C. The speedup is about 2.5 times. *Related issues and pull requests on GitHub:* :issue:`1081`. - Made C version of :class:`multidict.istr` pickleable. *Related issues and pull requests on GitHub:* :issue:`1098`. - Optimized multidict creation and extending / updating if C Extensions are used. The speedup is between 25% and 70% depending on the usage scenario. *Related issues and pull requests on GitHub:* :issue:`1101`. - :meth:`multidict.MultiDict.popitem` is changed to remove the latest entry instead of the first. It gives O(1) amortized complexity. The standard :meth:`dict.popitem` removes the last entry also. *Related issues and pull requests on GitHub:* :issue:`1105`. Contributor-facing changes -------------------------- - Started running benchmarks for the pure Python implementation in addition to the C implementation -- by :user:`bdraco`. *Related issues and pull requests on GitHub:* :issue:`1092`. - The the project-wide Codecov_ metric is no longer reported via GitHub Checks API. The combined value is not very useful because one of the sources (MyPy) cannot reach 100% with the current state of the ecosystem. We may want to reconsider in the future. Instead, we now have two separate “runtime coverage” metrics for library code and tests. They are to be kept at 100% at all times. And the “type coverage” metric will remain advisory, at a lower threshold. The default patch metric check is renamed to “runtime” to better reflect its semantics. This one will also require 100% coverage. Another “typing” patch coverage metric is now reported alongside it. It's considered advisory, just like its project counterpart. 
When looking at Codecov_, one will likely want to look at MyPy and pytest flags separately. It is usually best to avoid looking at the PR pages that sometimes display combined coverage incorrectly. The change additionally disables the deprecated GitHub Annotations integration in Codecov_. Finally, the badge coloring range now starts at 100%. .. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg?flag=pytest :target: https://codecov.io/gh/aio-libs/multidict?flags[]=pytest :alt: Coverage metrics -- by :user:`webknjaz` *Related issues and pull requests on GitHub:* :issue:`1093`. Miscellaneous internal changes ------------------------------ - Synchronized :file:`pythoncapi_compat.h` with the latest available version. *Related issues and pull requests on GitHub:* :issue:`1063`. - Moved registering ABCs for C Extension classes from C to Python. *Related issues and pull requests on GitHub:* :issue:`1083`. - Refactored the internal ``pair_list`` implementation. *Related issues and pull requests on GitHub:* :issue:`1084`. - Implemented views comparison and disjoints in C instead of Python helpers. The performance boost is about 40%. *Related issues and pull requests on GitHub:* :issue:`1096`. ---- 6.2.0 ====== *(2025-03-17)* Bug fixes --------- - Fixed ``in`` checks throwing an exception instead of returning :data:`False` when testing non-strings. *Related issues and pull requests on GitHub:* :issue:`1045`. - Fixed a leak when the last accessed module in ``PyInit__multidict()`` init is not released. *Related issues and pull requests on GitHub:* :issue:`1061`. Features -------- - Implemented support for the free-threaded build of CPython 3.13 -- by :user:`lysnikolaou`. *Related issues and pull requests on GitHub:* :issue:`1015`. Packaging updates and notes for downstreams ------------------------------------------- - Started publishing wheels made for the free-threaded build of CPython 3.13 -- by :user:`lysnikolaou`. 
*Related issues and pull requests on GitHub:* :issue:`1015`. Miscellaneous internal changes ------------------------------ - Used stricter typing across the code base, resulting in improved typing accuracy across multidict classes. Funded by an ``NLnet`` grant. *Related issues and pull requests on GitHub:* :issue:`1046`. ---- 6.1.0 (2024-09-09) ================== Bug fixes --------- - Covered the unreachable code path in ``multidict._multidict_base._abc_itemsview_register()`` with typing -- by :user:`skinnyBat`. *Related issues and pull requests on GitHub:* :issue:`928`. Features -------- - Added support for Python 3.13 -- by :user:`bdraco`. *Related issues and pull requests on GitHub:* :issue:`1002`. Removals and backward incompatible breaking changes --------------------------------------------------- - Removed Python 3.7 support -- by :user:`bdraco`. *Related issues and pull requests on GitHub:* :issue:`997`. Contributor-facing changes -------------------------- - Added tests to have full code coverage of the ``multidict._multidict_base._viewbaseset_richcmp()`` function -- by :user:`skinnyBat`. *Related issues and pull requests on GitHub:* :issue:`928`. - `The deprecated `_ ``::set-output`` workflow command has been replaced by the ``$GITHUB_OUTPUT`` environment variable in the GitHub Actions CI/CD workflow definition. *Related issues and pull requests on GitHub:* :issue:`940`. - `codecov-action `_ has been temporarily downgraded to ``v3`` in the GitHub Actions CI/CD workflow definitions in order to fix uploading coverage to Codecov_. See `this issue `_ for more details. .. _Codecov: https://codecov.io/gh/aio-libs/multidict?flags[]=pytest *Related issues and pull requests on GitHub:* :issue:`941`. - In the GitHub Actions CI/CD workflow definition, the ``Get pip cache dir`` step has been fixed for Windows runners by adding ``shell: bash``. See `actions/runner#2224 `_ for more details. *Related issues and pull requests on GitHub:* :issue:`942`. 
- Interpolation of the ``pip`` cache keys has been fixed by adding missing ``$`` syntax in the GitHub Actions CI/CD workflow definition. *Related issues and pull requests on GitHub:* :issue:`943`. ---- 6.0.5 (2024-02-01) ================== Bug fixes --------- - Upgraded the C-API macros that have been deprecated in Python 3.9 and later removed in 3.13 -- by :user:`iemelyanov`. *Related issues and pull requests on GitHub:* :issue:`862`, :issue:`864`, :issue:`868`, :issue:`898`. - Reverted to using the public argument parsing API :c:func:`PyArg_ParseTupleAndKeywords` under Python 3.12 -- by :user:`charles-dyfis-net` and :user:`webknjaz`. The effect is that this change prevents build failures with clang 16.9.6 and gcc-14 reported in :issue:`926`. It also fixes a segmentation fault crash caused by passing keyword arguments to :py:meth:`MultiDict.getall() ` discovered by :user:`jonaslb` and :user:`hroncok` while examining the problem. *Related issues and pull requests on GitHub:* :issue:`862`, :issue:`909`, :issue:`926`, :issue:`929`. - Fixed a ``SystemError: null argument to internal routine`` error on a ``MultiDict.items().isdisjoint()`` call when using C Extensions. *Related issues and pull requests on GitHub:* :issue:`927`. Improved documentation ---------------------- - On the `Contributing docs `_ page, a link to the ``Towncrier philosophy`` has been fixed. *Related issues and pull requests on GitHub:* :issue:`911`. Packaging updates and notes for downstreams ------------------------------------------- - Stopped marking all files as installable package data -- by :user:`webknjaz`. This change helps ``setuptools`` understand that C-headers are not to be installed under :file:`lib/python3.{x}/site-packages/`. *Related commits on GitHub:* :commit:`31e1170`. - Started publishing pure-python wheels to be installed as a fallback -- by :user:`webknjaz`. *Related commits on GitHub:* :commit:`7ba0e72`. 
- Switched from ``setuptools``' legacy backend (``setuptools.build_meta:__legacy__``) to the modern one (``setuptools.build_meta``) by actually specifying the the ``[build-system] build-backend`` option in :file:`pyproject.toml` -- by :user:`Jackenmen`. *Related issues and pull requests on GitHub:* :issue:`802`. - Declared Python 3.12 supported officially in the distribution package metadata -- by :user:`hugovk`. *Related issues and pull requests on GitHub:* :issue:`877`. Contributor-facing changes -------------------------- - The test framework has been refactored. In the previous state, the circular imports reported in :issue:`837` caused the C-extension tests to be skipped. Now, there is a set of the ``pytest`` fixtures that is set up in a parametrized manner allowing to have a consistent way of accessing mirrored ``multidict`` implementations across all the tests. This change also implemented a pair of CLI flags (``--c-extensions`` / ``--no-c-extensions``) that allow to explicitly request deselecting the tests running against the C-extension. -- by :user:`webknjaz`. *Related issues and pull requests on GitHub:* :issue:`98`, :issue:`837`, :issue:`915`. - Updated the test pins lockfile used in the ``cibuildwheel`` test stage -- by :user:`hoodmane`. *Related issues and pull requests on GitHub:* :issue:`827`. - Added an explicit ``void`` for arguments in C-function signatures which addresses the following compiler warning: .. code-block:: console warning: a function declaration without a prototype is deprecated in all versions of C [-Wstrict-prototypes] -- by :user:`hoodmane` *Related issues and pull requests on GitHub:* :issue:`828`. - An experimental Python 3.13 job now runs in the CI -- :user:`webknjaz`. *Related issues and pull requests on GitHub:* :issue:`920`. - Added test coverage for the :ref:`and `, :ref:`or `, :py:obj:`sub `, and :py:obj:`xor ` operators in the :file:`multidict/_multidict_base.py` module. 
It also covers :py:data:`NotImplemented` and ":py:class:`~typing.Iterable`-but-not-:py:class:`~typing.Set`" cases there. -- by :user:`a5r0n` *Related issues and pull requests on GitHub:* :issue:`936`. - The version of pytest is now capped below 8, when running MyPy against Python 3.7. This pytest release dropped support for said runtime. *Related issues and pull requests on GitHub:* :issue:`937`. ---- 6.0.4 (2022-12-24) ================== Bugfixes -------- - Fixed a type annotations regression introduced in v6.0.2 under Python versions <3.10. It was caused by importing certain types only available in newer versions. (:issue:`798`) 6.0.3 (2022-12-03) ================== Features -------- - Declared the official support for Python 3.11 — by :user:`mlegner`. (:issue:`872`) 6.0.2 (2022-01-24) ================== Bugfixes -------- - Revert :issue:`644`, restore type annotations to as-of 5.2.0 version. (:issue:`688`) 6.0.1 (2022-01-23) ================== Bugfixes -------- - Restored back ``MultiDict``, ``CIMultiDict``, ``MultiDictProxy``, and ``CIMutiDictProxy`` generic type arguments; they are parameterized by value type, but the key type is fixed by container class. ``MultiDict[int]`` means ``MutableMultiMapping[str, int]``. The key type of ``MultiDict`` is always ``str``, while all str-like keys are accepted by API and converted to ``str`` internally. The same is true for ``CIMultiDict[int]`` which means ``MutableMultiMapping[istr, int]``. str-like keys are accepted but converted to ``istr`` internally. (:issue:`682`) 6.0.0 (2022-01-22) ================== Features -------- - Use ``METH_FASTCALL`` where it makes sense. ``MultiDict.add()`` is 2.2 times faster now, ``CIMultiDict.add()`` is 1.5 times faster. The same boost is applied to ``get*()``, ``setdefault()``, and ``pop*()`` methods. (:issue:`681`) Bugfixes -------- - Fixed type annotations for keys of multidict mapping classes. (:issue:`644`) - Support Multidict[int] for pure-python version. 
``__class_getitem__`` is already provided by C Extension, making it work with the pure-extension too. (:issue:`678`) Deprecations and Removals ------------------------- - Dropped Python 3.6 support (:issue:`680`) Misc ---- - :issue:`659` 5.2.0 (2021-10-03) ===================== Features -------- - 1. Added support Python 3.10 2. Started shipping platform-specific wheels with the ``musl`` tag targeting typical Alpine Linux runtimes. 3. Started shipping platform-specific arm64 wheels for Apple Silicon. (:issue:`629`) Bugfixes -------- - Fixed pure-python implementation that used to raise "Dictionary changed during iteration" error when iterated view (``.keys()``, ``.values()`` or ``.items()``) was created before the dictionary's content change. (:issue:`620`) 5.1.0 (2020-12-03) ================== Features -------- - Supported ``GenericAliases`` (``MultiDict[str]``) for Python 3.9+ :issue:`553` Bugfixes -------- - Synchronized the declared supported Python versions in ``setup.py`` with actually supported and tested ones. :issue:`552` ---- 5.0.1 (2020-11-14) ================== Bugfixes -------- - Provided x86 Windows wheels :issue:`550` ---- 5.0.0 (2020-10-12) ================== Features -------- - Provided wheels for ``aarch64``, ``i686``, ``ppc64le``, ``s390x`` architectures on Linux as well as ``x86_64``. :issue:`500` - Provided wheels for Python 3.9. :issue:`534` Removal ------- - Dropped Python 3.5 support; Python 3.6 is the minimal supported Python version. Misc ---- - :issue:`503` ---- ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/LICENSE0000644000175100001660000000114314776033000014131 0ustar00runnerdocker Copyright 2016 Andrew Svetlov and aio-libs contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/MANIFEST.in0000644000175100001660000000056414776033000014670 0ustar00runnerdockerinclude .coveragerc include pyproject.toml include pytest.ini include LICENSE include CHANGES.rst include README.rst include Makefile graft multidict graft docs graft CHANGES graft requirements graft tests global-exclude *.pyc include multidict/*.c exclude multidict/_multidict.html exclude multidict/*.so exclude multidict/*.pyd exclude multidict/*.pyd prune docs/_build ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/Makefile0000644000175100001660000000365014776033000014571 0ustar00runnerdocker# Some simple testing tasks (sorry, UNIX only). .PHONY: all build test vtest cov clean doc PYXS = $(wildcard multidict/*.pyx) SRC = multidict tests setup.py all: test .install-deps: $(shell find requirements -type f) pip install -r requirements/dev.txt @touch .install-deps isort-check: @if ! isort --check $(SRC); then \ echo "Import sort errors, run 'make fmt' to fix them!!!"; \ isort --diff --check $(SRC); \ false; \ fi black-check: @if ! isort --check $(SRC); then \ echo "black errors, run 'make fmt' to fix them!!!"; \ black -t py35 --diff --check $(SRC); \ false; \ fi lint: black-check isort-check python -Im pre_commit run --all-files --show-diff-on-failure fmt: black -t py35 $(SRC) isort $(SRC) .develop: .install-deps $(shell find multidict -type f) pip install -e . 
@touch .develop test: .develop @pytest -q vtest: .develop @pytest -s -v cov-dev: .develop @pytest --cov-report=html @echo "open file://`pwd`/htmlcov/index.html" cov-ci-run: .develop @echo "Regular run" @pytest --cov-report=html cov-dev-full: cov-ci-run @echo "open file://`pwd`/htmlcov/index.html" doc: @make -C docs html SPHINXOPTS="-W -n --keep-going -E" @echo "open file://`pwd`/docs/_build/html/index.html" doc-spelling: @make -C docs spelling SPHINXOPTS="-W -n --keep-going -E" install: @pip install -U 'pip' @pip install -Ur requirements/dev.txt install-dev: .develop clean: rm -rf `find . -name __pycache__` rm -f `find . -type f -name '*.py[co]' ` rm -f `find . -type f -name '*~' ` rm -f `find . -type f -name '.*~' ` rm -f `find . -type f -name '@*' ` rm -f `find . -type f -name '#*#' ` rm -f `find . -type f -name '*.orig' ` rm -f `find . -type f -name '*.rej' ` rm -f .coverage rm -rf coverage rm -rf build rm -rf cover rm -rf htmlcov make -C docs clean SPHINXBUILD=false python3 setup.py clean rm -f multidict/*.html rm -f multidict/*.so rm -f multidict/*.pyd rm -rf .tox ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744320002.8141801 multidict-6.4.3/PKG-INFO0000644000175100001660000001231314776033003014225 0ustar00runnerdockerMetadata-Version: 2.4 Name: multidict Version: 6.4.3 Summary: multidict implementation Home-page: https://github.com/aio-libs/multidict Author: Andrew Svetlov Author-email: andrew.svetlov@gmail.com License: Apache 2 Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org Project-URL: CI: GitHub, https://github.com/aio-libs/multidict/actions Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/multidict Project-URL: Docs: Changelog, https://multidict.aio-libs.org/en/latest/changes/ Project-URL: Docs: RTD, 
https://multidict.aio-libs.org Project-URL: GitHub: issues, https://github.com/aio-libs/multidict/issues Project-URL: GitHub: repo, https://github.com/aio-libs/multidict Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: Apache Software License Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Classifier: Programming Language :: Python :: 3.12 Classifier: Programming Language :: Python :: 3.13 Requires-Python: >=3.9 Description-Content-Type: text/x-rst License-File: LICENSE Requires-Dist: typing-extensions>=4.1.0; python_version < "3.11" Dynamic: license-file ========= multidict ========= .. image:: https://github.com/aio-libs/multidict/actions/workflows/ci-cd.yml/badge.svg :target: https://github.com/aio-libs/multidict/actions :alt: GitHub status for master branch .. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg?flag=pytest :target: https://codecov.io/gh/aio-libs/multidict?flags[]=pytest :alt: Coverage metrics .. image:: https://img.shields.io/pypi/v/multidict.svg :target: https://pypi.org/project/multidict :alt: PyPI .. image:: https://readthedocs.org/projects/multidict/badge/?version=latest :target: https://multidict.aio-libs.org :alt: Read The Docs build status badge .. image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json :target: https://codspeed.io/aio-libs/multidict :alt: CodSpeed .. image:: https://img.shields.io/pypi/pyversions/multidict.svg :target: https://pypi.org/project/multidict :alt: Python versions .. 
image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat :target: https://matrix.to/#/%23aio-libs:matrix.org :alt: Matrix Room — #aio-libs:matrix.org .. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat :target: https://matrix.to/#/%23aio-libs-space:matrix.org :alt: Matrix Space — #aio-libs-space:matrix.org Multidict is dict-like collection of *key-value pairs* where key might occur more than once in the container. Introduction ------------ *HTTP Headers* and *URL query string* require specific data structure: *multidict*. It behaves mostly like a regular ``dict`` but it may have several *values* for the same *key* and *preserves insertion ordering*. The *key* is ``str`` (or ``istr`` for case-insensitive dictionaries). ``multidict`` has four multidict classes: ``MultiDict``, ``MultiDictProxy``, ``CIMultiDict`` and ``CIMultiDictProxy``. Immutable proxies (``MultiDictProxy`` and ``CIMultiDictProxy``) provide a dynamic view for the proxied multidict, the view reflects underlying collection changes. They implement the ``collections.abc.Mapping`` interface. Regular mutable (``MultiDict`` and ``CIMultiDict``) classes implement ``collections.abc.MutableMapping`` and allows them to change their own content. *Case insensitive* (``CIMultiDict`` and ``CIMultiDictProxy``) assume the *keys* are case insensitive, e.g.:: >>> dct = CIMultiDict(key='val') >>> 'Key' in dct True >>> dct['Key'] 'val' *Keys* should be ``str`` or ``istr`` instances. The library has optional C Extensions for speed. License ------- Apache 2 Library Installation -------------------- .. code-block:: bash $ pip install multidict The library is Python 3 only! PyPI contains binary wheels for Linux, Windows and MacOS. 
If you want to install ``multidict`` on another operating system (or *Alpine Linux* inside a Docker) the tarball will be used to compile the library from source. It requires a C compiler and Python headers to be installed. To skip the compilation, please use the `MULTIDICT_NO_EXTENSIONS` environment variable, e.g.: .. code-block:: bash $ MULTIDICT_NO_EXTENSIONS=1 pip install multidict Please note, the pure Python (uncompiled) version is about 20-50 times slower depending on the usage scenario!!! For extension development, set the ``MULTIDICT_DEBUG_BUILD`` environment variable to compile the extensions in debug mode: .. code-block:: console $ MULTIDICT_DEBUG_BUILD=1 pip install multidict Changelog --------- See `RTD page `_. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/README.rst0000644000175100001660000000726214776033000014623 0ustar00runnerdocker========= multidict ========= .. image:: https://github.com/aio-libs/multidict/actions/workflows/ci-cd.yml/badge.svg :target: https://github.com/aio-libs/multidict/actions :alt: GitHub status for master branch .. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg?flag=pytest :target: https://codecov.io/gh/aio-libs/multidict?flags[]=pytest :alt: Coverage metrics .. image:: https://img.shields.io/pypi/v/multidict.svg :target: https://pypi.org/project/multidict :alt: PyPI .. image:: https://readthedocs.org/projects/multidict/badge/?version=latest :target: https://multidict.aio-libs.org :alt: Read The Docs build status badge .. image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json :target: https://codspeed.io/aio-libs/multidict :alt: CodSpeed .. image:: https://img.shields.io/pypi/pyversions/multidict.svg :target: https://pypi.org/project/multidict :alt: Python versions .. 
image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat :target: https://matrix.to/#/%23aio-libs:matrix.org :alt: Matrix Room — #aio-libs:matrix.org .. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat :target: https://matrix.to/#/%23aio-libs-space:matrix.org :alt: Matrix Space — #aio-libs-space:matrix.org Multidict is dict-like collection of *key-value pairs* where key might occur more than once in the container. Introduction ------------ *HTTP Headers* and *URL query string* require specific data structure: *multidict*. It behaves mostly like a regular ``dict`` but it may have several *values* for the same *key* and *preserves insertion ordering*. The *key* is ``str`` (or ``istr`` for case-insensitive dictionaries). ``multidict`` has four multidict classes: ``MultiDict``, ``MultiDictProxy``, ``CIMultiDict`` and ``CIMultiDictProxy``. Immutable proxies (``MultiDictProxy`` and ``CIMultiDictProxy``) provide a dynamic view for the proxied multidict, the view reflects underlying collection changes. They implement the ``collections.abc.Mapping`` interface. Regular mutable (``MultiDict`` and ``CIMultiDict``) classes implement ``collections.abc.MutableMapping`` and allows them to change their own content. *Case insensitive* (``CIMultiDict`` and ``CIMultiDictProxy``) assume the *keys* are case insensitive, e.g.:: >>> dct = CIMultiDict(key='val') >>> 'Key' in dct True >>> dct['Key'] 'val' *Keys* should be ``str`` or ``istr`` instances. The library has optional C Extensions for speed. License ------- Apache 2 Library Installation -------------------- .. code-block:: bash $ pip install multidict The library is Python 3 only! PyPI contains binary wheels for Linux, Windows and MacOS. 
If you want to install ``multidict`` on another operating system (or *Alpine Linux* inside a Docker) the tarball will be used to compile the library from source. It requires a C compiler and Python headers to be installed. To skip the compilation, please use the `MULTIDICT_NO_EXTENSIONS` environment variable, e.g.: .. code-block:: bash $ MULTIDICT_NO_EXTENSIONS=1 pip install multidict Please note, the pure Python (uncompiled) version is about 20-50 times slower depending on the usage scenario!!! For extension development, set the ``MULTIDICT_DEBUG_BUILD`` environment variable to compile the extensions in debug mode: .. code-block:: console $ MULTIDICT_DEBUG_BUILD=1 pip install multidict Changelog --------- See `RTD page `_. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744320002.7991798 multidict-6.4.3/docs/0000755000175100001660000000000014776033003014060 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/docs/Makefile0000644000175100001660000001533314776033000015522 0ustar00runnerdocker# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
# the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 
pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/aiohttp.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/aiohttp.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/aiohttp" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/aiohttp" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 
text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." spelling: $(SPHINXBUILD) -b spelling $(ALLSPHINXOPTS) $(BUILDDIR)/spelling @echo @echo "Build finished." 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/docs/benchmark.rst0000644000175100001660000000322414776033000016542 0ustar00runnerdocker.. _benchmarking-reference: ========== Benchmarks ========== Introduction ------------ Benchmarks allow to track performance from release to release and verify that latest changes haven not affected it drastically. Benchmarks are based on the :doc:`pyperf:index`. How to run ---------- ``requirements/dev.txt`` should be installed before we can proceed with benchmarks. Please also make sure that you have :doc:`configured ` your OS to have reliable results. To run benchmarks next command can be executed: .. code-block:: bash $ python benchmarks/benchmark.py This would run benchmarks for both classes (:class:`~multidict.MultiDict` and :class:`~multidict.CIMultiDict`) of both implementations (pure-Python and C). To run benchmarks for a specific class of specific implementation please use ``--impl`` option: .. code-block:: bash $ python benchmarks/benchmark.py --impl multidict_c would run benchmarks only for :class:`~multidict.MultiDict` implemented in C. Please use ``--help`` to see all available options. Most of the options are described at :doc:`perf's Runner ` documentation. How to compare implementations ------------------------------ ``--impl`` option allows to run benchmarks for a specific implementation of class. Combined with the :ref:`compare_to ` command of :doc:`pyperf:index` we can get a good picture of how implementation performs: .. code-block:: bash $ python benchmarks/benchmark.py --impl multidict_c -o multidict_cy.json $ python benchmarks/benchmark.py --impl multidict_py -o multidict_py.json $ python -m perf compare_to multidict_cy.json multidict_py.json ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/docs/changes.rst0000644000175100001660000000060414776033000016217 0ustar00runnerdocker.. 
_multidict_changes: ========= Changelog ========= .. only:: not is_release To be included in v\ |release| (if present) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. towncrier-draft-entries:: |release| [UNRELEASED DRAFT] Released versions ^^^^^^^^^^^^^^^^^ .. include:: ../CHANGES.rst :start-after: .. towncrier release notes start .. include:: ../HISTORY.rst ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/docs/conf.py0000644000175100001660000003060214776033000015355 0ustar00runnerdocker#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # multidict documentation build configuration file, created by # sphinx-quickstart on Wed Mar 5 12:35:35 2014. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. from __future__ import annotations import os import re from contextlib import suppress from pathlib import Path import alabaster from sphinx.addnodes import pending_xref from sphinx.application import Sphinx from sphinx.environment import BuildEnvironment # isort: split from docutils.nodes import literal, reference PROJECT_ROOT_DIR = Path(__file__).parents[1].resolve() IS_RELEASE_ON_RTD = ( os.getenv("READTHEDOCS", "False") == "True" and os.environ["READTHEDOCS_VERSION_TYPE"] == "tag" ) if IS_RELEASE_ON_RTD: tags: set[str] tags.add("is_release") # noqa: F821 _docs_path = Path(__file__).parent _version_path = _docs_path / ".." 
/ "multidict" / "__init__.py" with _version_path.open(encoding="utf-8") as fp: _version_search_result = re.search( r'^__version__ = "' r"(?P\d+)" r"\.(?P\d+)" r"\.(?P\d+)" r'(?P.*)?"$', fp.read(), re.M, ) if _version_search_result is None: raise RuntimeError("Unable to determine version.") try: _version_info = _version_search_result.groupdict() except IndexError: raise RuntimeError("Unable to determine version.") # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ # stdlib-party extensions: "sphinx.ext.extlinks", "sphinx.ext.intersphinx", "sphinx.ext.viewcode", # Third-party extensions: "alabaster", "sphinxcontrib.towncrier.ext", # provides `towncrier-draft-entries` directive ] with suppress(ImportError): # spelling extension is optional, only add it when installed import sphinxcontrib.spelling # noqa extensions.append("sphinxcontrib.spelling") intersphinx_mapping = { "pyperf": ("https://pyperf.readthedocs.io/en/latest", None), "python": ("http://docs.python.org/3", None), "aiohttp": ("https://aiohttp.readthedocs.io/en/stable/", None), } # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix of source filenames. source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = "index" # General information about the project. 
github_url = "https://github.com" github_repo_org = "aio-libs" github_repo_name = "multidict" github_repo_slug = f"{github_repo_org}/{github_repo_name}" github_repo_url = f"{github_url}/{github_repo_slug}" github_sponsors_url = f"{github_url}/sponsors" project = github_repo_name copyright = "2016, Andrew Svetlov and aio-libs contributors" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = "{major}.{minor}".format(**_version_info) # The full version, including alpha/beta/rc tags. release = "{major}.{minor}.{patch}{tag}".format_map(_version_info) rst_epilog = f""" .. |project| replace:: {project} """ # pylint: disable=invalid-name # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all # documents. default_role = "any" # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" # The default language to highlight source code in. 
highlight_language = "python" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # -- Extension configuration ------------------------------------------------- # -- Options for extlinks extension --------------------------------------- extlinks = { "issue": (f"{github_repo_url}/issues/%s", "#%s"), "pr": (f"{github_repo_url}/pull/%s", "PR #%s"), "commit": (f"{github_repo_url}/commit/%s", "%s"), "gh": (f"{github_url}/%s", "GitHub: %s"), "user": (f"{github_sponsors_url}/%s", "@%s"), } # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = "alabaster" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = { # 'logo': 'aiohttp-icon-128x128.png', "description": project, "github_user": github_repo_org, "github_repo": github_repo_name, "github_button": True, "github_type": "star", "github_banner": True, "codecov_button": True, "pre_bg": "#FFF6E5", "note_bg": "#E5ECD1", "note_border": "#BFCF8C", "body_text": "#482C0A", "sidebar_text": "#49443E", "sidebar_header": "#4B4032", } # Add any paths that contain custom themes here, relative to this directory. html_theme_path = [alabaster.get_path()] # type: ignore[no-untyped-call] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = 'aiohttp-icon.svg' # The name of an image file (within the static path) to use as favicon of the # docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = 'aiohttp-icon.ico' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. html_sidebars = { "**": [ "about.html", "navigation.html", "searchbox.html", ] } # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). 
# html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = "multidictdoc" # -- Options for LaTeX output --------------------------------------------- latex_elements: dict[str, str] = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ("index", "multidict.tex", "multidict Documentation", "Andrew Svetlov", "manual"), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [("index", project, "multidict Documentation", ["Andrew Svetlov"], 1)] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( "index", project, "multidict Documentation", "Andrew Svetlov", project, "One line description of project.", "Miscellaneous", ), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False # -- Strictness options -------------------------------------------------- nitpicky = True nitpick_ignore: list[str] = [] # -- Options for towncrier_draft extension ----------------------------------- towncrier_draft_autoversion_mode = "draft" # or: 'sphinx-version', 'sphinx-release' towncrier_draft_include_empty = True towncrier_draft_working_directory = PROJECT_ROOT_DIR def _replace_missing_aiohttp_hdrs_reference( app: Sphinx, env: BuildEnvironment, node: pending_xref, contnode: literal, ) -> "reference | None": if (node.get('refdomain'), node.get('reftype')) != ("py", "mod"): return None ref_target = node.get("reftarget", "") if ref_target != "aiohttp:aiohttp.hdrs": return None normalized_ref_target = "aiohttp:aiohttp.hdrs".split(":", 1)[-1] return reference( normalized_ref_target, normalized_ref_target, internal=False, refuri="https://github.com/aio-libs/aiohttp/blob/43f3e23/aiohttp/hdrs.py", ) def setup(app: Sphinx) -> dict[str, bool | str]: app.connect('missing-reference', _replace_missing_aiohttp_hdrs_reference) return { "version": "builtin", "parallel_read_safe": True, "parallel_write_safe": True, } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/docs/index.rst0000644000175100001660000000565014776033000015724 0ustar00runnerdocker.. 
aiohttp documentation master file, created by sphinx-quickstart on Wed Mar 5 12:35:35 2014. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. multidict ========= Multidicts are useful for working with HTTP headers, URL query args etc. The code was extracted from aiohttp library. Introduction ------------ *HTTP Headers* and *URL query string* require specific data structure: *multidict*. It behaves mostly like a regular :class:`dict` but it may have several *values* for the same *key* and *preserves insertion ordering*. The *key* is :class:`str` (or :class:`~multidict.istr` for case-insensitive dictionaries). :mod:`multidict` has four multidict classes: :class:`~multidict.MultiDict`, :class:`~multidict.MultiDictProxy`, :class:`~multidict.CIMultiDict` and :class:`~multidict.CIMultiDictProxy`. Immutable proxies (:class:`~multidict.MultiDictProxy` and :class:`~multidict.CIMultiDictProxy`) provide a dynamic view for the proxied multidict, the view reflects underlying collection changes. They implement the :class:`~collections.abc.Mapping` interface. Regular mutable (:class:`~multidict.MultiDict` and :class:`~multidict.CIMultiDict`) classes implement :class:`~collections.abc.MutableMapping` and allows to change their own content. *Case insensitive* (:class:`~multidict.CIMultiDict` and :class:`~multidict.CIMultiDictProxy`) ones assume the *keys* are case insensitive, e.g.:: >>> dct = CIMultiDict(key='val') >>> 'Key' in dct True >>> dct['Key'] 'val' *Keys* should be either :class:`str` or :class:`~multidict.istr` instance. The library has optional C Extensions for sake of speed. Library Installation -------------------- .. code-block:: bash $ pip install multidict The library is Python 3 only! PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install ``multidict`` on another operation system (or *Alpine Linux* inside a Docker) the Tarball will be used to compile the library from sources. 
It requires a C compiler and the Python headers to be installed.
latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. 
goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\aiohttp.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\aiohttp.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. 
goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/docs/multidict.rst0000644000175100001660000003001614776033000016605 0ustar00runnerdocker.. _multidict-reference: ============ Reference ============ .. module:: multidict MultiDict ========= .. class:: MultiDict(**kwargs) MultiDict(mapping, **kwargs) MultiDict(iterable, **kwargs) Creates a mutable multidict instance. Accepted parameters are the same as for :class:`dict`. 
If the same key appears several times it will be added, e.g.:: >>> d = MultiDict([('a', 1), ('b', 2), ('a', 3)]) >>> d .. method:: len(d) Return the number of items in multidict *d*. .. method:: d[key] Return the **first** item of *d* with key *key*. Raises a :exc:`KeyError` if key is not in the multidict. .. method:: d[key] = value Set ``d[key]`` to *value*. Replace all items where key is equal to *key* with single item ``(key, value)``. .. method:: del d[key] Remove all items where key is equal to *key* from *d*. Raises a :exc:`KeyError` if *key* is not in the map. .. method:: key in d Return ``True`` if d has a key *key*, else ``False``. .. method:: key not in d Equivalent to ``not (key in d)`` .. method:: iter(d) Return an iterator over the keys of the dictionary. This is a shortcut for ``iter(d.keys())``. .. method:: add(key, value) Append ``(key, value)`` pair to the dictionary. .. method:: clear() Remove all items from the dictionary. .. method:: copy() Return a shallow copy of the dictionary. .. method:: extend([other]) Extend the dictionary with the key/value pairs from *other*, appending the pairs to this dictionary. For existing keys, values are added. Returns ``None``. :meth:`extend` accepts either another dictionary object or an iterable of key/value pairs (as tuples or other iterables of length two). If keyword arguments are specified, the dictionary is then extended with those key/value pairs: ``d.extend(red=1, blue=2)``. Effectively the same as calling :meth:`add` for every ``(key, value)`` pair. Also see :meth:`update`, for a version that replaces existing keys. .. method:: getone(key[, default]) Return the **first** value for *key* if *key* is in the dictionary, else *default*. Raises :exc:`KeyError` if *default* is not given and *key* is not found. ``d[key]`` is equivalent to ``d.getone(key)``. .. method:: getall(key[, default]) Return a list of all values for *key* if *key* is in the dictionary, else *default*. 
Raises :exc:`KeyError` if *default* is not given and *key* is not found. .. method:: get(key[, default]) Return the **first** value for *key* if *key* is in the dictionary, else *default*. If *default* is not given, it defaults to ``None``, so that this method never raises a :exc:`KeyError`. ``d.get(key)`` is equivalent to ``d.getone(key, None)``. .. method:: keys() Return a new view of the dictionary's keys. View contains all keys, possibly with duplicates. .. method:: items() Return a new view of the dictionary's items (``(key, value)`` pairs). View contains all items, multiple items can have the same key. .. method:: values() Return a new view of the dictionary's values. View contains all values. .. method:: popone(key[, default]) If *key* is in the dictionary, remove it and return its the **first** value, else return *default*. If *default* is not given and *key* is not in the dictionary, a :exc:`KeyError` is raised. .. versionadded:: 3.0 .. method:: pop(key[, default]) An alias to :meth:`popone` .. versionchanged:: 3.0 Now only *first* occurrence is removed (was all). .. method:: popall(key[, default]) If *key* is in the dictionary, remove all occurrences and return a :class:`list` of all values in corresponding order (as :meth:`getall` does). If *key* is not found and *default* is provided return *default*. If *default* is not given and *key* is not in the dictionary, a :exc:`KeyError` is raised. .. versionadded:: 3.0 .. method:: popitem() Remove and return an arbitrary ``(key, value)`` pair from the dictionary. :meth:`popitem` is useful to destructively iterate over a dictionary, as often used in set algorithms. If the dictionary is empty, calling :meth:`popitem` raises a :exc:`KeyError`. .. method:: setdefault(key[, default]) If *key* is in the dictionary, return its the **first** value. If not, insert *key* with a value of *default* and return *default*. *default* defaults to ``None``. .. 
method:: update([other]) Update the dictionary with the key/value pairs from *other*, overwriting existing keys. Returns ``None``. :meth:`update` accepts either another dictionary object or an iterable of key/value pairs (as tuples or other iterables of length two). If keyword arguments are specified, the dictionary is then updated with those key/value pairs: ``d.update(red=1, blue=2)``. Also see :meth:`extend` for a method that adds to existing keys rather than update them. .. seealso:: :class:`MultiDictProxy` can be used to create a read-only view of a :class:`MultiDict`. CIMultiDict =========== .. class:: CIMultiDict(**kwargs) CIMultiDict(mapping, **kwargs) CIMultiDict(iterable, **kwargs) Create a case insensitive multidict instance. The behavior is the same as of :class:`MultiDict` but key comparisons are case insensitive, e.g.:: >>> dct = CIMultiDict(a='val') >>> 'A' in dct True >>> dct['A'] 'val' >>> dct['a'] 'val' >>> dct['b'] = 'new val' >>> dct['B'] 'new val' The class is inherited from :class:`MultiDict`. .. seealso:: :class:`CIMultiDictProxy` can be used to create a read-only view of a :class:`CIMultiDict`. MultiDictProxy ============== .. class:: MultiDictProxy(multidict) Create an immutable multidict proxy. It provides a dynamic view on the multidict’s entries, which means that when the multidict changes, the view reflects these changes. Raises :exc:`TypeError` if *multidict* is not a :class:`MultiDict` instance. .. method:: len(d) Return number of items in multidict *d*. .. method:: d[key] Return the **first** item of *d* with key *key*. Raises a :exc:`KeyError` if key is not in the multidict. .. method:: key in d Return ``True`` if d has a key *key*, else ``False``. .. method:: key not in d Equivalent to ``not (key in d)`` .. method:: iter(d) Return an iterator over the keys of the dictionary. This is a shortcut for ``iter(d.keys())``. .. method:: copy() Return a shallow copy of the underlying multidict. .. 
method:: getone(key[, default]) Return the **first** value for *key* if *key* is in the dictionary, else *default*. Raises :exc:`KeyError` if *default* is not given and *key* is not found. ``d[key]`` is equivalent to ``d.getone(key)``. .. method:: getall(key[, default]) Return a list of all values for *key* if *key* is in the dictionary, else *default*. Raises :exc:`KeyError` if *default* is not given and *key* is not found. .. method:: get(key[, default]) Return the **first** value for *key* if *key* is in the dictionary, else *default*. If *default* is not given, it defaults to ``None``, so that this method never raises a :exc:`KeyError`. ``d.get(key)`` is equivalent to ``d.getone(key, None)``. .. method:: keys() Return a new view of the dictionary's keys. View contains all keys, possibly with duplicates. .. method:: items() Return a new view of the dictionary's items (``(key, value)`` pairs). View contains all items, multiple items can have the same key. .. method:: values() Return a new view of the dictionary's values. View contains all values. CIMultiDictProxy ================ .. class:: CIMultiDictProxy(multidict) Case insensitive version of :class:`MultiDictProxy`. Raises :exc:`TypeError` if *multidict* is not :class:`CIMultiDict` instance. The class is inherited from :class:`MultiDict`. Version ======= All multidicts have an internal version flag. It's changed on every dict update, thus the flag could be used for checks like cache expiring etc. .. function:: getversion(mdict) Return a version of given *mdict* object (works for proxies also). The type of returned value is opaque and should be used for equality tests only (``==`` and ``!=``), ordering is not allowed while not prohibited explicitly. .. versionadded:: 3.0 .. seealso:: :pep:`509` istr ==== :class:`CIMultiDict` accepts :class:`str` as *key* argument for dict lookups but uses case-folded (lower-cased) strings for the comparison internally. 
For more effective processing it should know if the *key* is already case-folded to skip the :meth:`~str.lower()` call. The performant code may create case-folded string keys explicitly hand, e.g:: >>> key = istr('Key') >>> key 'Key' >>> mdict = CIMultiDict(key='value') >>> key in mdict True >>> mdict[key] 'value' For performance :class:`istr` strings should be created once and stored somewhere for the later usage, see :mod:`aiohttp:aiohttp.hdrs` for example. .. class:: istr(object='') istr(bytes_or_buffer[, encoding[, errors]]) Create a new **case-folded** string object from the given *object*. If *encoding* or *errors* are specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of ``object.__str__()`` (if defined) or ``repr(object)``. *encoding* defaults to ``sys.getdefaultencoding()``. *errors* defaults to ``'strict'``. The class is inherited from :class:`str` and has all regular string methods. .. versionchanged:: 2.0 ``upstr()`` is a deprecated alias for :class:`istr`. .. versionchanged:: 3.7 :class:`istr` doesn't title-case its argument anymore but uses internal lower-cased data for fast case-insensitive comparison. Abstract Base Classes ===================== The module provides two ABCs: ``MultiMapping`` and ``MutableMultiMapping``. They are similar to :class:`collections.abc.Mapping` and :class:`collections.abc.MutableMapping` and inherited from them. .. versionadded:: 3.3 Typing ====== The library is shipped with embedded type annotations, mypy just picks the annotations by default. :class:`MultiDict`, :class:`CIMultiDict`, :class:`MultiDictProxy`, and :class:`CIMultiDictProxy` are *generic* types; please use the corresponding notation for multidict value types, e.g. ``md: MultiDict[str] = MultiDict()``. The type of multidict keys is always :class:`str` or a class derived from a string. .. versionadded:: 3.7 Environment variables ===================== .. 
envvar:: MULTIDICT_NO_EXTENSIONS An environment variable that instructs the packaging scripts to skip compiling the C-extension based variant of :mod:`multidict`. When used in runtime, it instructs the pure-Python variant to be imported from the top-level :mod:`multidict` entry-point package, even when the C-extension implementation is available. .. caution:: The pure-Python (uncompiled) version is roughly 20-50 times slower than its C counterpart, depending on the way it's used. .. envvar:: MULTIDICT_DEBUG_BUILD An environment variable that instructs the packaging scripts to compile the C-extension based variant of :mod:`multidict` with debug symbols. This is useful for debugging the C-extension code, but it will result in a larger binary size and worse performance. .. caution:: The debug build is not intended for production use and should only be used for development and debugging purposes. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/docs/spelling_wordlist.txt0000644000175100001660000000164614776033000020371 0ustar00runnerdockerarmv aarch64 i686 ppc64le s390x x86_64 aiohttp args async autocalculated autodetection autogenerates autogeneration backend basename bugfixes cchardet cChardet changelog charset charsetdetect CPython criterias css ctor Ctrl cython Deprecations deallocation decrementation dev dict docstrings downstreams eof fallback fastpath filename gcc getitem github google gunicorn Gunicorn Indices inplace ionaries IP IPv ish istr iterable iterables javascript json keepalive keepalives keepaliving lockfile lookups manylinux middleware middlewares multidict multidicts Multidicts multipart Multipart mypy Nikolay param params performant pickable pickleable pre proxied pyenv pyinstaller pytest refactor refactored regex regexs repo runtime runtimes str subclassable subclassed subclassing subprotocol subprotocols Svetlov toolbar toolset tuples un uncompiled upstr url urlencoded urls utf 
websocket websockets Websockets wildcard Workflow wsgi ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1744320002.80018 multidict-6.4.3/multidict/0000755000175100001660000000000014776033003015126 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/multidict/__init__.py0000644000175100001660000000231314776033000017233 0ustar00runnerdocker"""Multidict implementation. HTTP Headers and URL query string require specific data structure: multidict. It behaves mostly like a dict but it can have several values for the same key. """ from typing import TYPE_CHECKING from ._abc import MultiMapping, MutableMultiMapping from ._compat import USE_EXTENSIONS __all__ = ( "MultiMapping", "MutableMultiMapping", "MultiDictProxy", "CIMultiDictProxy", "MultiDict", "CIMultiDict", "upstr", "istr", "getversion", ) __version__ = "6.4.3" if TYPE_CHECKING or not USE_EXTENSIONS: from ._multidict_py import ( CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, getversion, istr, ) else: from collections.abc import ItemsView, KeysView, ValuesView from ._multidict import ( CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, _ItemsView, _KeysView, _ValuesView, getversion, istr, ) MultiMapping.register(MultiDictProxy) MutableMultiMapping.register(MultiDict) KeysView.register(_KeysView) ItemsView.register(_ItemsView) ValuesView.register(_ValuesView) upstr = istr ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/multidict/_abc.py0000644000175100001660000000430414776033000016362 0ustar00runnerdockerimport abc from collections.abc import Iterable, Mapping, MutableMapping from typing import TYPE_CHECKING, Protocol, TypeVar, Union, overload if TYPE_CHECKING: from ._multidict_py import istr else: istr = str _V = TypeVar("_V") _V_co = TypeVar("_V_co", covariant=True) _T = TypeVar("_T") class SupportsKeys(Protocol[_V_co]): def 
keys(self) -> Iterable[str]: ... def __getitem__(self, key: str, /) -> _V_co: ... class SupportsIKeys(Protocol[_V_co]): def keys(self) -> Iterable[istr]: ... def __getitem__(self, key: istr, /) -> _V_co: ... MDArg = Union[SupportsKeys[_V], SupportsIKeys[_V], Iterable[tuple[str, _V]], None] class MultiMapping(Mapping[str, _V_co]): @overload def getall(self, key: str) -> list[_V_co]: ... @overload def getall(self, key: str, default: _T) -> Union[list[_V_co], _T]: ... @abc.abstractmethod def getall(self, key: str, default: _T = ...) -> Union[list[_V_co], _T]: """Return all values for key.""" @overload def getone(self, key: str) -> _V_co: ... @overload def getone(self, key: str, default: _T) -> Union[_V_co, _T]: ... @abc.abstractmethod def getone(self, key: str, default: _T = ...) -> Union[_V_co, _T]: """Return first value for key.""" class MutableMultiMapping(MultiMapping[_V], MutableMapping[str, _V]): @abc.abstractmethod def add(self, key: str, value: _V) -> None: """Add value to list.""" @abc.abstractmethod def extend(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None: """Add everything from arg and kwargs to the mapping.""" @overload def popone(self, key: str) -> _V: ... @overload def popone(self, key: str, default: _T) -> Union[_V, _T]: ... @abc.abstractmethod def popone(self, key: str, default: _T = ...) -> Union[_V, _T]: """Remove specified key and return the corresponding value.""" @overload def popall(self, key: str) -> list[_V]: ... @overload def popall(self, key: str, default: _T) -> Union[list[_V], _T]: ... @abc.abstractmethod def popall(self, key: str, default: _T = ...) 
import os
import platform

# True when the user explicitly opted out of the C extension via the
# MULTIDICT_NO_EXTENSIONS environment variable (any non-empty value).
NO_EXTENSIONS = bool(os.environ.get("MULTIDICT_NO_EXTENSIONS"))

# The pure-Python implementation is preferred on PyPy.
PYPY = platform.python_implementation() == "PyPy"

# Use the compiled extension only when neither opt-out condition holds.
USE_EXTENSIONS = not (NO_EXTENSIONS or PYPY)

if USE_EXTENSIONS:
    try:
        from . import _multidict  # type: ignore[attr-defined]  # noqa: F401
    except ImportError:  # pragma: no cover  # FIXME: Refactor for coverage. See #837.
        # Compiled module is unavailable; fall back to pure Python.
        USE_EXTENSIONS = False
#define CIMultiDictProxy_CheckExact(state, obj) \ Py_IS_TYPE(obj, state->CIMultiDictProxyType) #define CIMultiDictProxy_Check(state, obj) \ (CIMultiDictProxy_CheckExact(state, obj) \ || PyObject_TypeCheck(obj, state->CIMultiDictProxyType)) #define AnyMultiDictProxy_Check(state, obj) \ (MultiDictProxy_CheckExact(state, obj) \ || CIMultiDictProxy_CheckExact(state, obj) \ || PyObject_TypeCheck(obj, state->MultiDictProxyType)) /******************** Internal Methods ********************/ static inline PyObject * _multidict_getone(MultiDictObject *self, PyObject *key, PyObject *_default) { PyObject *val = NULL; if (pair_list_get_one(&self->pairs, key, &val) <0) { return NULL; } if (val == NULL) { if (_default != NULL) { Py_INCREF(_default); return _default; } else { PyErr_SetObject(PyExc_KeyError, key); return NULL; } } else { return val; } } static inline int _multidict_extend(MultiDictObject *self, PyObject *arg, PyObject *kwds, const char *name, int do_add) { mod_state *state = self->pairs.state; PyObject *used = NULL; PyObject *seq = NULL; pair_list_t *list; if (!do_add) { used = PyDict_New(); if (used == NULL) { goto fail; } } if (kwds && !PyArg_ValidateKeywordArguments(kwds)) { goto fail; } if (arg != NULL) { if (AnyMultiDict_Check(state, arg)) { list = &((MultiDictObject*)arg)->pairs; if (pair_list_update_from_pair_list(&self->pairs, used, list) < 0) { goto fail; } } else if (AnyMultiDictProxy_Check(state, arg)) { list = &((MultiDictProxyObject*)arg)->md->pairs; if (pair_list_update_from_pair_list(&self->pairs, used, list) < 0) { goto fail; } } else if (PyDict_CheckExact(arg)) { if (pair_list_update_from_dict(&self->pairs, used, arg) < 0) { goto fail; } } else { seq = PyMapping_Items(arg); if (seq == NULL) { PyErr_Clear(); seq = Py_NewRef(arg); } if (pair_list_update_from_seq(&self->pairs, used, seq) < 0) { goto fail; } } } if (kwds != NULL) { if (pair_list_update_from_dict(&self->pairs, used, kwds) < 0) { goto fail; } } if (!do_add) { if 
(pair_list_post_update(&self->pairs, used) < 0) { goto fail; } } Py_CLEAR(seq); Py_CLEAR(used); return 0; fail: Py_CLEAR(seq); Py_CLEAR(used); return -1; } static inline Py_ssize_t _multidict_extend_parse_args(PyObject *args, PyObject *kwds, const char *name, PyObject **parg) { Py_ssize_t size = 0; Py_ssize_t s; if (args) { size = PyTuple_GET_SIZE(args); if (size > 1) { PyErr_Format( PyExc_TypeError, "%s takes from 1 to 2 positional arguments but %zd were given", name, size + 1, NULL ); *parg = NULL; return -1; } } if (size == 1) { *parg = Py_NewRef(PyTuple_GET_ITEM(args, 0)); s = PyObject_Length(*parg); if (s < 0) { // e.g. cannot calc size of generator object PyErr_Clear(); } else { size += s; } } else { *parg = NULL; } if (kwds != NULL) { s = PyDict_Size(kwds); if (s < 0) { return -1; } size += s; } return size; } static inline PyObject * multidict_copy(MultiDictObject *self) { MultiDictObject *new_multidict = NULL; new_multidict = (MultiDictObject*)PyType_GenericNew( Py_TYPE(self), NULL, NULL); if (new_multidict == NULL) { goto fail; } if (Py_TYPE(self)->tp_init((PyObject*)new_multidict, NULL, NULL) < 0) { goto fail; } if (pair_list_update_from_pair_list(&new_multidict->pairs, NULL, &self->pairs) < 0) { goto fail; } return (PyObject*)new_multidict; fail: Py_CLEAR(new_multidict); return NULL; } static inline PyObject * _multidict_proxy_copy(MultiDictProxyObject *self, PyTypeObject *type) { MultiDictObject *new_multidict = NULL; new_multidict = (MultiDictObject*)PyType_GenericNew(type, NULL, NULL); if (new_multidict == NULL) { goto fail; } if (type->tp_init((PyObject*)new_multidict, NULL, NULL) < 0) { goto fail; } if (pair_list_update_from_pair_list(&new_multidict->pairs, NULL, &self->md->pairs) < 0) { goto fail; } return (PyObject*)new_multidict; fail: Py_CLEAR(new_multidict); return NULL; } /******************** Base Methods ********************/ static inline PyObject * multidict_getall(MultiDictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject 
*kwnames) { PyObject *list = NULL, *key = NULL, *_default = NULL; if (parse2("getall", args, nargs, kwnames, 1, "key", &key, "default", &_default) < 0) { return NULL; } if (pair_list_get_all(&self->pairs, key, &list) <0) { return NULL; } if (list == NULL) { if (_default != NULL) { Py_INCREF(_default); return _default; } else { PyErr_SetObject(PyExc_KeyError, key); return NULL; } } else { return list; } } static inline PyObject * multidict_getone(MultiDictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *key = NULL, *_default = NULL; if (parse2("getone", args, nargs, kwnames, 1, "key", &key, "default", &_default) < 0) { return NULL; } return _multidict_getone(self, key, _default); } static inline PyObject * multidict_get(MultiDictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *key = NULL, *_default = NULL, *ret; if (parse2("get", args, nargs, kwnames, 1, "key", &key, "default", &_default) < 0) { return NULL; } if (_default == NULL) { // fixme, _default is potentially dangerous borrowed ref here _default = Py_None; } ret = _multidict_getone(self, key, _default); return ret; } static inline PyObject * multidict_keys(MultiDictObject *self) { return multidict_keysview_new(self); } static inline PyObject * multidict_items(MultiDictObject *self) { return multidict_itemsview_new(self); } static inline PyObject * multidict_values(MultiDictObject *self) { return multidict_valuesview_new(self); } static inline PyObject * multidict_reduce(MultiDictObject *self) { PyObject *items = NULL, *items_list = NULL, *args = NULL, *result = NULL; items = multidict_itemsview_new(self); if (items == NULL) { goto ret; } items_list = PySequence_List(items); if (items_list == NULL) { goto ret; } args = PyTuple_Pack(1, items_list); if (args == NULL) { goto ret; } result = PyTuple_Pack(2, Py_TYPE(self), args); ret: Py_XDECREF(args); Py_XDECREF(items_list); Py_XDECREF(items); return result; } static inline PyObject * 
multidict_repr(MultiDictObject *self) { int tmp = Py_ReprEnter((PyObject *)self); if (tmp < 0) { return NULL; } if (tmp > 0) { return PyUnicode_FromString("..."); } PyObject *name = PyObject_GetAttrString((PyObject *)Py_TYPE(self), "__name__"); if (name == NULL) { Py_ReprLeave((PyObject *)self); return NULL; } PyObject *ret = pair_list_repr(&self->pairs, name, true, true); Py_ReprLeave((PyObject *)self); Py_CLEAR(name); return ret; } static inline Py_ssize_t multidict_mp_len(MultiDictObject *self) { return pair_list_len(&self->pairs); } static inline PyObject * multidict_mp_subscript(MultiDictObject *self, PyObject *key) { return _multidict_getone(self, key, NULL); } static inline int multidict_mp_as_subscript(MultiDictObject *self, PyObject *key, PyObject *val) { if (val == NULL) { return pair_list_del(&self->pairs, key); } else { return pair_list_replace(&self->pairs, key, val); } } static inline int multidict_sq_contains(MultiDictObject *self, PyObject *key) { return pair_list_contains(&self->pairs, key, NULL); } static inline PyObject * multidict_tp_iter(MultiDictObject *self) { return multidict_keys_iter_new(self); } static inline PyObject * multidict_tp_richcompare(PyObject *self, PyObject *other, int op) { int cmp; if (op != Py_EQ && op != Py_NE) { Py_RETURN_NOTIMPLEMENTED; } if (self == other) { cmp = 1; if (op == Py_NE) { cmp = !cmp; } return PyBool_FromLong(cmp); } mod_state *state = ((MultiDictObject*)self)->pairs.state; if (AnyMultiDict_Check(state, other)) { cmp = pair_list_eq( &((MultiDictObject*)self)->pairs, &((MultiDictObject*)other)->pairs ); } else if (AnyMultiDictProxy_Check(state, other)) { cmp = pair_list_eq( &((MultiDictObject*)self)->pairs, &((MultiDictProxyObject*)other)->md->pairs ); } else { bool fits = false; fits = PyDict_Check(other); if (!fits) { PyObject *keys = PyMapping_Keys(other); if (keys != NULL) { fits = true; } else { // reset AttributeError exception PyErr_Clear(); } Py_CLEAR(keys); } if (fits) { cmp = 
pair_list_eq_to_mapping(&((MultiDictObject*)self)->pairs, other); } else { cmp = 0; // e.g., multidict is not equal to a list } } if (cmp < 0) { return NULL; } if (op == Py_NE) { cmp = !cmp; } return PyBool_FromLong(cmp); } static inline void multidict_tp_dealloc(MultiDictObject *self) { PyObject_GC_UnTrack(self); Py_TRASHCAN_BEGIN(self, multidict_tp_dealloc) PyObject_ClearWeakRefs((PyObject *)self); pair_list_dealloc(&self->pairs); Py_TYPE(self)->tp_free((PyObject *)self); Py_TRASHCAN_END // there should be no code after this } static inline int multidict_tp_traverse(MultiDictObject *self, visitproc visit, void *arg) { Py_VISIT(Py_TYPE(self)); return pair_list_traverse(&self->pairs, visit, arg); } static inline int multidict_tp_clear(MultiDictObject *self) { return pair_list_clear(&self->pairs); } PyDoc_STRVAR(multidict_getall_doc, "Return a list of all values matching the key."); PyDoc_STRVAR(multidict_getone_doc, "Get first value matching the key."); PyDoc_STRVAR(multidict_get_doc, "Get first value matching the key.\n\nThe method is alias for .getone()."); PyDoc_STRVAR(multidict_keys_doc, "Return a new view of the dictionary's keys."); PyDoc_STRVAR(multidict_items_doc, "Return a new view of the dictionary's items *(key, value) pairs)."); PyDoc_STRVAR(multidict_values_doc, "Return a new view of the dictionary's values."); /******************** MultiDict ********************/ static inline int multidict_tp_init(MultiDictObject *self, PyObject *args, PyObject *kwds) { mod_state *state = get_mod_state_by_def((PyObject *)self); PyObject *arg = NULL; Py_ssize_t size = _multidict_extend_parse_args(args, kwds, "MultiDict", &arg); if (size < 0) { goto fail; } if (pair_list_init(&self->pairs, state, size) < 0) { goto fail; } if (_multidict_extend(self, arg, kwds, "MultiDict", 1) < 0) { goto fail; } Py_CLEAR(arg); return 0; fail: Py_CLEAR(arg); return -1; } static inline PyObject * multidict_add(MultiDictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject 
*kwnames) { PyObject *key = NULL, *val = NULL; if (parse2("add", args, nargs, kwnames, 2, "key", &key, "value", &val) < 0) { return NULL; } if (pair_list_add(&self->pairs, key, val) < 0) { return NULL; } Py_RETURN_NONE; } static inline PyObject * multidict_extend(MultiDictObject *self, PyObject *args, PyObject *kwds) { PyObject *arg = NULL; Py_ssize_t size = _multidict_extend_parse_args(args, kwds, "extend", &arg); if (size < 0) { goto fail; } pair_list_grow(&self->pairs, size); if (_multidict_extend(self, arg, kwds, "extend", 1) < 0) { goto fail; } Py_CLEAR(arg); Py_RETURN_NONE; fail: Py_CLEAR(arg); return NULL; } static inline PyObject * multidict_clear(MultiDictObject *self) { if (pair_list_clear(&self->pairs) < 0) { return NULL; } Py_RETURN_NONE; } static inline PyObject * multidict_setdefault(MultiDictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *key = NULL, *_default = NULL; if (parse2("setdefault", args, nargs, kwnames, 1, "key", &key, "default", &_default) < 0) { return NULL; } return pair_list_set_default(&self->pairs, key, _default); } static inline PyObject * multidict_popone(MultiDictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *key = NULL, *_default = NULL, *ret_val = NULL; if (parse2("popone", args, nargs, kwnames, 1, "key", &key, "default", &_default) < 0) { return NULL; } if (pair_list_pop_one(&self->pairs, key, &ret_val) < 0) { return NULL; } if (ret_val == NULL) { if (_default != NULL) { Py_INCREF(_default); return _default; } else { PyErr_SetObject(PyExc_KeyError, key); return NULL; } } else { return ret_val; } } static inline PyObject * multidict_pop( MultiDictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames ) { PyObject *key = NULL, *_default = NULL, *ret_val = NULL; if (parse2("pop", args, nargs, kwnames, 1, "key", &key, "default", &_default) < 0) { return NULL; } if (pair_list_pop_one(&self->pairs, key, &ret_val) < 0) { return NULL; } if 
(ret_val == NULL) { if (_default != NULL) { Py_INCREF(_default); return _default; } else { PyErr_SetObject(PyExc_KeyError, key); return NULL; } } else { return ret_val; } } static inline PyObject * multidict_popall(MultiDictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *key = NULL, *_default = NULL, *ret_val = NULL; if (parse2("popall", args, nargs, kwnames, 1, "key", &key, "default", &_default) < 0) { return NULL; } if (pair_list_pop_all(&self->pairs, key, &ret_val) < 0) { return NULL; } if (ret_val == NULL) { if (_default != NULL) { Py_INCREF(_default); return _default; } else { PyErr_SetObject(PyExc_KeyError, key); return NULL; } } else { return ret_val; } } static inline PyObject * multidict_popitem(MultiDictObject *self) { return pair_list_pop_item(&self->pairs); } static inline PyObject * multidict_update(MultiDictObject *self, PyObject *args, PyObject *kwds) { PyObject *arg = NULL; if (_multidict_extend_parse_args(args, kwds, "update", &arg) < 0) { goto fail; } if (_multidict_extend(self, arg, kwds, "update", 0) < 0) { goto fail; } Py_CLEAR(arg); Py_RETURN_NONE; fail: Py_CLEAR(arg); return NULL; } PyDoc_STRVAR(multidict_add_doc, "Add the key and value, not overwriting any previous value."); PyDoc_STRVAR(multidict_copy_doc, "Return a copy of itself."); PyDoc_STRVAR(multdicit_method_extend_doc, "Extend current MultiDict with more values.\n\ This method must be used instead of update."); PyDoc_STRVAR(multidict_clear_doc, "Remove all items from MultiDict"); PyDoc_STRVAR(multidict_setdefault_doc, "Return value for key, set value to default if key is not present."); PyDoc_STRVAR(multidict_popone_doc, "Remove the last occurrence of key and return the corresponding value.\n\n\ If key is not found, default is returned if given, otherwise KeyError is \ raised.\n"); PyDoc_STRVAR(multidict_pop_doc, "Remove the last occurrence of key and return the corresponding value.\n\n\ If key is not found, default is returned if given, 
otherwise KeyError is \ raised.\n"); PyDoc_STRVAR(multidict_popall_doc, "Remove all occurrences of key and return the list of corresponding values.\n\n\ If key is not found, default is returned if given, otherwise KeyError is \ raised.\n"); PyDoc_STRVAR(multidict_popitem_doc, "Remove and return an arbitrary (key, value) pair."); PyDoc_STRVAR(multidict_update_doc, "Update the dictionary from *other*, overwriting existing keys."); PyDoc_STRVAR(sizeof__doc__, "D.__sizeof__() -> size of D in memory, in bytes"); static inline PyObject * _multidict_sizeof(MultiDictObject *self) { Py_ssize_t size = sizeof(MultiDictObject); if (self->pairs.pairs != self->pairs.buffer) { size += (Py_ssize_t)sizeof(pair_t) * self->pairs.capacity; } return PyLong_FromSsize_t(size); } static PyMethodDef multidict_methods[] = { { "getall", (PyCFunction)multidict_getall, METH_FASTCALL | METH_KEYWORDS, multidict_getall_doc }, { "getone", (PyCFunction)multidict_getone, METH_FASTCALL | METH_KEYWORDS, multidict_getone_doc }, { "get", (PyCFunction)multidict_get, METH_FASTCALL | METH_KEYWORDS, multidict_get_doc }, { "keys", (PyCFunction)multidict_keys, METH_NOARGS, multidict_keys_doc }, { "items", (PyCFunction)multidict_items, METH_NOARGS, multidict_items_doc }, { "values", (PyCFunction)multidict_values, METH_NOARGS, multidict_values_doc }, { "add", (PyCFunction)multidict_add, METH_FASTCALL | METH_KEYWORDS, multidict_add_doc }, { "copy", (PyCFunction)multidict_copy, METH_NOARGS, multidict_copy_doc }, { "extend", (PyCFunction)multidict_extend, METH_VARARGS | METH_KEYWORDS, multdicit_method_extend_doc }, { "clear", (PyCFunction)multidict_clear, METH_NOARGS, multidict_clear_doc }, { "setdefault", (PyCFunction)multidict_setdefault, METH_FASTCALL | METH_KEYWORDS, multidict_setdefault_doc }, { "popone", (PyCFunction)multidict_popone, METH_FASTCALL | METH_KEYWORDS, multidict_popone_doc }, { "pop", (PyCFunction)multidict_pop, METH_FASTCALL | METH_KEYWORDS, multidict_pop_doc }, { "popall", 
(PyCFunction)multidict_popall, METH_FASTCALL | METH_KEYWORDS, multidict_popall_doc }, { "popitem", (PyCFunction)multidict_popitem, METH_NOARGS, multidict_popitem_doc }, { "update", (PyCFunction)multidict_update, METH_VARARGS | METH_KEYWORDS, multidict_update_doc }, { "__reduce__", (PyCFunction)multidict_reduce, METH_NOARGS, NULL, }, { "__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O | METH_CLASS, NULL }, { "__sizeof__", (PyCFunction)_multidict_sizeof, METH_NOARGS, sizeof__doc__, }, { NULL, NULL } /* sentinel */ }; PyDoc_STRVAR(MultDict_doc, "Dictionary with the support for duplicate keys."); #ifndef MANAGED_WEAKREFS static PyMemberDef multidict_members[] = { {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(MultiDictObject, weaklist), Py_READONLY}, {NULL} /* Sentinel */ }; #endif static PyType_Slot multidict_slots[] = { {Py_tp_dealloc, multidict_tp_dealloc}, {Py_tp_repr, multidict_repr}, {Py_tp_doc, (void *)MultDict_doc}, {Py_sq_contains, multidict_sq_contains}, {Py_mp_length, multidict_mp_len}, {Py_mp_subscript, multidict_mp_subscript}, {Py_mp_ass_subscript, multidict_mp_as_subscript}, {Py_tp_traverse, multidict_tp_traverse}, {Py_tp_clear, multidict_tp_clear}, {Py_tp_richcompare, multidict_tp_richcompare}, {Py_tp_iter, multidict_tp_iter}, {Py_tp_methods, multidict_methods}, {Py_tp_init, multidict_tp_init}, {Py_tp_alloc, PyType_GenericAlloc}, {Py_tp_new, PyType_GenericNew}, {Py_tp_free, PyObject_GC_Del}, #ifndef MANAGED_WEAKREFS {Py_tp_members, multidict_members}, #endif {0, NULL}, }; static PyType_Spec multidict_spec = { .name = "multidict._multidict.MultiDict", .basicsize = sizeof(MultiDictObject), .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE #if PY_VERSION_HEX >= 0x030a00f0 | Py_TPFLAGS_IMMUTABLETYPE #endif #ifdef MANAGED_WEAKREFS | Py_TPFLAGS_MANAGED_WEAKREF #endif | Py_TPFLAGS_HAVE_GC), .slots = multidict_slots, }; /******************** CIMultiDict ********************/ static inline int cimultidict_tp_init(MultiDictObject *self, PyObject *args, 
PyObject *kwds) { mod_state *state = get_mod_state_by_def((PyObject *)self); PyObject *arg = NULL; Py_ssize_t size = _multidict_extend_parse_args(args, kwds, "CIMultiDict", &arg); if (size < 0) { goto fail; } if (ci_pair_list_init(&self->pairs, state, size) < 0) { goto fail; } if (_multidict_extend(self, arg, kwds, "CIMultiDict", 1) < 0) { goto fail; } Py_CLEAR(arg); return 0; fail: Py_CLEAR(arg); return -1; } PyDoc_STRVAR(CIMultDict_doc, "Dictionary with the support for duplicate case-insensitive keys."); static PyType_Slot cimultidict_slots[] = { {Py_tp_doc, (void *)CIMultDict_doc}, {Py_tp_init, cimultidict_tp_init}, {0, NULL}, }; static PyType_Spec cimultidict_spec = { .name = "multidict._multidict.CIMultiDict", .basicsize = sizeof(MultiDictObject), .flags = (Py_TPFLAGS_DEFAULT #if PY_VERSION_HEX >= 0x030a00f0 | Py_TPFLAGS_IMMUTABLETYPE #endif | Py_TPFLAGS_BASETYPE), .slots = cimultidict_slots, }; /******************** MultiDictProxy ********************/ static inline int multidict_proxy_tp_init(MultiDictProxyObject *self, PyObject *args, PyObject *kwds) { mod_state *state = get_mod_state_by_def((PyObject *)self); PyObject *arg = NULL; MultiDictObject *md = NULL; if (!PyArg_UnpackTuple(args, "multidict._multidict.MultiDictProxy", 0, 1, &arg)) { return -1; } if (arg == NULL) { PyErr_Format( PyExc_TypeError, "__init__() missing 1 required positional argument: 'arg'" ); return -1; } if (!AnyMultiDictProxy_Check(state, arg) && !AnyMultiDict_Check(state, arg)) { PyErr_Format( PyExc_TypeError, "ctor requires MultiDict or MultiDictProxy instance, " "not ", Py_TYPE(arg)->tp_name ); return -1; } if (AnyMultiDictProxy_Check(state, arg)) { md = ((MultiDictProxyObject*)arg)->md; } else { md = (MultiDictObject*)arg; } Py_INCREF(md); self->md = md; return 0; } static inline PyObject * multidict_proxy_getall(MultiDictProxyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { return multidict_getall(self->md, args, nargs, kwnames); } static inline 
PyObject * multidict_proxy_getone(MultiDictProxyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { return multidict_getone(self->md, args, nargs, kwnames); } static inline PyObject * multidict_proxy_get(MultiDictProxyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { return multidict_get(self->md, args, nargs, kwnames); } static inline PyObject * multidict_proxy_keys(MultiDictProxyObject *self) { return multidict_keys(self->md); } static inline PyObject * multidict_proxy_items(MultiDictProxyObject *self) { return multidict_items(self->md); } static inline PyObject * multidict_proxy_values(MultiDictProxyObject *self) { return multidict_values(self->md); } static inline PyObject * multidict_proxy_copy(MultiDictProxyObject *self) { return _multidict_proxy_copy(self, self->md->pairs.state->MultiDictType); } static inline PyObject * multidict_proxy_reduce(MultiDictProxyObject *self) { PyErr_Format( PyExc_TypeError, "can't pickle %s objects", Py_TYPE(self)->tp_name ); return NULL; } static inline Py_ssize_t multidict_proxy_mp_len(MultiDictProxyObject *self) { return multidict_mp_len(self->md); } static inline PyObject * multidict_proxy_mp_subscript(MultiDictProxyObject *self, PyObject *key) { return multidict_mp_subscript(self->md, key); } static inline int multidict_proxy_sq_contains(MultiDictProxyObject *self, PyObject *key) { return multidict_sq_contains(self->md, key); } static inline PyObject * multidict_proxy_tp_iter(MultiDictProxyObject *self) { return multidict_tp_iter(self->md); } static inline PyObject * multidict_proxy_tp_richcompare(MultiDictProxyObject *self, PyObject *other, int op) { return multidict_tp_richcompare((PyObject*)self->md, other, op); } static inline void multidict_proxy_tp_dealloc(MultiDictProxyObject *self) { PyObject_GC_UnTrack(self); PyObject_ClearWeakRefs((PyObject *)self); Py_XDECREF(self->md); Py_TYPE(self)->tp_free((PyObject *)self); } static inline int 
multidict_proxy_tp_traverse(MultiDictProxyObject *self, visitproc visit, void *arg) { Py_VISIT(Py_TYPE(self)); Py_VISIT(self->md); return 0; } static inline int multidict_proxy_tp_clear(MultiDictProxyObject *self) { Py_CLEAR(self->md); return 0; } static inline PyObject * multidict_proxy_repr(MultiDictProxyObject *self) { PyObject *name = PyObject_GetAttrString((PyObject*)Py_TYPE(self), "__name__"); if (name == NULL) return NULL; PyObject *ret = pair_list_repr(&self->md->pairs, name, true, true); Py_CLEAR(name); return ret; } static PyMethodDef multidict_proxy_methods[] = { { "getall", (PyCFunction)multidict_proxy_getall, METH_FASTCALL | METH_KEYWORDS, multidict_getall_doc }, { "getone", (PyCFunction)multidict_proxy_getone, METH_FASTCALL | METH_KEYWORDS, multidict_getone_doc }, { "get", (PyCFunction)multidict_proxy_get, METH_FASTCALL | METH_KEYWORDS, multidict_get_doc }, { "keys", (PyCFunction)multidict_proxy_keys, METH_NOARGS, multidict_keys_doc }, { "items", (PyCFunction)multidict_proxy_items, METH_NOARGS, multidict_items_doc }, { "values", (PyCFunction)multidict_proxy_values, METH_NOARGS, multidict_values_doc }, { "copy", (PyCFunction)multidict_proxy_copy, METH_NOARGS, multidict_copy_doc }, { "__reduce__", (PyCFunction)multidict_proxy_reduce, METH_NOARGS, NULL }, { "__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O | METH_CLASS, NULL }, { NULL, NULL } /* sentinel */ }; PyDoc_STRVAR(MultDictProxy_doc, "Read-only proxy for MultiDict instance."); #ifndef MANAGED_WEAKREFS static PyMemberDef multidict_proxy_members[] = { {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(MultiDictProxyObject, weaklist), Py_READONLY}, {NULL} /* Sentinel */ }; #endif static PyType_Slot multidict_proxy_slots[] = { {Py_tp_dealloc, multidict_proxy_tp_dealloc}, {Py_tp_repr, multidict_proxy_repr}, {Py_tp_doc, (void *)MultDictProxy_doc}, {Py_sq_contains, multidict_proxy_sq_contains}, {Py_mp_length, multidict_proxy_mp_len}, {Py_mp_subscript, multidict_proxy_mp_subscript}, {Py_tp_traverse, 
multidict_proxy_tp_traverse}, {Py_tp_clear, multidict_proxy_tp_clear}, {Py_tp_richcompare, multidict_proxy_tp_richcompare}, {Py_tp_iter, multidict_proxy_tp_iter}, {Py_tp_methods, multidict_proxy_methods}, {Py_tp_init, multidict_proxy_tp_init}, {Py_tp_alloc, PyType_GenericAlloc}, {Py_tp_new, PyType_GenericNew}, {Py_tp_free, PyObject_GC_Del}, #ifndef MANAGED_WEAKREFS {Py_tp_members, multidict_proxy_members}, #endif {0, NULL}, }; static PyType_Spec multidict_proxy_spec = { .name = "multidict._multidict.MultiDictProxy", .basicsize = sizeof(MultiDictProxyObject), .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE #if PY_VERSION_HEX >= 0x030a00f0 | Py_TPFLAGS_IMMUTABLETYPE #endif #ifdef MANAGED_WEAKREFS | Py_TPFLAGS_MANAGED_WEAKREF #endif | Py_TPFLAGS_HAVE_GC), .slots = multidict_proxy_slots, }; /******************** CIMultiDictProxy ********************/ static inline int cimultidict_proxy_tp_init(MultiDictProxyObject *self, PyObject *args, PyObject *kwds) { mod_state *state = get_mod_state_by_def((PyObject *)self); PyObject *arg = NULL; MultiDictObject *md = NULL; if (!PyArg_UnpackTuple(args, "multidict._multidict.CIMultiDictProxy", 1, 1, &arg)) { return -1; } if (arg == NULL) { PyErr_Format( PyExc_TypeError, "__init__() missing 1 required positional argument: 'arg'" ); return -1; } if (!CIMultiDictProxy_Check(state, arg) && !CIMultiDict_Check(state, arg)) { PyErr_Format( PyExc_TypeError, "ctor requires CIMultiDict or CIMultiDictProxy instance, " "not ", Py_TYPE(arg)->tp_name ); return -1; } if (CIMultiDictProxy_Check(state, arg)) { md = ((MultiDictProxyObject*)arg)->md; } else { md = (MultiDictObject*)arg; } Py_INCREF(md); self->md = md; return 0; } static inline PyObject * cimultidict_proxy_copy(MultiDictProxyObject *self) { return _multidict_proxy_copy(self, self->md->pairs.state->CIMultiDictType); } PyDoc_STRVAR(CIMultDictProxy_doc, "Read-only proxy for CIMultiDict instance."); PyDoc_STRVAR(cimultidict_proxy_copy_doc, "Return copy of itself"); static PyMethodDef 
cimultidict_proxy_methods[] = { { "copy", (PyCFunction)cimultidict_proxy_copy, METH_NOARGS, cimultidict_proxy_copy_doc }, { NULL, NULL } /* sentinel */ }; static PyType_Slot cimultidict_proxy_slots[] = { {Py_tp_doc, (void *)CIMultDictProxy_doc}, {Py_tp_methods, cimultidict_proxy_methods}, {Py_tp_init, cimultidict_proxy_tp_init}, {0, NULL}, }; static PyType_Spec cimultidict_proxy_spec = { .name = "multidict._multidict.CIMultiDictProxy", .basicsize = sizeof(MultiDictProxyObject), .flags = (Py_TPFLAGS_DEFAULT #if PY_VERSION_HEX >= 0x030a00f0 | Py_TPFLAGS_IMMUTABLETYPE #endif | Py_TPFLAGS_BASETYPE), .slots = cimultidict_proxy_slots, }; /******************** Other functions ********************/ static inline PyObject * getversion(PyObject *self, PyObject *md) { mod_state *state = get_mod_state(self); pair_list_t *pairs = NULL; if (AnyMultiDict_Check(state, md)) { pairs = &((MultiDictObject*)md)->pairs; } else if (AnyMultiDictProxy_Check(state, md)) { pairs = &((MultiDictProxyObject*)md)->md->pairs; } else { PyErr_Format(PyExc_TypeError, "unexpected type"); return NULL; } return PyLong_FromUnsignedLong(pair_list_version(pairs)); } /******************** Module ********************/ static int module_traverse(PyObject *mod, visitproc visit, void *arg) { mod_state *state = get_mod_state(mod); Py_VISIT(state->IStrType); Py_VISIT(state->MultiDictType); Py_VISIT(state->CIMultiDictType); Py_VISIT(state->MultiDictProxyType); Py_VISIT(state->CIMultiDictProxyType); Py_VISIT(state->KeysViewType); Py_VISIT(state->ItemsViewType); Py_VISIT(state->ValuesViewType); Py_VISIT(state->KeysIterType); Py_VISIT(state->ItemsIterType); Py_VISIT(state->ValuesIterType); Py_VISIT(state->str_lower); Py_VISIT(state->str_canonical); return 0; } static int module_clear(PyObject *mod) { mod_state *state = get_mod_state(mod); Py_CLEAR(state->IStrType); Py_CLEAR(state->MultiDictType); Py_CLEAR(state->CIMultiDictType); Py_CLEAR(state->MultiDictProxyType); Py_CLEAR(state->CIMultiDictProxyType); 
Py_CLEAR(state->KeysViewType); Py_CLEAR(state->ItemsViewType); Py_CLEAR(state->ValuesViewType); Py_CLEAR(state->KeysIterType); Py_CLEAR(state->ItemsIterType); Py_CLEAR(state->ValuesIterType); Py_CLEAR(state->str_lower); Py_CLEAR(state->str_canonical); return 0; } static inline void module_free(void *mod) { (void)module_clear((PyObject *)mod); } static PyMethodDef module_methods[] = { {"getversion", (PyCFunction)getversion, METH_O}, {NULL, NULL} /* sentinel */ }; static int module_exec(PyObject *mod) { mod_state *state = get_mod_state(mod); PyObject *tmp; PyObject *tpl = NULL; state->str_lower = PyUnicode_InternFromString("lower"); if (state->str_lower == NULL) { goto fail; } state->str_canonical = PyUnicode_InternFromString("_canonical"); if (state->str_canonical == NULL) { goto fail; } if (multidict_views_init(mod, state) < 0) { goto fail; } if (multidict_iter_init(mod, state) < 0) { goto fail; } if (istr_init(mod, state) < 0) { goto fail; } tmp = PyType_FromModuleAndSpec(mod, &multidict_spec, NULL); if (tmp == NULL) { goto fail; } state->MultiDictType = (PyTypeObject *)tmp; tpl = PyTuple_Pack(1, (PyObject *)state->MultiDictType); if (tpl == NULL) { goto fail; } tmp = PyType_FromModuleAndSpec(mod, &cimultidict_spec, tpl); if (tmp == NULL) { goto fail; } state->CIMultiDictType = (PyTypeObject *)tmp; Py_CLEAR(tpl); tmp = PyType_FromModuleAndSpec(mod, &multidict_proxy_spec, NULL); if (tmp == NULL) { goto fail; } state->MultiDictProxyType = (PyTypeObject *)tmp; tpl = PyTuple_Pack(1, (PyObject *)state->MultiDictProxyType); if (tpl == NULL) { goto fail; } tmp = PyType_FromModuleAndSpec(mod, &cimultidict_proxy_spec, tpl); if (tmp == NULL) { goto fail; } state->CIMultiDictProxyType = (PyTypeObject *)tmp; Py_CLEAR(tpl); if (PyModule_AddType(mod, state->IStrType) < 0) { goto fail; } if (PyModule_AddType(mod, state->MultiDictType) < 0) { goto fail; } if (PyModule_AddType(mod, state->CIMultiDictType) < 0) { goto fail; } if (PyModule_AddType(mod, state->MultiDictProxyType) < 
0) { goto fail; } if (PyModule_AddType(mod, state->CIMultiDictProxyType) < 0) { goto fail; } if (PyModule_AddType(mod, state->ItemsViewType) < 0) { goto fail; } if (PyModule_AddType(mod, state->KeysViewType) < 0) { goto fail; } if (PyModule_AddType(mod, state->ValuesViewType) < 0) { goto fail; } return 0; fail: Py_CLEAR(tpl); return -1; } static struct PyModuleDef_Slot module_slots[] = { {Py_mod_exec, module_exec}, #if PY_VERSION_HEX >= 0x030c00f0 {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, #endif #if PY_VERSION_HEX >= 0x030d00f0 {Py_mod_gil, Py_MOD_GIL_NOT_USED}, #endif {0, NULL}, }; static PyModuleDef multidict_module = { .m_base = PyModuleDef_HEAD_INIT, .m_name = "_multidict", .m_size = sizeof(mod_state), .m_methods = module_methods, .m_slots = module_slots, .m_traverse = module_traverse, .m_clear = module_clear, .m_free = (freefunc)module_free, }; PyMODINIT_FUNC PyInit__multidict(void) { return PyModuleDef_Init(&multidict_module); } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/multidict/_multidict_py.py0000644000175100001660000006516114776033000020353 0ustar00runnerdockerimport enum import reprlib import sys from abc import abstractmethod from array import array from collections.abc import ( Callable, ItemsView, Iterable, Iterator, KeysView, Mapping, ValuesView, ) from typing import ( TYPE_CHECKING, Any, Generic, NoReturn, Optional, TypeVar, Union, cast, overload, ) from ._abc import MDArg, MultiMapping, MutableMultiMapping, SupportsKeys if sys.version_info >= (3, 11): from typing import Self else: from typing_extensions import Self class istr(str): """Case insensitive str.""" __is_istr__ = True __istr_title__: Optional[str] = None _V = TypeVar("_V") _T = TypeVar("_T") _SENTINEL = enum.Enum("_SENTINEL", "sentinel") sentinel = _SENTINEL.sentinel _version = array("Q", [0]) class _Impl(Generic[_V]): __slots__ = ("_items", "_version") def __init__(self) -> None: self._items: 
list[tuple[str, str, _V]] = [] self.incr_version() def incr_version(self) -> None: v = _version v[0] += 1 self._version = v[0] if sys.implementation.name != "pypy": def __sizeof__(self) -> int: return object.__sizeof__(self) + sys.getsizeof(self._items) class _Iter(Generic[_T]): __slots__ = ("_size", "_iter") def __init__(self, size: int, iterator: Iterator[_T]): self._size = size self._iter = iterator def __iter__(self) -> Self: return self def __next__(self) -> _T: return next(self._iter) def __length_hint__(self) -> int: return self._size class _ViewBase(Generic[_V]): def __init__( self, impl: _Impl[_V], identfunc: Callable[[str], str], keyfunc: Callable[[str], str], ): self._impl = impl self._identfunc = identfunc self._keyfunc = keyfunc def __len__(self) -> int: return len(self._impl._items) class _ItemsView(_ViewBase[_V], ItemsView[str, _V]): def __contains__(self, item: object) -> bool: if not isinstance(item, (tuple, list)) or len(item) != 2: return False key, value = item try: ident = self._identfunc(key) except TypeError: return False for i, k, v in self._impl._items: if ident == i and value == v: return True return False def __iter__(self) -> _Iter[tuple[str, _V]]: return _Iter(len(self), self._iter(self._impl._version)) def _iter(self, version: int) -> Iterator[tuple[str, _V]]: for i, k, v in self._impl._items: if version != self._impl._version: raise RuntimeError("Dictionary changed during iteration") yield self._keyfunc(k), v @reprlib.recursive_repr() def __repr__(self) -> str: lst = [] for i, k, v in self._impl._items: lst.append(f"'{k}': {v!r}") body = ", ".join(lst) return f"<{self.__class__.__name__}({body})>" def _parse_item( self, arg: Union[tuple[str, _V], _T] ) -> Optional[tuple[str, str, _V]]: if not isinstance(arg, tuple): return None if len(arg) != 2: return None try: return (self._identfunc(arg[0]), arg[0], arg[1]) except TypeError: return None def _tmp_set(self, it: Iterable[_T]) -> set[tuple[str, _V]]: tmp = set() for arg in it: item = 
self._parse_item(arg) if item is None: continue else: tmp.add((item[0], item[2])) return tmp def __and__(self, other: Iterable[Any]) -> set[tuple[str, _V]]: ret = set() try: it = iter(other) except TypeError: return NotImplemented for arg in it: item = self._parse_item(arg) if item is None: continue identity, key, value = item for i, k, v in self._impl._items: if i == identity and v == value: ret.add((k, v)) return ret def __rand__(self, other: Iterable[_T]) -> set[_T]: ret = set() try: it = iter(other) except TypeError: return NotImplemented for arg in it: item = self._parse_item(arg) if item is None: continue identity, key, value = item for i, k, v in self._impl._items: if i == identity and v == value: ret.add(arg) break return ret def __or__(self, other: Iterable[_T]) -> set[Union[tuple[str, _V], _T]]: ret: set[Union[tuple[str, _V], _T]] = set(self) try: it = iter(other) except TypeError: return NotImplemented for arg in it: item: Optional[tuple[str, str, _V]] = self._parse_item(arg) if item is None: ret.add(arg) continue identity, key, value = item for i, k, v in self._impl._items: if i == identity and v == value: break else: ret.add(arg) return ret def __ror__(self, other: Iterable[_T]) -> set[Union[tuple[str, _V], _T]]: try: ret: set[Union[tuple[str, _V], _T]] = set(other) except TypeError: return NotImplemented tmp = self._tmp_set(ret) for i, k, v in self._impl._items: if (i, v) not in tmp: ret.add((k, v)) return ret def __sub__(self, other: Iterable[_T]) -> set[Union[tuple[str, _V], _T]]: ret: set[Union[tuple[str, _V], _T]] = set() try: it = iter(other) except TypeError: return NotImplemented tmp = self._tmp_set(it) for i, k, v in self._impl._items: if (i, v) not in tmp: ret.add((k, v)) return ret def __rsub__(self, other: Iterable[_T]) -> set[_T]: ret: set[_T] = set() try: it = iter(other) except TypeError: return NotImplemented for arg in it: item = self._parse_item(arg) if item is None: ret.add(arg) continue identity, key, value = item for i, k, v in 
self._impl._items: if i == identity and v == value: break else: ret.add(arg) return ret def __xor__(self, other: Iterable[_T]) -> set[Union[tuple[str, _V], _T]]: try: rgt = set(other) except TypeError: return NotImplemented ret: set[Union[tuple[str, _V], _T]] = self - rgt ret |= rgt - self return ret __rxor__ = __xor__ def isdisjoint(self, other: Iterable[tuple[str, _V]]) -> bool: for arg in other: item = self._parse_item(arg) if item is None: continue identity, key, value = item for i, k, v in self._impl._items: if i == identity and v == value: return False return True class _ValuesView(_ViewBase[_V], ValuesView[_V]): def __contains__(self, value: object) -> bool: for i, k, v in self._impl._items: if v == value: return True return False def __iter__(self) -> _Iter[_V]: return _Iter(len(self), self._iter(self._impl._version)) def _iter(self, version: int) -> Iterator[_V]: for i, k, v in self._impl._items: if version != self._impl._version: raise RuntimeError("Dictionary changed during iteration") yield v @reprlib.recursive_repr() def __repr__(self) -> str: lst = [] for i, k, v in self._impl._items: lst.append(repr(v)) body = ", ".join(lst) return f"<{self.__class__.__name__}({body})>" class _KeysView(_ViewBase[_V], KeysView[str]): def __contains__(self, key: object) -> bool: if not isinstance(key, str): return False identity = self._identfunc(key) for i, k, v in self._impl._items: if i == identity: return True return False def __iter__(self) -> _Iter[str]: return _Iter(len(self), self._iter(self._impl._version)) def _iter(self, version: int) -> Iterator[str]: for i, k, v in self._impl._items: if version != self._impl._version: raise RuntimeError("Dictionary changed during iteration") yield self._keyfunc(k) def __repr__(self) -> str: lst = [] for i, k, v in self._impl._items: lst.append(f"'{k}'") body = ", ".join(lst) return f"<{self.__class__.__name__}({body})>" def __and__(self, other: Iterable[object]) -> set[str]: ret = set() try: it = iter(other) except 
TypeError: return NotImplemented for key in it: if not isinstance(key, str): continue identity = self._identfunc(key) for i, k, v in self._impl._items: if i == identity: ret.add(k) return ret def __rand__(self, other: Iterable[_T]) -> set[_T]: ret = set() try: it = iter(other) except TypeError: return NotImplemented for key in it: if not isinstance(key, str): continue identity = self._identfunc(key) for i, k, v in self._impl._items: if i == identity: ret.add(key) return cast(set[_T], ret) def __or__(self, other: Iterable[_T]) -> set[Union[str, _T]]: ret: set[Union[str, _T]] = set(self) try: it = iter(other) except TypeError: return NotImplemented for key in it: if not isinstance(key, str): ret.add(key) continue identity = self._identfunc(key) for i, k, v in self._impl._items: if i == identity: break else: ret.add(key) return ret def __ror__(self, other: Iterable[_T]) -> set[Union[str, _T]]: try: ret: set[Union[str, _T]] = set(other) except TypeError: return NotImplemented tmp = set() for key in ret: if not isinstance(key, str): continue identity = self._identfunc(key) tmp.add(identity) for i, k, v in self._impl._items: if i not in tmp: ret.add(k) return ret def __sub__(self, other: Iterable[object]) -> set[str]: ret = set(self) try: it = iter(other) except TypeError: return NotImplemented for key in it: if not isinstance(key, str): continue identity = self._identfunc(key) for i, k, v in self._impl._items: if i == identity: ret.discard(k) break return ret def __rsub__(self, other: Iterable[_T]) -> set[_T]: try: ret: set[_T] = set(other) except TypeError: return NotImplemented for key in other: if not isinstance(key, str): continue identity = self._identfunc(key) for i, k, v in self._impl._items: if i == identity: ret.discard(key) # type: ignore[arg-type] break return ret def __xor__(self, other: Iterable[_T]) -> set[Union[str, _T]]: try: rgt = set(other) except TypeError: return NotImplemented ret: set[Union[str, _T]] = self - rgt # type: ignore[assignment] ret |= 
rgt - self return ret __rxor__ = __xor__ def isdisjoint(self, other: Iterable[object]) -> bool: for key in other: if not isinstance(key, str): continue identity = self._identfunc(key) for i, k, v in self._impl._items: if i == identity: return False return True class _CSMixin: def _key(self, key: str) -> str: return key def _title(self, key: str) -> str: if isinstance(key, str): return key else: raise TypeError("MultiDict keys should be either str or subclasses of str") class _CIMixin: _ci: bool = True def _key(self, key: str) -> str: if type(key) is istr: return key else: return istr(key) def _title(self, key: str) -> str: if isinstance(key, istr): ret = key.__istr_title__ if ret is None: ret = key.title() key.__istr_title__ = ret return ret if isinstance(key, str): return key.title() else: raise TypeError("MultiDict keys should be either str or subclasses of str") class _Base(MultiMapping[_V]): _impl: _Impl[_V] _ci: bool = False @abstractmethod def _key(self, key: str) -> str: ... @abstractmethod def _title(self, key: str) -> str: ... @overload def getall(self, key: str) -> list[_V]: ... @overload def getall(self, key: str, default: _T) -> Union[list[_V], _T]: ... def getall( self, key: str, default: Union[_T, _SENTINEL] = sentinel ) -> Union[list[_V], _T]: """Return a list of all values matching the key.""" identity = self._title(key) res = [v for i, k, v in self._impl._items if i == identity] if res: return res if not res and default is not sentinel: return default raise KeyError("Key not found: %r" % key) @overload def getone(self, key: str) -> _V: ... @overload def getone(self, key: str, default: _T) -> Union[_V, _T]: ... def getone( self, key: str, default: Union[_T, _SENTINEL] = sentinel ) -> Union[_V, _T]: """Get first value matching the key. Raises KeyError if the key is not found and no default is provided. 
""" identity = self._title(key) for i, k, v in self._impl._items: if i == identity: return v if default is not sentinel: return default raise KeyError("Key not found: %r" % key) # Mapping interface # def __getitem__(self, key: str) -> _V: return self.getone(key) @overload def get(self, key: str, /) -> Union[_V, None]: ... @overload def get(self, key: str, /, default: _T) -> Union[_V, _T]: ... def get(self, key: str, default: Union[_T, None] = None) -> Union[_V, _T, None]: """Get first value matching the key. If the key is not found, returns the default (or None if no default is provided) """ return self.getone(key, default) def __iter__(self) -> Iterator[str]: return iter(self.keys()) def __len__(self) -> int: return len(self._impl._items) def keys(self) -> KeysView[str]: """Return a new view of the dictionary's keys.""" return _KeysView(self._impl, self._title, self._key) def items(self) -> ItemsView[str, _V]: """Return a new view of the dictionary's items *(key, value) pairs).""" return _ItemsView(self._impl, self._title, self._key) def values(self) -> _ValuesView[_V]: """Return a new view of the dictionary's values.""" return _ValuesView(self._impl, self._title, self._key) def __eq__(self, other: object) -> bool: if not isinstance(other, Mapping): return NotImplemented if isinstance(other, _Base): lft = self._impl._items rht = other._impl._items if len(lft) != len(rht): return False for (i1, k2, v1), (i2, k2, v2) in zip(lft, rht): if i1 != i2 or v1 != v2: return False return True if len(self._impl._items) != len(other): return False for k, v in self.items(): nv = other.get(k, sentinel) if v != nv: return False return True def __contains__(self, key: object) -> bool: if not isinstance(key, str): return False identity = self._title(key) for i, k, v in self._impl._items: if i == identity: return True return False @reprlib.recursive_repr() def __repr__(self) -> str: body = ", ".join(f"'{k}': {v!r}" for i, k, v in self._impl._items) return 
f"<{self.__class__.__name__}({body})>" class MultiDict(_CSMixin, _Base[_V], MutableMultiMapping[_V]): """Dictionary with the support for duplicate keys.""" def __init__(self, arg: MDArg[_V] = None, /, **kwargs: _V): self._impl = _Impl() self._extend(arg, kwargs, self.__class__.__name__, self._extend_items) if sys.implementation.name != "pypy": def __sizeof__(self) -> int: return object.__sizeof__(self) + sys.getsizeof(self._impl) def __reduce__(self) -> tuple[type[Self], tuple[list[tuple[str, _V]]]]: return (self.__class__, (list(self.items()),)) def add(self, key: str, value: _V) -> None: identity = self._title(key) self._impl._items.append((identity, key, value)) self._impl.incr_version() def copy(self) -> Self: """Return a copy of itself.""" cls = self.__class__ return cls(self.items()) __copy__ = copy def extend(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None: """Extend current MultiDict with more values. This method must be used instead of update. """ self._extend(arg, kwargs, "extend", self._extend_items) def _extend( self, arg: MDArg[_V], kwargs: Mapping[str, _V], name: str, method: Callable[[list[tuple[str, str, _V]]], None], ) -> None: if arg: if isinstance(arg, (MultiDict, MultiDictProxy)): if self._ci is not arg._ci: items = [(self._title(k), k, v) for _, k, v in arg._impl._items] else: items = arg._impl._items if kwargs: items = items.copy() if kwargs: for key, value in kwargs.items(): items.append((self._title(key), key, value)) else: if hasattr(arg, "keys"): arg = cast(SupportsKeys[_V], arg) arg = [(k, arg[k]) for k in arg.keys()] if kwargs: arg = list(arg) arg.extend(list(kwargs.items())) items = [] for pos, item in enumerate(arg): if not len(item) == 2: raise ValueError( f"multidict update sequence element #{pos}" f"has length {len(item)}; 2 is required" ) items.append((self._title(item[0]), item[0], item[1])) method(items) else: method([(self._title(key), key, value) for key, value in kwargs.items()]) def _extend_items(self, items: 
Iterable[tuple[str, str, _V]]) -> None: for identity, key, value in items: self._impl._items.append((identity, key, value)) self._impl.incr_version() def clear(self) -> None: """Remove all items from MultiDict.""" self._impl._items.clear() self._impl.incr_version() # Mapping interface # def __setitem__(self, key: str, value: _V) -> None: self._replace(key, value) def __delitem__(self, key: str) -> None: identity = self._title(key) items = self._impl._items found = False for i in range(len(items) - 1, -1, -1): if items[i][0] == identity: del items[i] found = True if not found: raise KeyError(key) else: self._impl.incr_version() @overload def setdefault( self: "MultiDict[Union[_T, None]]", key: str, default: None = None ) -> Union[_T, None]: ... @overload def setdefault(self, key: str, default: _V) -> _V: ... def setdefault(self, key: str, default: Union[_V, None] = None) -> Union[_V, None]: # type: ignore[misc] """Return value for key, set value to default if key is not present.""" identity = self._title(key) for i, k, v in self._impl._items: if i == identity: return v self.add(key, default) # type: ignore[arg-type] return default @overload def popone(self, key: str) -> _V: ... @overload def popone(self, key: str, default: _T) -> Union[_V, _T]: ... def popone( self, key: str, default: Union[_T, _SENTINEL] = sentinel ) -> Union[_V, _T]: """Remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. """ identity = self._title(key) for i in range(len(self._impl._items)): if self._impl._items[i][0] == identity: value = self._impl._items[i][2] del self._impl._items[i] self._impl.incr_version() return value if default is sentinel: raise KeyError(key) else: return default # Type checking will inherit signature for pop() if we don't confuse it here. if not TYPE_CHECKING: pop = popone @overload def popall(self, key: str) -> list[_V]: ... 
@overload def popall(self, key: str, default: _T) -> Union[list[_V], _T]: ... def popall( self, key: str, default: Union[_T, _SENTINEL] = sentinel ) -> Union[list[_V], _T]: """Remove all occurrences of key and return the list of corresponding values. If key is not found, default is returned if given, otherwise KeyError is raised. """ found = False identity = self._title(key) ret = [] for i in range(len(self._impl._items) - 1, -1, -1): item = self._impl._items[i] if item[0] == identity: ret.append(item[2]) del self._impl._items[i] self._impl.incr_version() found = True if not found: if default is sentinel: raise KeyError(key) else: return default else: ret.reverse() return ret def popitem(self) -> tuple[str, _V]: """Remove and return an arbitrary (key, value) pair.""" if self._impl._items: i, k, v = self._impl._items.pop() self._impl.incr_version() return self._key(k), v else: raise KeyError("empty multidict") def update(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None: """Update the dictionary from *other*, overwriting existing keys.""" self._extend(arg, kwargs, "update", self._update_items) def _update_items(self, items: list[tuple[str, str, _V]]) -> None: if not items: return used_keys: dict[str, int] = {} for identity, key, value in items: start = used_keys.get(identity, 0) for i in range(start, len(self._impl._items)): item = self._impl._items[i] if item[0] == identity: used_keys[identity] = i + 1 self._impl._items[i] = (identity, key, value) break else: self._impl._items.append((identity, key, value)) used_keys[identity] = len(self._impl._items) # drop tails i = 0 while i < len(self._impl._items): item = self._impl._items[i] identity = item[0] pos = used_keys.get(identity) if pos is None: i += 1 continue if i >= pos: del self._impl._items[i] else: i += 1 self._impl.incr_version() def _replace(self, key: str, value: _V) -> None: identity = self._title(key) items = self._impl._items for i in range(len(items)): item = items[i] if item[0] == identity: 
items[i] = (identity, key, value) # i points to last found item rgt = i self._impl.incr_version() break else: self._impl._items.append((identity, key, value)) self._impl.incr_version() return # remove all tail items # Mypy bug: https://github.com/python/mypy/issues/14209 i = rgt + 1 # type: ignore[possibly-undefined] while i < len(items): item = items[i] if item[0] == identity: del items[i] else: i += 1 class CIMultiDict(_CIMixin, MultiDict[_V]): """Dictionary with the support for duplicate case-insensitive keys.""" class MultiDictProxy(_CSMixin, _Base[_V]): """Read-only proxy for MultiDict instance.""" def __init__(self, arg: Union[MultiDict[_V], "MultiDictProxy[_V]"]): if not isinstance(arg, (MultiDict, MultiDictProxy)): raise TypeError( "ctor requires MultiDict or MultiDictProxy instance" f", not {type(arg)}" ) self._impl = arg._impl def __reduce__(self) -> NoReturn: raise TypeError(f"can't pickle {self.__class__.__name__} objects") def copy(self) -> MultiDict[_V]: """Return a copy of itself.""" return MultiDict(self.items()) class CIMultiDictProxy(_CIMixin, MultiDictProxy[_V]): """Read-only proxy for CIMultiDict instance.""" def __init__(self, arg: Union[MultiDict[_V], MultiDictProxy[_V]]): if not isinstance(arg, (CIMultiDict, CIMultiDictProxy)): raise TypeError( "ctor requires CIMultiDict or CIMultiDictProxy instance" f", not {type(arg)}" ) self._impl = arg._impl def copy(self) -> CIMultiDict[_V]: """Return a copy of itself.""" return CIMultiDict(self.items()) def getversion(md: Union[MultiDict[object], MultiDictProxy[object]]) -> int: if not isinstance(md, _Base): raise TypeError("Parameter should be multidict or proxy") return md._impl._version ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744320002.8021798 multidict-6.4.3/multidict/_multilib/0000755000175100001660000000000014776033003017106 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 
multidict-6.4.3/multidict/_multilib/dict.h0000644000175100001660000000105314776033000020176 0ustar00runnerdocker#ifndef _MULTIDICT_C_H #define _MULTIDICT_C_H #ifdef __cplusplus extern "C" { #endif #include "pythoncapi_compat.h" #include "pair_list.h" #if PY_VERSION_HEX >= 0x030c00f0 #define MANAGED_WEAKREFS #endif typedef struct { // 16 or 24 for GC prefix PyObject_HEAD // 16 #ifndef MANAGED_WEAKREFS PyObject *weaklist; #endif pair_list_t pairs; } MultiDictObject; typedef struct { PyObject_HEAD #ifndef MANAGED_WEAKREFS PyObject *weaklist; #endif MultiDictObject *md; } MultiDictProxyObject; #ifdef __cplusplus } #endif #endif ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/multidict/_multilib/istr.h0000644000175100001660000000707114776033000020242 0ustar00runnerdocker#ifndef _MULTIDICT_ISTR_H #define _MULTIDICT_ISTR_H #ifdef __cplusplus extern "C" { #endif #include "state.h" typedef struct { PyUnicodeObject str; PyObject * canonical; mod_state *state; } istrobject; #define IStr_CheckExact(state, obj) Py_IS_TYPE(obj, state->IStrType) #define IStr_Check(state, obj) \ (IStr_CheckExact(state, obj) || PyObject_TypeCheck(obj, state->IStrType)) PyDoc_STRVAR(istr__doc__, "istr class implementation"); static inline void istr_dealloc(istrobject *self) { Py_XDECREF(self->canonical); PyUnicode_Type.tp_dealloc((PyObject*)self); } static inline PyObject * istr_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { PyObject *mod = PyType_GetModuleByDef(type, &multidict_module); if (mod == NULL) { return NULL; } mod_state *state = get_mod_state(mod); PyObject *x = NULL; static char *kwlist[] = {"object", "encoding", "errors", 0}; PyObject *encoding = NULL; PyObject *errors = NULL; PyObject *canonical = NULL; PyObject *ret = NULL; if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOO:str", kwlist, &x, &encoding, &errors)) { return NULL; } if (x != NULL && IStr_Check(state, x)) { Py_INCREF(x); return x; } ret = 
PyUnicode_Type.tp_new(type, args, kwds); if (!ret) { goto fail; } canonical = PyObject_CallMethodNoArgs(ret, state->str_lower); if (!canonical) { goto fail; } ((istrobject*)ret)->canonical = canonical; ((istrobject*)ret)->state = state; return ret; fail: Py_XDECREF(ret); return NULL; } static inline PyObject * istr_reduce(PyObject *self) { PyObject *str = NULL; PyObject *args = NULL; PyObject *result = NULL; str = PyUnicode_FromObject(self); if (str == NULL) { goto ret; } args = PyTuple_Pack(1, str); if (args == NULL) { goto ret; } result = PyTuple_Pack(2, Py_TYPE(self), args); ret: Py_CLEAR(str); Py_CLEAR(args); return result; } static PyMethodDef istr_methods[] = { {"__reduce__", (PyCFunction)istr_reduce, METH_NOARGS, NULL}, {NULL, NULL} /* sentinel */ }; static PyType_Slot istr_slots[] = { {Py_tp_dealloc, istr_dealloc}, {Py_tp_doc, (void *)istr__doc__}, {Py_tp_methods, istr_methods}, {Py_tp_new, istr_new}, {0, NULL}, }; static PyType_Spec istr_spec = { .name = "multidict._multidict.istr", .basicsize = sizeof(istrobject), .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE #if PY_VERSION_HEX >= 0x030a00f0 | Py_TPFLAGS_IMMUTABLETYPE #endif | Py_TPFLAGS_UNICODE_SUBCLASS), .slots = istr_slots, }; static inline PyObject * IStr_New(mod_state *state, PyObject *str, PyObject *canonical) { PyObject *args = NULL; PyObject *res = NULL; args = PyTuple_Pack(1, str); if (args == NULL) { goto ret; } res = PyUnicode_Type.tp_new(state->IStrType, args, NULL); if (!res) { goto ret; } Py_INCREF(canonical); ((istrobject*)res)->canonical = canonical; ((istrobject*)res)->state = state; ret: Py_CLEAR(args); return res; } static inline int istr_init(PyObject *module, mod_state *state) { PyObject *tpl = PyTuple_Pack(1, (PyObject *)&PyUnicode_Type); if (tpl == NULL) { return -1; } PyObject *tmp = PyType_FromModuleAndSpec(module, &istr_spec, tpl); Py_DECREF(tpl); if (tmp == NULL) { return -1; } state->IStrType = (PyTypeObject *)tmp; return 0; } #ifdef __cplusplus } #endif #endif 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/multidict/_multilib/iter.h0000644000175100001660000001433614776033000020226 0ustar00runnerdocker#ifndef _MULTIDICT_ITER_H #define _MULTIDICT_ITER_H #ifdef __cplusplus extern "C" { #endif #include "dict.h" #include "pair_list.h" #include "state.h" typedef struct multidict_iter { PyObject_HEAD MultiDictObject *md; // MultiDict or CIMultiDict pair_list_pos_t current; } MultidictIter; static inline void _init_iter(MultidictIter *it, MultiDictObject *md) { Py_INCREF(md); it->md = md; pair_list_init_pos(&md->pairs, &it->current); } static inline PyObject * multidict_items_iter_new(MultiDictObject *md) { MultidictIter *it = PyObject_GC_New( MultidictIter, md->pairs.state->ItemsIterType); if (it == NULL) { return NULL; } _init_iter(it, md); PyObject_GC_Track(it); return (PyObject *)it; } static inline PyObject * multidict_keys_iter_new(MultiDictObject *md) { MultidictIter *it = PyObject_GC_New( MultidictIter, md->pairs.state->KeysIterType); if (it == NULL) { return NULL; } _init_iter(it, md); PyObject_GC_Track(it); return (PyObject *)it; } static inline PyObject * multidict_values_iter_new(MultiDictObject *md) { MultidictIter *it = PyObject_GC_New( MultidictIter, md->pairs.state->ValuesIterType); if (it == NULL) { return NULL; } _init_iter(it, md); PyObject_GC_Track(it); return (PyObject *)it; } static inline PyObject * multidict_items_iter_iternext(MultidictIter *self) { PyObject *key = NULL; PyObject *value = NULL; PyObject *ret = NULL; int res = pair_list_next(&self->md->pairs, &self->current, NULL, &key, &value); if (res < 0) { return NULL; } if (res == 0) { Py_CLEAR(key); Py_CLEAR(value); PyErr_SetNone(PyExc_StopIteration); return NULL; } ret = PyTuple_Pack(2, key, value); Py_CLEAR(key); Py_CLEAR(value); if (ret == NULL) { return NULL; } return ret; } static inline PyObject * multidict_values_iter_iternext(MultidictIter *self) { PyObject *value = NULL; int res 
= pair_list_next(&self->md->pairs, &self->current, NULL, NULL, &value); if (res < 0) { return NULL; } if (res == 0) { PyErr_SetNone(PyExc_StopIteration); return NULL; } return value; } static inline PyObject * multidict_keys_iter_iternext(MultidictIter *self) { PyObject *key = NULL; int res = pair_list_next(&self->md->pairs, &self->current, NULL, &key, NULL); if (res < 0) { return NULL; } if (res == 0) { PyErr_SetNone(PyExc_StopIteration); return NULL; } return key; } static inline void multidict_iter_dealloc(MultidictIter *self) { PyObject_GC_UnTrack(self); Py_XDECREF(self->md); PyObject_GC_Del(self); } static inline int multidict_iter_traverse(MultidictIter *self, visitproc visit, void *arg) { Py_VISIT(self->md); return 0; } static inline int multidict_iter_clear(MultidictIter *self) { Py_CLEAR(self->md); return 0; } static inline PyObject * multidict_iter_len(MultidictIter *self) { return PyLong_FromLong(pair_list_len(&self->md->pairs)); } PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); static PyMethodDef multidict_iter_methods[] = { { "__length_hint__", (PyCFunction)(void(*)(void))multidict_iter_len, METH_NOARGS, length_hint_doc }, { NULL, NULL } /* sentinel */ }; /***********************************************************************/ static PyType_Slot multidict_items_iter_slots[] = { {Py_tp_dealloc, multidict_iter_dealloc}, {Py_tp_methods, multidict_iter_methods}, {Py_tp_traverse, multidict_iter_traverse}, {Py_tp_clear, multidict_iter_clear}, {Py_tp_iter, PyObject_SelfIter}, {Py_tp_iternext, multidict_items_iter_iternext}, {0, NULL}, }; static PyType_Spec multidict_items_iter_spec = { .name = "multidict._multidict._itemsiter", .basicsize = sizeof(MultidictIter), .flags = (Py_TPFLAGS_DEFAULT #if PY_VERSION_HEX >= 0x030a00f0 | Py_TPFLAGS_IMMUTABLETYPE #endif | Py_TPFLAGS_HAVE_GC), .slots = multidict_items_iter_slots, }; static PyType_Slot multidict_values_iter_slots[] = { {Py_tp_dealloc, multidict_iter_dealloc}, 
{Py_tp_methods, multidict_iter_methods}, {Py_tp_traverse, multidict_iter_traverse}, {Py_tp_clear, multidict_iter_clear}, {Py_tp_iter, PyObject_SelfIter}, {Py_tp_iternext, multidict_values_iter_iternext}, {0, NULL}, }; static PyType_Spec multidict_values_iter_spec = { .name = "multidict._multidict._valuesiter", .basicsize = sizeof(MultidictIter), .flags = (Py_TPFLAGS_DEFAULT #if PY_VERSION_HEX >= 0x030a00f0 | Py_TPFLAGS_IMMUTABLETYPE #endif | Py_TPFLAGS_HAVE_GC), .slots = multidict_values_iter_slots, }; static PyType_Slot multidict_keys_iter_slots[] = { {Py_tp_dealloc, multidict_iter_dealloc}, {Py_tp_methods, multidict_iter_methods}, {Py_tp_traverse, multidict_iter_traverse}, {Py_tp_clear, multidict_iter_clear}, {Py_tp_iter, PyObject_SelfIter}, {Py_tp_iternext, multidict_keys_iter_iternext}, {0, NULL}, }; static PyType_Spec multidict_keys_iter_spec = { .name = "multidict._multidict._keysiter", .basicsize = sizeof(MultidictIter), .flags = (Py_TPFLAGS_DEFAULT #if PY_VERSION_HEX >= 0x030a00f0 | Py_TPFLAGS_IMMUTABLETYPE #endif | Py_TPFLAGS_HAVE_GC), .slots = multidict_keys_iter_slots, }; static inline int multidict_iter_init(PyObject *module, mod_state *state) { PyObject * tmp; tmp = PyType_FromModuleAndSpec(module, &multidict_items_iter_spec, NULL); if (tmp == NULL) { return -1; } state->ItemsIterType = (PyTypeObject *)tmp; tmp = PyType_FromModuleAndSpec(module, &multidict_values_iter_spec, NULL); if (tmp == NULL) { return -1; } state->ValuesIterType = (PyTypeObject *)tmp; tmp = PyType_FromModuleAndSpec(module, &multidict_keys_iter_spec, NULL); if (tmp == NULL) { return -1; } state->KeysIterType = (PyTypeObject *)tmp; return 0; } #ifdef __cplusplus } #endif #endif ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/multidict/_multilib/pair_list.h0000644000175100001660000011276514776033000021256 0ustar00runnerdocker#include "pythoncapi_compat.h" #ifndef _MULTIDICT_PAIR_LIST_H #define _MULTIDICT_PAIR_LIST_H #ifdef 
__cplusplus extern "C" { #endif #include #include #include #include #include "istr.h" #include "state.h" /* Implementation note. identity always has exact PyUnicode_Type type, not a subclass. It guarantees that identity hashing and comparison never calls Python code back, and these operations has no weird side effects, e.g. deletion the key from multidict. Taking into account the fact that all multidict operations except repr(md), repr(md_proxy), or repr(view) never access to the key itself but identity instead, borrowed references during iteration over pair_list for, e.g., md.get() or md.pop() is safe. */ typedef struct pair { PyObject *identity; // 8 PyObject *key; // 8 PyObject *value; // 8 Py_hash_t hash; // 8 } pair_t; /* Note about the structure size With 28 pairs the MultiDict object size is slightly less than 1KiB To fit into 512 bytes, the structure can contain only 13 pairs which is too small, e.g. https://www.python.org returns 16 headers (9 of them are caching proxy information though). The embedded buffer intention is to fit the vast majority of possible HTTP headers into the buffer without allocating an extra memory block. */ #define EMBEDDED_CAPACITY 28 typedef struct pair_list { mod_state *state; Py_ssize_t capacity; Py_ssize_t size; uint64_t version; bool calc_ci_indentity; pair_t *pairs; pair_t buffer[EMBEDDED_CAPACITY]; } pair_list_t; #define MIN_CAPACITY 64 #define CAPACITY_STEP MIN_CAPACITY /* Global counter used to set ma_version_tag field of dictionary. * It is incremented each time that a dictionary is created and each * time that a dictionary is modified. 
*/ static uint64_t pair_list_global_version = 0; #define NEXT_VERSION() (++pair_list_global_version) typedef struct pair_list_pos { Py_ssize_t pos; uint64_t version; } pair_list_pos_t; static inline int str_cmp(PyObject *s1, PyObject *s2) { PyObject *ret = PyUnicode_RichCompare(s1, s2, Py_EQ); if (Py_IsTrue(ret)) { Py_DECREF(ret); return 1; } else if (ret == NULL) { return -1; } else { Py_DECREF(ret); return 0; } } static inline PyObject * _key_to_ident(mod_state *state, PyObject *key) { if (IStr_Check(state, key)) { return Py_NewRef(((istrobject*)key)->canonical); } if (PyUnicode_CheckExact(key)) { return Py_NewRef(key); } if (PyUnicode_Check(key)) { return PyUnicode_FromObject(key); } PyErr_SetString(PyExc_TypeError, "MultiDict keys should be either str " "or subclasses of str"); return NULL; } static inline PyObject * _ci_key_to_ident(mod_state *state, PyObject *key) { if (IStr_Check(state, key)) { return Py_NewRef(((istrobject*)key)->canonical); } if (PyUnicode_Check(key)) { PyObject *ret = PyObject_CallMethodNoArgs(key, state->str_lower); if (!PyUnicode_CheckExact(ret)) { PyObject *tmp = PyUnicode_FromObject(ret); Py_CLEAR(ret); if (tmp == NULL) { return NULL; } ret = tmp; } return ret; } PyErr_SetString(PyExc_TypeError, "CIMultiDict keys should be either str " "or subclasses of str"); return NULL; } static inline PyObject * _arg_to_key(mod_state *state, PyObject *key, PyObject *ident) { if (PyUnicode_Check(key)) { return Py_NewRef(key); } PyErr_SetString(PyExc_TypeError, "MultiDict keys should be either str " "or subclasses of str"); return NULL; } static inline PyObject * _ci_arg_to_key(mod_state *state, PyObject *key, PyObject *ident) { if (IStr_Check(state, key)) { return Py_NewRef(key); } if (PyUnicode_Check(key)) { return IStr_New(state, key, ident); } PyErr_SetString(PyExc_TypeError, "CIMultiDict keys should be either str " "or subclasses of str"); return NULL; } static inline int pair_list_grow(pair_list_t *list, Py_ssize_t amount) { // Grow by one 
element if needed Py_ssize_t capacity = ((Py_ssize_t)((list->size + amount) / CAPACITY_STEP) + 1) * CAPACITY_STEP; pair_t *new_pairs; if (list->size + amount -1 < list->capacity) { return 0; } if (list->pairs == list->buffer) { new_pairs = PyMem_New(pair_t, (size_t)capacity); memcpy(new_pairs, list->buffer, (size_t)list->capacity * sizeof(pair_t)); list->pairs = new_pairs; list->capacity = capacity; return 0; } else { new_pairs = PyMem_Resize(list->pairs, pair_t, (size_t)capacity); if (NULL == new_pairs) { // Resizing error return -1; } list->pairs = new_pairs; list->capacity = capacity; return 0; } } static inline int pair_list_shrink(pair_list_t *list) { // Shrink by one element if needed. // Optimization is applied to prevent jitter // (grow-shrink-grow-shrink on adding-removing the single element // when the buffer is full). // To prevent this, the buffer is resized if the size is less than the capacity // by 2*CAPACITY_STEP factor. // The switch back to embedded buffer is never performed for both reasons: // the code simplicity and the jitter prevention. 
pair_t *new_pairs; Py_ssize_t new_capacity; if (list->capacity - list->size < 2 * CAPACITY_STEP) { return 0; } new_capacity = list->capacity - CAPACITY_STEP; if (new_capacity < MIN_CAPACITY) { return 0; } new_pairs = PyMem_Resize(list->pairs, pair_t, (size_t)new_capacity); if (NULL == new_pairs) { // Resizing error return -1; } list->pairs = new_pairs; list->capacity = new_capacity; return 0; } static inline int _pair_list_init(pair_list_t *list, mod_state *state, bool calc_ci_identity, Py_ssize_t preallocate) { list->state = state; list->calc_ci_indentity = calc_ci_identity; Py_ssize_t capacity = EMBEDDED_CAPACITY; if (preallocate >= capacity) { capacity = ((Py_ssize_t)(preallocate / CAPACITY_STEP) + 1) * CAPACITY_STEP; list->pairs = PyMem_New(pair_t, (size_t)capacity); } else { list->pairs = list->buffer; } list->capacity = capacity; list->size = 0; list->version = NEXT_VERSION(); return 0; } static inline int pair_list_init(pair_list_t *list, mod_state *state, Py_ssize_t size) { return _pair_list_init(list, state, /* calc_ci_identity = */ false, size); } static inline int ci_pair_list_init(pair_list_t *list, mod_state *state, Py_ssize_t size) { return _pair_list_init(list, state, /* calc_ci_identity = */ true, size); } static inline PyObject * pair_list_calc_identity(pair_list_t *list, PyObject *key) { if (list->calc_ci_indentity) return _ci_key_to_ident(list->state, key); return _key_to_ident(list->state, key); } static inline PyObject * pair_list_calc_key(pair_list_t *list, PyObject *key, PyObject *ident) { if (list->calc_ci_indentity) return _ci_arg_to_key(list->state, key, ident); return _arg_to_key(list->state, key, ident); } static inline void pair_list_dealloc(pair_list_t *list) { Py_ssize_t pos; for (pos = 0; pos < list->size; pos++) { pair_t *pair = list->pairs + pos; Py_CLEAR(pair->identity); Py_CLEAR(pair->key); Py_CLEAR(pair->value); } /* Strictly speaking, resetting size and capacity and assigning pairs to buffer is not necessary. 
Do it to consistency and idemotency. The cleanup doesn't hurt performance. !!! !!! The buffer deletion is crucial though. !!! */ list->size = 0; if (list->pairs != list->buffer) { PyMem_Free(list->pairs); list->pairs = list->buffer; list->capacity = EMBEDDED_CAPACITY; } } static inline Py_ssize_t pair_list_len(pair_list_t *list) { return list->size; } static inline int _pair_list_add_with_hash_steal_refs(pair_list_t *list, PyObject *identity, PyObject *key, PyObject *value, Py_hash_t hash) { if (pair_list_grow(list, 1) < 0) { return -1; } pair_t *pair = list->pairs + list->size; pair->identity = identity; pair->key = key; pair->value = value; pair->hash = hash; list->version = NEXT_VERSION(); list->size += 1; return 0; } static inline int _pair_list_add_with_hash(pair_list_t *list, PyObject *identity, PyObject *key, PyObject *value, Py_hash_t hash) { Py_INCREF(identity); Py_INCREF(key); Py_INCREF(value); return _pair_list_add_with_hash_steal_refs(list, identity, key, value, hash); } static inline int pair_list_add(pair_list_t *list, PyObject *key, PyObject *value) { PyObject *identity = pair_list_calc_identity(list, key); if (identity == NULL) { goto fail; } Py_hash_t hash = PyObject_Hash(identity); if (hash == -1) { goto fail; } int ret = _pair_list_add_with_hash(list, identity, key, value, hash); Py_DECREF(identity); return ret; fail: Py_XDECREF(identity); return -1; } static inline int pair_list_del_at(pair_list_t *list, Py_ssize_t pos) { // return 1 on success, -1 on failure pair_t *pair = list->pairs + pos; Py_DECREF(pair->identity); Py_DECREF(pair->key); Py_DECREF(pair->value); list->size -= 1; list->version = NEXT_VERSION(); if (list->size == pos) { // remove from tail, no need to shift body return 0; } Py_ssize_t tail = list->size - pos; // TODO: raise an error if tail < 0 memmove((void *)(list->pairs + pos), (void *)(list->pairs + pos + 1), sizeof(pair_t) * (size_t)tail); return pair_list_shrink(list); } static inline int _pair_list_drop_tail(pair_list_t 
*list, PyObject *identity, Py_hash_t hash, Py_ssize_t pos) { // return 1 if deleted, 0 if not found int found = 0; if (pos >= list->size) { return 0; } for (; pos < list->size; pos++) { pair_t *pair = list->pairs + pos; if (pair->hash != hash) { continue; } int ret = str_cmp(pair->identity, identity); if (ret > 0) { if (pair_list_del_at(list, pos) < 0) { return -1; } found = 1; pos--; } else if (ret == -1) { return -1; } } return found; } static inline int pair_list_del(pair_list_t *list, PyObject *key) { PyObject *identity = pair_list_calc_identity(list, key); if (identity == NULL) { goto fail; } Py_hash_t hash = PyObject_Hash(identity); if (hash == -1) { goto fail; } int ret = _pair_list_drop_tail(list, identity, hash, 0); if (ret < 0) { goto fail; } else if (ret == 0) { PyErr_SetObject(PyExc_KeyError, key); goto fail; } else { list->version = NEXT_VERSION(); } Py_DECREF(identity); return 0; fail: Py_XDECREF(identity); return -1; } static inline uint64_t pair_list_version(pair_list_t *list) { return list->version; } static inline void pair_list_init_pos(pair_list_t *list, pair_list_pos_t *pos) { pos->pos = 0; pos->version = list->version; } static inline int pair_list_next(pair_list_t *list, pair_list_pos_t *pos, PyObject **pidentity, PyObject **pkey, PyObject **pvalue) { if (pos->pos >= list->size) { if (pidentity) { *pidentity = NULL; } if (pkey) { *pkey = NULL; } if (pvalue) { *pvalue = NULL; } return 0; } if (pos->version != list->version) { if (pidentity) { *pidentity = NULL; } if (pkey) { *pkey = NULL; } if (pvalue) { *pvalue = NULL; } PyErr_SetString(PyExc_RuntimeError, "MultiDict changed during iteration"); return -1; } pair_t *pair = list->pairs + pos->pos; if (pidentity) { *pidentity = Py_NewRef(pair->identity);; } if (pkey) { PyObject *key = pair_list_calc_key(list, pair->key, pair->identity); if (key == NULL) { return -1; } if (key != pair->key) { Py_SETREF(pair->key, key); } else { Py_CLEAR(key); } *pkey = Py_NewRef(pair->key); } if (pvalue) { 
*pvalue = Py_NewRef(pair->value); } ++pos->pos; return 1; } static inline int pair_list_next_by_identity(pair_list_t *list, pair_list_pos_t *pos, PyObject *identity, PyObject **pkey, PyObject **pvalue) { if (pos->pos >= list->size) { if (pkey) { *pkey = NULL; } if (pvalue) { *pvalue = NULL; } return 0; } if (pos->version != list->version) { if (pkey) { *pkey = NULL; } if (pvalue) { *pvalue = NULL; } PyErr_SetString(PyExc_RuntimeError, "MultiDict changed during iteration"); return -1; } for (; pos->pos < list->size; ++pos->pos) { pair_t *pair = list->pairs + pos->pos; PyObject *ret = PyUnicode_RichCompare(identity, pair->identity, Py_EQ); if (Py_IsFalse(ret)) { Py_DECREF(ret); continue; } else if (ret == NULL) { return -1; } else { // equals Py_DECREF(ret); } if (pkey) { PyObject *key = pair_list_calc_key(list, pair->key, pair->identity); if (key == NULL) { return -1; } if (key != pair->key) { Py_SETREF(pair->key, key); } else { Py_CLEAR(key); } *pkey = Py_NewRef(pair->key); } if (pvalue) { *pvalue = Py_NewRef(pair->value); } ++pos->pos; return 1; } if (pkey) { *pkey = NULL; } if (pvalue) { *pvalue = NULL; } return 0; } static inline int pair_list_contains(pair_list_t *list, PyObject *key, PyObject **pret) { Py_ssize_t pos; if (!PyUnicode_Check(key)) { return 0; } PyObject *ident = pair_list_calc_identity(list, key); if (ident == NULL) { goto fail; } Py_hash_t hash = PyObject_Hash(ident); if (hash == -1) { goto fail; } Py_ssize_t size = pair_list_len(list); for(pos = 0; pos < size; ++pos) { pair_t * pair = list->pairs + pos; if (hash != pair->hash) { continue; } int tmp = str_cmp(ident, pair->identity); if (tmp > 0) { Py_DECREF(ident); if (pret != NULL) { *pret = Py_NewRef(pair->key); } return 1; } else if (tmp < 0) { goto fail; } } Py_DECREF(ident); if (pret != NULL) { *pret = NULL; } return 0; fail: Py_XDECREF(ident); if (pret != NULL) { *pret = NULL; } return -1; } static inline int pair_list_get_one(pair_list_t *list, PyObject *key, PyObject **ret) { Py_ssize_t 
pos; PyObject *ident = pair_list_calc_identity(list, key); if (ident == NULL) { goto fail; } Py_hash_t hash = PyObject_Hash(ident); if (hash == -1) { goto fail; } Py_ssize_t size = pair_list_len(list); for(pos = 0; pos < size; ++pos) { pair_t *pair = list->pairs + pos; if (hash != pair->hash) { continue; } int tmp = str_cmp(ident, pair->identity); if (tmp > 0) { Py_DECREF(ident); *ret = Py_NewRef(pair->value); return 0; } else if (tmp < 0) { goto fail; } } Py_DECREF(ident); return 0; fail: Py_XDECREF(ident); return -1; } static inline int pair_list_get_all(pair_list_t *list, PyObject *key, PyObject **ret) { Py_ssize_t pos; PyObject *res = NULL; PyObject *ident = pair_list_calc_identity(list, key); if (ident == NULL) { goto fail; } Py_hash_t hash = PyObject_Hash(ident); if (hash == -1) { goto fail; } Py_ssize_t size = pair_list_len(list); for(pos = 0; pos < size; ++pos) { pair_t *pair = list->pairs + pos; if (hash != pair->hash) { continue; } int tmp = str_cmp(ident, pair->identity); if (tmp > 0) { if (res == NULL) { res = PyList_New(1); if (res == NULL) { goto fail; } if (PyList_SetItem(res, 0, Py_NewRef(pair->value)) < 0) { goto fail; } } else if (PyList_Append(res, pair->value) < 0) { goto fail; } } else if (tmp < 0) { goto fail; } } if (res != NULL) { *ret = res; } Py_DECREF(ident); return 0; fail: Py_XDECREF(ident); Py_XDECREF(res); return -1; } static inline PyObject * pair_list_set_default(pair_list_t *list, PyObject *key, PyObject *value) { Py_ssize_t pos; PyObject *ident = pair_list_calc_identity(list, key); if (ident == NULL) { goto fail; } Py_hash_t hash = PyObject_Hash(ident); if (hash == -1) { goto fail; } Py_ssize_t size = pair_list_len(list); for(pos = 0; pos < size; ++pos) { pair_t * pair = list->pairs + pos; if (hash != pair->hash) { continue; } int tmp = str_cmp(ident, pair->identity); if (tmp > 0) { Py_DECREF(ident); return Py_NewRef(pair->value); } else if (tmp < 0) { goto fail; } } if (_pair_list_add_with_hash(list, ident, key, value, hash) < 0) 
{ goto fail; } Py_DECREF(ident); return Py_NewRef(value); fail: Py_XDECREF(ident); return NULL; } static inline int pair_list_pop_one(pair_list_t *list, PyObject *key, PyObject **ret) { Py_ssize_t pos; PyObject *value = NULL; PyObject *ident = pair_list_calc_identity(list, key); if (ident == NULL) { goto fail; } Py_hash_t hash = PyObject_Hash(ident); if (hash == -1) { goto fail; } for (pos=0; pos < list->size; pos++) { pair_t *pair = list->pairs + pos; if (pair->hash != hash) { continue; } int tmp = str_cmp(ident, pair->identity); if (tmp > 0) { value = Py_NewRef(pair->value); if (pair_list_del_at(list, pos) < 0) { goto fail; } Py_DECREF(ident); *ret = value; return 0; } else if (tmp < 0) { goto fail; } } return 0; fail: Py_XDECREF(value); Py_XDECREF(ident); return -1; } static inline int pair_list_pop_all(pair_list_t *list, PyObject *key, PyObject ** ret) { Py_ssize_t pos; PyObject *lst = NULL; PyObject *ident = pair_list_calc_identity(list, key); if (ident == NULL) { goto fail; } Py_hash_t hash = PyObject_Hash(ident); if (hash == -1) { goto fail; } if (list->size == 0) { Py_DECREF(ident); return 0; } for (pos = list->size - 1; pos >= 0; pos--) { pair_t *pair = list->pairs + pos; if (hash != pair->hash) { continue; } int tmp = str_cmp(ident, pair->identity); if (tmp > 0) { if (lst == NULL) { lst = PyList_New(1); if (lst == NULL) { goto fail; } if (PyList_SetItem(lst, 0, Py_NewRef(pair->value)) < 0) { goto fail; } } else if (PyList_Append(lst, pair->value) < 0) { goto fail; } if (pair_list_del_at(list, pos) < 0) { goto fail; } } else if (tmp < 0) { goto fail; } } if (lst != NULL) { if (PyList_Reverse(lst) < 0) { goto fail; } } *ret = lst; Py_DECREF(ident); return 0; fail: Py_XDECREF(ident); Py_XDECREF(lst); return -1; } static inline PyObject * pair_list_pop_item(pair_list_t *list) { if (list->size == 0) { PyErr_SetString(PyExc_KeyError, "empty multidict"); return NULL; } Py_ssize_t pos = list->size - 1; pair_t *pair = list->pairs + pos; PyObject *key = 
pair_list_calc_key(list, pair->key, pair->identity); if (key == NULL) { return NULL; } PyObject *ret = PyTuple_Pack(2, key, pair->value); Py_CLEAR(key); if (ret == NULL) { return NULL; } if (pair_list_del_at(list, pos) < 0) { Py_DECREF(ret); return NULL; } return ret; } static inline int pair_list_replace(pair_list_t *list, PyObject * key, PyObject *value) { Py_ssize_t pos; int found = 0; PyObject *identity = pair_list_calc_identity(list, key); if (identity == NULL) { goto fail; } Py_hash_t hash = PyObject_Hash(identity); if (hash == -1) { goto fail; } for (pos = 0; pos < list->size; pos++) { pair_t *pair = list->pairs + pos; if (hash != pair->hash) { continue; } int tmp = str_cmp(identity, pair->identity); if (tmp > 0) { found = 1; Py_SETREF(pair->key, Py_NewRef(key)); Py_SETREF(pair->value, Py_NewRef(value)); break; } else if (tmp < 0) { goto fail; } } if (!found) { if (_pair_list_add_with_hash(list, identity, key, value, hash) < 0) { goto fail; } Py_DECREF(identity); return 0; } else { list->version = NEXT_VERSION(); if (_pair_list_drop_tail(list, identity, hash, pos+1) < 0) { goto fail; } Py_DECREF(identity); return 0; } fail: Py_XDECREF(identity); return -1; } static inline int _dict_set_number(PyObject *dict, PyObject *key, Py_ssize_t num) { PyObject *tmp = PyLong_FromSsize_t(num); if (tmp == NULL) { return -1; } if (PyDict_SetItem(dict, key, tmp) < 0) { Py_DECREF(tmp); return -1; } Py_DECREF(tmp); return 0; } static inline int pair_list_post_update(pair_list_t *list, PyObject* used) { PyObject *tmp = NULL; Py_ssize_t pos; for (pos = 0; pos < list->size; pos++) { pair_t *pair = list->pairs + pos; int status = PyDict_GetItemRef(used, pair->identity, &tmp); if (status == -1) { // exception set return -1; } else if (status == 0) { // not found continue; } Py_ssize_t num = PyLong_AsSsize_t(tmp); Py_DECREF(tmp); if (num == -1) { if (!PyErr_Occurred()) { PyErr_SetString(PyExc_RuntimeError, "invalid internal state"); } return -1; } if (pos >= num) { // del self[pos] 
if (pair_list_del_at(list, pos) < 0) { return -1; } pos--; } } list->version = NEXT_VERSION(); return 0; } // TODO: need refactoring function name static inline int _pair_list_update(pair_list_t *list, PyObject *key, PyObject *value, PyObject *used, PyObject *identity, Py_hash_t hash) { PyObject *item = NULL; Py_ssize_t pos; int found; int status = PyDict_GetItemRef(used, identity, &item); if (status == -1) { // exception set return -1; } else if (status == 0) { // not found pos = 0; } else { pos = PyLong_AsSsize_t(item); Py_DECREF(item); if (pos == -1) { if (!PyErr_Occurred()) { PyErr_SetString(PyExc_RuntimeError, "invalid internal state"); } return -1; } } found = 0; for (; pos < list->size; pos++) { pair_t *pair = list->pairs + pos; if (pair->hash != hash) { continue; } int ident_cmp_res = str_cmp(pair->identity, identity); if (ident_cmp_res > 0) { Py_SETREF(pair->key, Py_NewRef(key)); Py_SETREF(pair->value, Py_NewRef(value)); if (_dict_set_number(used, pair->identity, pos + 1) < 0) { return -1; } found = 1; break; } else if (ident_cmp_res < 0) { return -1; } } if (!found) { if (_pair_list_add_with_hash(list, identity, key, value, hash) < 0) { return -1; } if (_dict_set_number(used, identity, list->size) < 0) { return -1; } } return 0; } static inline int pair_list_update_from_pair_list(pair_list_t *list, PyObject* used, pair_list_t *other) { Py_ssize_t pos; Py_hash_t hash; PyObject *identity = NULL; PyObject *key = NULL; bool recalc_identity = list->calc_ci_indentity != other->calc_ci_indentity; for (pos = 0; pos < other->size; pos++) { pair_t *pair = other->pairs + pos; if (recalc_identity) { identity = pair_list_calc_identity(list, pair->key); if (identity == NULL) { goto fail; } hash = PyObject_Hash(identity); if (hash == -1) { goto fail; } /* materialize key */ key = pair_list_calc_key(other, pair->key, identity); if (key == NULL) { goto fail; } } else { identity = pair->identity; hash = pair->hash; key = pair->key; } if (used != NULL) { if 
(_pair_list_update(list, key, pair->value, used, identity, hash) < 0) { goto fail; } } else { if (_pair_list_add_with_hash(list, identity, key, pair->value, hash) < 0) { goto fail; } } if (recalc_identity) { Py_CLEAR(identity); Py_CLEAR(key); } } return 0; fail: if (recalc_identity) { Py_CLEAR(identity); Py_CLEAR(key); } return -1; } static inline int pair_list_update_from_dict(pair_list_t *list, PyObject* used, PyObject *kwds) { Py_ssize_t pos = 0; PyObject *identity = NULL; PyObject *key = NULL; PyObject *value = NULL; while(PyDict_Next(kwds, &pos, &key, &value)) { Py_INCREF(key); identity = pair_list_calc_identity(list, key); if (identity == NULL) { goto fail; } Py_hash_t hash = PyObject_Hash(identity); if (hash == -1) { goto fail; } if (used != NULL) { if (_pair_list_update(list, key, value, used, identity, hash) < 0) { goto fail; } } else { if (_pair_list_add_with_hash(list, identity, key, value, hash) < 0) { goto fail; } } Py_CLEAR(identity); Py_CLEAR(key); } return 0; fail: Py_CLEAR(identity); Py_CLEAR(key); return -1; } static inline void _err_not_sequence(Py_ssize_t i) { PyErr_Format(PyExc_TypeError, "multidict cannot convert sequence element #%zd" " to a sequence", i); } static inline void _err_bad_length(Py_ssize_t i, Py_ssize_t n) { PyErr_Format(PyExc_ValueError, "multidict update sequence element #%zd " "has length %zd; 2 is required", i, n); } static inline void _err_cannot_fetch(Py_ssize_t i, const char * name) { PyErr_Format(PyExc_ValueError, "multidict update sequence element #%zd's " "%s could not be fetched", name, i); } static int _pair_list_parse_item(Py_ssize_t i, PyObject *item, PyObject **pkey, PyObject **pvalue) { Py_ssize_t n; if (PyList_CheckExact(item)) { n = PyList_GET_SIZE(item); if (n != 2) { _err_bad_length(i, n); goto fail; } *pkey = Py_NewRef(PyList_GET_ITEM(item, 0)); *pvalue = Py_NewRef(PyList_GET_ITEM(item, 1)); } else if (PyTuple_CheckExact(item)) { n = PyTuple_GET_SIZE(item); if (n != 2) { _err_bad_length(i, n); goto fail; } 
*pkey = Py_NewRef(PyTuple_GET_ITEM(item, 0)); *pvalue = Py_NewRef(PyTuple_GET_ITEM(item, 1)); } else { if (!PySequence_Check(item)) { _err_not_sequence(i); goto fail; } n = PySequence_Size(item); if (n != 2) { _err_bad_length(i, n); goto fail; } *pkey = PySequence_ITEM(item, 0); *pvalue = PySequence_ITEM(item, 1); if (*pkey == NULL) { _err_cannot_fetch(i, "key"); goto fail; } if (*pvalue == NULL) { _err_cannot_fetch(i, "value"); goto fail; } } return 0; fail: Py_CLEAR(*pkey); Py_CLEAR(*pvalue); return -1; } static inline int pair_list_update_from_seq(pair_list_t *list, PyObject *used, PyObject *seq) { PyObject *it = NULL; PyObject *item = NULL; // seq[i] PyObject *key = NULL; PyObject *value = NULL; PyObject *identity = NULL; Py_ssize_t i; Py_ssize_t size = -1; enum {LIST, TUPLE, ITER} kind; if (PyList_CheckExact(seq)) { kind = LIST; size = PyList_GET_SIZE(seq); } else if (PyTuple_CheckExact(seq)) { kind = TUPLE; size = PyTuple_GET_SIZE(seq); } else { kind = ITER; it = PyObject_GetIter(seq); if (it == NULL) { goto fail; } } for (i = 0; ; ++i) { // i - index into seq of current element switch (kind) { case LIST: if (i >= size) { goto exit; } item = PyList_GET_ITEM(seq, i); if (item == NULL) { goto fail; } Py_INCREF(item); break; case TUPLE: if (i >= size) { goto exit; } item = PyTuple_GET_ITEM(seq, i); if (item == NULL) { goto fail; } Py_INCREF(item); break; case ITER: item = PyIter_Next(it); if (item == NULL) { if (PyErr_Occurred()) { goto fail; } goto exit; } } if (_pair_list_parse_item(i, item, &key, &value) < 0) { goto fail; } identity = pair_list_calc_identity(list, key); if (identity == NULL) { goto fail; } Py_hash_t hash = PyObject_Hash(identity); if (hash == -1) { goto fail; } if (used) { if (_pair_list_update(list, key, value, used, identity, hash) < 0) { goto fail; } Py_CLEAR(identity); Py_CLEAR(key); Py_CLEAR(value); } else { if (_pair_list_add_with_hash_steal_refs(list, identity, key, value, hash) < 0) { goto fail; } identity = NULL; key = NULL; value = 
NULL; } Py_CLEAR(item); } exit: Py_CLEAR(it); return 0; fail: Py_CLEAR(identity); Py_CLEAR(it); Py_CLEAR(item); Py_CLEAR(key); Py_CLEAR(value); return -1; } static inline int pair_list_eq(pair_list_t *list, pair_list_t *other) { Py_ssize_t pos; if (list == other) { return 1; } Py_ssize_t size = pair_list_len(list); if (size != pair_list_len(other)) { return 0; } for(pos = 0; pos < size; ++pos) { pair_t *pair1 = list->pairs + pos; pair_t *pair2 = other->pairs +pos; if (pair1->hash != pair2->hash) { return 0; } int cmp = PyObject_RichCompareBool(pair1->identity, pair2->identity, Py_EQ); if (cmp < 0) { return -1; }; if (cmp == 0) { return 0; } cmp = PyObject_RichCompareBool(pair1->value, pair2->value, Py_EQ); if (cmp < 0) { return -1; }; if (cmp == 0) { return 0; } } return 1; } static inline int pair_list_eq_to_mapping(pair_list_t *list, PyObject *other) { PyObject *key = NULL; PyObject *avalue = NULL; PyObject *bvalue; Py_ssize_t other_len; if (!PyMapping_Check(other)) { PyErr_Format(PyExc_TypeError, "other argument must be a mapping, not %s", Py_TYPE(other)->tp_name); return -1; } other_len = PyMapping_Size(other); if (other_len < 0) { return -1; } if (pair_list_len(list) != other_len) { return 0; } pair_list_pos_t pos; pair_list_init_pos(list, &pos); for(;;) { int ret = pair_list_next(list, &pos, NULL, &key, &avalue); if (ret < 0) { return -1; } if (ret == 0) { break; } ret = PyMapping_GetOptionalItem(other, key, &bvalue); Py_CLEAR(key); if (ret < 0) { Py_CLEAR(avalue); return -1; } if (bvalue == NULL) { Py_CLEAR(avalue); return 0; } int eq = PyObject_RichCompareBool(avalue, bvalue, Py_EQ); Py_CLEAR(bvalue); Py_CLEAR(avalue); if (eq <= 0) { return eq; } } return 1; } static inline PyObject * pair_list_repr(pair_list_t *list, PyObject *name, bool show_keys, bool show_values) { PyObject *key = NULL; PyObject *value = NULL; bool comma = false; Py_ssize_t pos; uint64_t version = list->version; PyUnicodeWriter *writer = PyUnicodeWriter_Create(1024); if (writer == NULL) 
return NULL; if (PyUnicodeWriter_WriteChar(writer, '<') <0) goto fail; if (PyUnicodeWriter_WriteStr(writer, name) <0) goto fail; if (PyUnicodeWriter_WriteChar(writer, '(') <0) goto fail; for (pos = 0; pos < list->size; ++pos) { if (version != list->version) { PyErr_SetString(PyExc_RuntimeError, "MultiDict changed during iteration"); return NULL; } pair_t *pair = list->pairs + pos; key = Py_NewRef(pair->key); value = Py_NewRef(pair->value); if (comma) { if (PyUnicodeWriter_WriteChar(writer, ',') <0) goto fail; if (PyUnicodeWriter_WriteChar(writer, ' ') <0) goto fail; } if (show_keys) { if (PyUnicodeWriter_WriteChar(writer, '\'') <0) goto fail; /* Don't need to convert key to istr, the text is the same*/ if (PyUnicodeWriter_WriteStr(writer, key) <0) goto fail; if (PyUnicodeWriter_WriteChar(writer, '\'') <0) goto fail; } if (show_keys && show_values) { if (PyUnicodeWriter_WriteChar(writer, ':') <0) goto fail; if (PyUnicodeWriter_WriteChar(writer, ' ') <0) goto fail; } if (show_values) { if (PyUnicodeWriter_WriteRepr(writer, value) <0) goto fail; } comma = true; Py_CLEAR(key); Py_CLEAR(value); } if (PyUnicodeWriter_WriteChar(writer, ')') <0) goto fail; if (PyUnicodeWriter_WriteChar(writer, '>') <0) goto fail; return PyUnicodeWriter_Finish(writer); fail: Py_CLEAR(key); Py_CLEAR(value); PyUnicodeWriter_Discard(writer); return NULL; } /***********************************************************************/ static inline int pair_list_traverse(pair_list_t *list, visitproc visit, void *arg) { pair_t *pair = NULL; Py_ssize_t pos; for (pos = 0; pos < list->size; pos++) { pair = list->pairs + pos; // Don't need traverse the identity: it is a terminal Py_VISIT(pair->key); Py_VISIT(pair->value); } return 0; } static inline int pair_list_clear(pair_list_t *list) { pair_t *pair = NULL; Py_ssize_t pos; if (list->size == 0) { return 0; } list->version = NEXT_VERSION(); for (pos = 0; pos < list->size; pos++) { pair = list->pairs + pos; Py_CLEAR(pair->key); Py_CLEAR(pair->identity); 
Py_CLEAR(pair->value); } list->size = 0; if (list->pairs != list->buffer) { PyMem_Free(list->pairs); list->pairs = list->buffer; } return 0; } #ifdef __cplusplus } #endif #endif ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/multidict/_multilib/parser.h0000644000175100001660000001073014776033000020551 0ustar00runnerdocker#ifndef _MULTIDICT_PARSER_H #define _MULTIDICT_PARSER_H #ifdef __cplusplus extern "C" { #endif static int raise_unexpected_kwarg(const char *fname, PyObject* argname) { PyErr_Format(PyExc_TypeError, "%.150s() got an unexpected keyword argument '%.150U'", fname, argname); return -1; } static int raise_missing_posarg(const char *fname, const char* argname) { PyErr_Format(PyExc_TypeError, "%.150s() missing 1 required positional argument: '%.150s'", fname, argname); return -1; } /* Parse FASTCALL|METH_KEYWORDS arguments as two args, the first arg is mandatory and the second one is optional. If the second arg is not passed it remains NULL pointer. The parser accepts three forms: 1. all positional args, 2. fist positional, second keyword-arg 3. all named keyword args. 
*/ static int parse2(const char* fname, PyObject*const *args, Py_ssize_t nargs, PyObject *kwnames, Py_ssize_t minargs, const char* arg1name, PyObject **arg1, const char* arg2name, PyObject **arg2 ) { assert(minargs>=1); assert(minargs<=2); if (kwnames != NULL) { Py_ssize_t kwsize = PyTuple_Size(kwnames); if (kwsize < 0) { return -1; } PyObject *argname; // borrowed ref if (kwsize == 2) { /* All args are passed by keyword, possible combinations: arg1, arg2 and arg2, arg1 */ argname = PyTuple_GetItem(kwnames, 0); if (argname == NULL) { return -1; } if (PyUnicode_CompareWithASCIIString(argname, arg1name) == 0) { argname = PyTuple_GetItem(kwnames, 1); if (argname == NULL) { return -1; } if (PyUnicode_CompareWithASCIIString(argname, arg2name) == 0) { *arg1 = args[0]; *arg2 = args[1]; return 0; } else { return raise_unexpected_kwarg(fname, argname); } } else if (PyUnicode_CompareWithASCIIString(argname, arg2name) == 0) { argname = PyTuple_GetItem(kwnames, 1); if (argname == NULL) { return -1; } if (PyUnicode_CompareWithASCIIString(argname, arg1name) == 0) { *arg1 = args[1]; *arg2 = args[0]; return 0; } else { return raise_unexpected_kwarg(fname, argname); } } else { return raise_unexpected_kwarg(fname, argname); } } else { // kwsize == 1 argname = PyTuple_GetItem(kwnames, 0); if (argname == NULL) { return -1; } if (nargs == 1) { if (PyUnicode_CompareWithASCIIString(argname, arg2name) == 0) { *arg1 = args[0]; *arg2 = args[1]; return 0; } else { return raise_unexpected_kwarg(fname, argname); } } else { // nargs == 0 if (PyUnicode_CompareWithASCIIString(argname, arg1name) == 0) { *arg1 = args[0]; *arg2 = NULL; return 0; } else { return raise_missing_posarg(fname, arg1name); } } } } else { if (nargs < 1) { PyErr_Format(PyExc_TypeError, "%.150s() missing 1 required positional argument: '%s'", fname, arg1name); return -1; } if (nargs < minargs || nargs > 2) { const char* txt; if (minargs == 2) { txt = "from 1 to 2 positional arguments"; } else { txt = "exactly 1 positional 
argument"; } PyErr_Format(PyExc_TypeError, "%.150s() takes %s but %zd were given", fname, txt, nargs); return -1; } *arg1 = args[0]; if (nargs == 2) { *arg2 = args[1]; } else { *arg2 = NULL; } return 0; } } #ifdef __cplusplus } #endif #endif ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/multidict/_multilib/pythoncapi_compat.h0000644000175100001660000016733114776033000023010 0ustar00runnerdocker// Header file providing new C API functions to old Python versions. // // File distributed under the Zero Clause BSD (0BSD) license. // Copyright Contributors to the pythoncapi_compat project. // // Homepage: // https://github.com/python/pythoncapi_compat // // Latest version: // https://raw.githubusercontent.com/python/pythoncapi-compat/main/pythoncapi_compat.h // // SPDX-License-Identifier: 0BSD #ifndef PYTHONCAPI_COMPAT #define PYTHONCAPI_COMPAT #ifdef __cplusplus extern "C" { #endif #include #include // offsetof() // Python 3.11.0b4 added PyFrame_Back() to Python.h #if PY_VERSION_HEX < 0x030b00B4 && !defined(PYPY_VERSION) # include "frameobject.h" // PyFrameObject, PyFrame_GetBack() #endif #if PY_VERSION_HEX < 0x030C00A3 # include // T_SHORT, READONLY #endif #ifndef _Py_CAST # define _Py_CAST(type, expr) ((type)(expr)) #endif // Static inline functions should use _Py_NULL rather than using directly NULL // to prevent C++ compiler warnings. On C23 and newer and on C++11 and newer, // _Py_NULL is defined as nullptr. #ifndef _Py_NULL # if (defined (__STDC_VERSION__) && __STDC_VERSION__ > 201710L) \ || (defined(__cplusplus) && __cplusplus >= 201103) # define _Py_NULL nullptr # else # define _Py_NULL NULL # endif #endif // Cast argument to PyObject* type. 
#ifndef _PyObject_CAST # define _PyObject_CAST(op) _Py_CAST(PyObject*, op) #endif #ifndef Py_BUILD_ASSERT # define Py_BUILD_ASSERT(cond) \ do { \ (void)sizeof(char [1 - 2 * !(cond)]); \ } while(0) #endif // bpo-42262 added Py_NewRef() to Python 3.10.0a3 #if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_NewRef) static inline PyObject* _Py_NewRef(PyObject *obj) { Py_INCREF(obj); return obj; } #define Py_NewRef(obj) _Py_NewRef(_PyObject_CAST(obj)) #endif // bpo-42262 added Py_XNewRef() to Python 3.10.0a3 #if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_XNewRef) static inline PyObject* _Py_XNewRef(PyObject *obj) { Py_XINCREF(obj); return obj; } #define Py_XNewRef(obj) _Py_XNewRef(_PyObject_CAST(obj)) #endif // bpo-39573 added Py_SET_REFCNT() to Python 3.9.0a4 #if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_REFCNT) static inline void _Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) { ob->ob_refcnt = refcnt; } #define Py_SET_REFCNT(ob, refcnt) _Py_SET_REFCNT(_PyObject_CAST(ob), refcnt) #endif // Py_SETREF() and Py_XSETREF() were added to Python 3.5.2. // It is excluded from the limited C API. #if (PY_VERSION_HEX < 0x03050200 && !defined(Py_SETREF)) && !defined(Py_LIMITED_API) #define Py_SETREF(dst, src) \ do { \ PyObject **_tmp_dst_ptr = _Py_CAST(PyObject**, &(dst)); \ PyObject *_tmp_dst = (*_tmp_dst_ptr); \ *_tmp_dst_ptr = _PyObject_CAST(src); \ Py_DECREF(_tmp_dst); \ } while (0) #define Py_XSETREF(dst, src) \ do { \ PyObject **_tmp_dst_ptr = _Py_CAST(PyObject**, &(dst)); \ PyObject *_tmp_dst = (*_tmp_dst_ptr); \ *_tmp_dst_ptr = _PyObject_CAST(src); \ Py_XDECREF(_tmp_dst); \ } while (0) #endif // bpo-43753 added Py_Is(), Py_IsNone(), Py_IsTrue() and Py_IsFalse() // to Python 3.10.0b1. 
#if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_Is) # define Py_Is(x, y) ((x) == (y)) #endif #if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_IsNone) # define Py_IsNone(x) Py_Is(x, Py_None) #endif #if (PY_VERSION_HEX < 0x030A00B1 || defined(PYPY_VERSION)) && !defined(Py_IsTrue) # define Py_IsTrue(x) Py_Is(x, Py_True) #endif #if (PY_VERSION_HEX < 0x030A00B1 || defined(PYPY_VERSION)) && !defined(Py_IsFalse) # define Py_IsFalse(x) Py_Is(x, Py_False) #endif // bpo-39573 added Py_SET_TYPE() to Python 3.9.0a4 #if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_TYPE) static inline void _Py_SET_TYPE(PyObject *ob, PyTypeObject *type) { ob->ob_type = type; } #define Py_SET_TYPE(ob, type) _Py_SET_TYPE(_PyObject_CAST(ob), type) #endif // bpo-39573 added Py_SET_SIZE() to Python 3.9.0a4 #if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_SIZE) static inline void _Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size) { ob->ob_size = size; } #define Py_SET_SIZE(ob, size) _Py_SET_SIZE((PyVarObject*)(ob), size) #endif // bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1 #if PY_VERSION_HEX < 0x030900B1 || defined(PYPY_VERSION) static inline PyCodeObject* PyFrame_GetCode(PyFrameObject *frame) { assert(frame != _Py_NULL); assert(frame->f_code != _Py_NULL); return _Py_CAST(PyCodeObject*, Py_NewRef(frame->f_code)); } #endif static inline PyCodeObject* _PyFrame_GetCodeBorrow(PyFrameObject *frame) { PyCodeObject *code = PyFrame_GetCode(frame); Py_DECREF(code); return code; } // bpo-40421 added PyFrame_GetBack() to Python 3.9.0b1 #if PY_VERSION_HEX < 0x030900B1 && !defined(PYPY_VERSION) static inline PyFrameObject* PyFrame_GetBack(PyFrameObject *frame) { assert(frame != _Py_NULL); return _Py_CAST(PyFrameObject*, Py_XNewRef(frame->f_back)); } #endif #if !defined(PYPY_VERSION) static inline PyFrameObject* _PyFrame_GetBackBorrow(PyFrameObject *frame) { PyFrameObject *back = PyFrame_GetBack(frame); Py_XDECREF(back); return back; } #endif // bpo-40421 added PyFrame_GetLocals() to Python 3.11.0a7 #if 
PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) static inline PyObject* PyFrame_GetLocals(PyFrameObject *frame) { #if PY_VERSION_HEX >= 0x030400B1 if (PyFrame_FastToLocalsWithError(frame) < 0) { return NULL; } #else PyFrame_FastToLocals(frame); #endif return Py_NewRef(frame->f_locals); } #endif // bpo-40421 added PyFrame_GetGlobals() to Python 3.11.0a7 #if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) static inline PyObject* PyFrame_GetGlobals(PyFrameObject *frame) { return Py_NewRef(frame->f_globals); } #endif // bpo-40421 added PyFrame_GetBuiltins() to Python 3.11.0a7 #if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) static inline PyObject* PyFrame_GetBuiltins(PyFrameObject *frame) { return Py_NewRef(frame->f_builtins); } #endif // bpo-40421 added PyFrame_GetLasti() to Python 3.11.0b1 #if PY_VERSION_HEX < 0x030B00B1 && !defined(PYPY_VERSION) static inline int PyFrame_GetLasti(PyFrameObject *frame) { #if PY_VERSION_HEX >= 0x030A00A7 // bpo-27129: Since Python 3.10.0a7, f_lasti is an instruction offset, // not a bytes offset anymore. Python uses 16-bit "wordcode" (2 bytes) // instructions. 
if (frame->f_lasti < 0) { return -1; } return frame->f_lasti * 2; #else return frame->f_lasti; #endif } #endif // gh-91248 added PyFrame_GetVar() to Python 3.12.0a2 #if PY_VERSION_HEX < 0x030C00A2 && !defined(PYPY_VERSION) static inline PyObject* PyFrame_GetVar(PyFrameObject *frame, PyObject *name) { PyObject *locals, *value; locals = PyFrame_GetLocals(frame); if (locals == NULL) { return NULL; } #if PY_VERSION_HEX >= 0x03000000 value = PyDict_GetItemWithError(locals, name); #else value = _PyDict_GetItemWithError(locals, name); #endif Py_DECREF(locals); if (value == NULL) { if (PyErr_Occurred()) { return NULL; } #if PY_VERSION_HEX >= 0x03000000 PyErr_Format(PyExc_NameError, "variable %R does not exist", name); #else PyErr_SetString(PyExc_NameError, "variable does not exist"); #endif return NULL; } return Py_NewRef(value); } #endif // gh-91248 added PyFrame_GetVarString() to Python 3.12.0a2 #if PY_VERSION_HEX < 0x030C00A2 && !defined(PYPY_VERSION) static inline PyObject* PyFrame_GetVarString(PyFrameObject *frame, const char *name) { PyObject *name_obj, *value; #if PY_VERSION_HEX >= 0x03000000 name_obj = PyUnicode_FromString(name); #else name_obj = PyString_FromString(name); #endif if (name_obj == NULL) { return NULL; } value = PyFrame_GetVar(frame, name_obj); Py_DECREF(name_obj); return value; } #endif // bpo-39947 added PyThreadState_GetInterpreter() to Python 3.9.0a5 #if PY_VERSION_HEX < 0x030900A5 || (defined(PYPY_VERSION) && PY_VERSION_HEX < 0x030B0000) static inline PyInterpreterState * PyThreadState_GetInterpreter(PyThreadState *tstate) { assert(tstate != _Py_NULL); return tstate->interp; } #endif // bpo-40429 added PyThreadState_GetFrame() to Python 3.9.0b1 #if PY_VERSION_HEX < 0x030900B1 && !defined(PYPY_VERSION) static inline PyFrameObject* PyThreadState_GetFrame(PyThreadState *tstate) { assert(tstate != _Py_NULL); return _Py_CAST(PyFrameObject *, Py_XNewRef(tstate->frame)); } #endif #if !defined(PYPY_VERSION) static inline PyFrameObject* 
_PyThreadState_GetFrameBorrow(PyThreadState *tstate) { PyFrameObject *frame = PyThreadState_GetFrame(tstate); Py_XDECREF(frame); return frame; } #endif // bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a5 #if PY_VERSION_HEX < 0x030900A5 || defined(PYPY_VERSION) static inline PyInterpreterState* PyInterpreterState_Get(void) { PyThreadState *tstate; PyInterpreterState *interp; tstate = PyThreadState_GET(); if (tstate == _Py_NULL) { Py_FatalError("GIL released (tstate is NULL)"); } interp = tstate->interp; if (interp == _Py_NULL) { Py_FatalError("no current interpreter"); } return interp; } #endif // bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a6 #if 0x030700A1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030900A6 && !defined(PYPY_VERSION) static inline uint64_t PyThreadState_GetID(PyThreadState *tstate) { assert(tstate != _Py_NULL); return tstate->id; } #endif // bpo-43760 added PyThreadState_EnterTracing() to Python 3.11.0a2 #if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) static inline void PyThreadState_EnterTracing(PyThreadState *tstate) { tstate->tracing++; #if PY_VERSION_HEX >= 0x030A00A1 tstate->cframe->use_tracing = 0; #else tstate->use_tracing = 0; #endif } #endif // bpo-43760 added PyThreadState_LeaveTracing() to Python 3.11.0a2 #if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) static inline void PyThreadState_LeaveTracing(PyThreadState *tstate) { int use_tracing = (tstate->c_tracefunc != _Py_NULL || tstate->c_profilefunc != _Py_NULL); tstate->tracing--; #if PY_VERSION_HEX >= 0x030A00A1 tstate->cframe->use_tracing = use_tracing; #else tstate->use_tracing = use_tracing; #endif } #endif // bpo-37194 added PyObject_CallNoArgs() to Python 3.9.0a1 // PyObject_CallNoArgs() added to PyPy 3.9.16-v7.3.11 #if !defined(PyObject_CallNoArgs) && PY_VERSION_HEX < 0x030900A1 static inline PyObject* PyObject_CallNoArgs(PyObject *func) { return PyObject_CallFunctionObjArgs(func, NULL); } #endif // bpo-39245 made PyObject_CallOneArg() public 
(previously called // _PyObject_CallOneArg) in Python 3.9.0a4 // PyObject_CallOneArg() added to PyPy 3.9.16-v7.3.11 #if !defined(PyObject_CallOneArg) && PY_VERSION_HEX < 0x030900A4 static inline PyObject* PyObject_CallOneArg(PyObject *func, PyObject *arg) { return PyObject_CallFunctionObjArgs(func, arg, NULL); } #endif // bpo-1635741 added PyModule_AddObjectRef() to Python 3.10.0a3 #if PY_VERSION_HEX < 0x030A00A3 static inline int PyModule_AddObjectRef(PyObject *module, const char *name, PyObject *value) { int res; if (!value && !PyErr_Occurred()) { // PyModule_AddObject() raises TypeError in this case PyErr_SetString(PyExc_SystemError, "PyModule_AddObjectRef() must be called " "with an exception raised if value is NULL"); return -1; } Py_XINCREF(value); res = PyModule_AddObject(module, name, value); if (res < 0) { Py_XDECREF(value); } return res; } #endif // bpo-40024 added PyModule_AddType() to Python 3.9.0a5 #if PY_VERSION_HEX < 0x030900A5 static inline int PyModule_AddType(PyObject *module, PyTypeObject *type) { const char *name, *dot; if (PyType_Ready(type) < 0) { return -1; } // inline _PyType_Name() name = type->tp_name; assert(name != _Py_NULL); dot = strrchr(name, '.'); if (dot != _Py_NULL) { name = dot + 1; } return PyModule_AddObjectRef(module, name, _PyObject_CAST(type)); } #endif // bpo-40241 added PyObject_GC_IsTracked() to Python 3.9.0a6. // bpo-4688 added _PyObject_GC_IS_TRACKED() to Python 2.7.0a2. #if PY_VERSION_HEX < 0x030900A6 && !defined(PYPY_VERSION) static inline int PyObject_GC_IsTracked(PyObject* obj) { return (PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj)); } #endif // bpo-40241 added PyObject_GC_IsFinalized() to Python 3.9.0a6. // bpo-18112 added _PyGCHead_FINALIZED() to Python 3.4.0 final. 
#if PY_VERSION_HEX < 0x030900A6 && PY_VERSION_HEX >= 0x030400F0 && !defined(PYPY_VERSION) static inline int PyObject_GC_IsFinalized(PyObject *obj) { PyGC_Head *gc = _Py_CAST(PyGC_Head*, obj) - 1; return (PyObject_IS_GC(obj) && _PyGCHead_FINALIZED(gc)); } #endif // bpo-39573 added Py_IS_TYPE() to Python 3.9.0a4 #if PY_VERSION_HEX < 0x030900A4 && !defined(Py_IS_TYPE) static inline int _Py_IS_TYPE(PyObject *ob, PyTypeObject *type) { return Py_TYPE(ob) == type; } #define Py_IS_TYPE(ob, type) _Py_IS_TYPE(_PyObject_CAST(ob), type) #endif // bpo-46906 added PyFloat_Pack2() and PyFloat_Unpack2() to Python 3.11a7. // bpo-11734 added _PyFloat_Pack2() and _PyFloat_Unpack2() to Python 3.6.0b1. // Python 3.11a2 moved _PyFloat_Pack2() and _PyFloat_Unpack2() to the internal // C API: Python 3.11a2-3.11a6 versions are not supported. #if 0x030600B1 <= PY_VERSION_HEX && PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION) static inline int PyFloat_Pack2(double x, char *p, int le) { return _PyFloat_Pack2(x, (unsigned char*)p, le); } static inline double PyFloat_Unpack2(const char *p, int le) { return _PyFloat_Unpack2((const unsigned char *)p, le); } #endif // bpo-46906 added PyFloat_Pack4(), PyFloat_Pack8(), PyFloat_Unpack4() and // PyFloat_Unpack8() to Python 3.11a7. // Python 3.11a2 moved _PyFloat_Pack4(), _PyFloat_Pack8(), _PyFloat_Unpack4() // and _PyFloat_Unpack8() to the internal C API: Python 3.11a2-3.11a6 versions // are not supported. 
#if PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION)
// Pack/unpack IEEE 754 single- and double-precision floats; *le* selects
// little-endian (non-zero) or big-endian byte order.
static inline int
PyFloat_Pack4(double x, char *p, int le)
{
    return _PyFloat_Pack4(x, (unsigned char*)p, le);
}

static inline int
PyFloat_Pack8(double x, char *p, int le)
{
    return _PyFloat_Pack8(x, (unsigned char*)p, le);
}

static inline double
PyFloat_Unpack4(const char *p, int le)
{
    return _PyFloat_Unpack4((const unsigned char *)p, le);
}

static inline double
PyFloat_Unpack8(const char *p, int le)
{
    return _PyFloat_Unpack8((const unsigned char *)p, le);
}
#endif


// gh-92154 added PyCode_GetCode() to Python 3.11.0b1
#if PY_VERSION_HEX < 0x030B00B1 && !defined(PYPY_VERSION)
// Return a new (strong) reference to the code object's bytecode.
static inline PyObject*
PyCode_GetCode(PyCodeObject *code)
{
    return Py_NewRef(code->co_code);
}
#endif

// gh-95008 added PyCode_GetVarnames() to Python 3.11.0rc1
#if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION)
static inline PyObject*
PyCode_GetVarnames(PyCodeObject *code)
{
    return Py_NewRef(code->co_varnames);
}
#endif

// gh-95008 added PyCode_GetFreevars() to Python 3.11.0rc1
#if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION)
static inline PyObject*
PyCode_GetFreevars(PyCodeObject *code)
{
    return Py_NewRef(code->co_freevars);
}
#endif

// gh-95008 added PyCode_GetCellvars() to Python 3.11.0rc1
#if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION)
static inline PyObject*
PyCode_GetCellvars(PyCodeObject *code)
{
    return Py_NewRef(code->co_cellvars);
}
#endif


// Py_UNUSED() was added to Python 3.4.0b2.
#if PY_VERSION_HEX < 0x030400B2 && !defined(Py_UNUSED)
// Mark a function parameter as deliberately unused (silences warnings
// on GCC/Clang; on other compilers only mangles the name).
#  if defined(__GNUC__) || defined(__clang__)
#    define Py_UNUSED(name) _unused_ ## name __attribute__((unused))
#  else
#    define Py_UNUSED(name) _unused_ ## name
#  endif
#endif


// gh-105922 added PyImport_AddModuleRef() to Python 3.13.0a1
#if PY_VERSION_HEX < 0x030D00A0
// Like PyImport_AddModule() but returns a strong reference (or NULL).
static inline PyObject*
PyImport_AddModuleRef(const char *name)
{
    return Py_XNewRef(PyImport_AddModule(name));
}
#endif


// gh-105927 added PyWeakref_GetRef() to Python 3.13.0a1
#if PY_VERSION_HEX < 0x030D0000
// Dereference a weakref. Returns 1 and stores a strong reference in *pobj
// if the referent is alive, 0 (with *pobj = NULL) if it is dead, and -1
// on error (non-weakref argument, or NULL ref).
static inline int
PyWeakref_GetRef(PyObject *ref, PyObject **pobj)
{
    PyObject *obj;
    if (ref != NULL && !PyWeakref_Check(ref)) {
        *pobj = NULL;
        PyErr_SetString(PyExc_TypeError, "expected a weakref");
        return -1;
    }
    obj = PyWeakref_GetObject(ref);
    if (obj == NULL) {
        // SystemError if ref is NULL
        *pobj = NULL;
        return -1;
    }
    if (obj == Py_None) {
        // the referent was already collected
        *pobj = NULL;
        return 0;
    }
    *pobj = Py_NewRef(obj);
    return 1;
}
#endif


// bpo-36974 added PY_VECTORCALL_ARGUMENTS_OFFSET to Python 3.8b1
#ifndef PY_VECTORCALL_ARGUMENTS_OFFSET
#  define PY_VECTORCALL_ARGUMENTS_OFFSET (_Py_CAST(size_t, 1) << (8 * sizeof(size_t) - 1))
#endif

// bpo-36974 added PyVectorcall_NARGS() to Python 3.8b1
#if PY_VERSION_HEX < 0x030800B1
// Strip the PY_VECTORCALL_ARGUMENTS_OFFSET flag bit to get the real
// positional argument count.
static inline Py_ssize_t
PyVectorcall_NARGS(size_t n)
{
    return n & ~PY_VECTORCALL_ARGUMENTS_OFFSET;
}
#endif


// gh-105922 added PyObject_Vectorcall() to Python 3.9.0a4
#if PY_VERSION_HEX < 0x030900A4
static inline PyObject*
PyObject_Vectorcall(PyObject *callable, PyObject *const *args,
                    size_t nargsf, PyObject *kwnames)
{
#if PY_VERSION_HEX >= 0x030800B1 && !defined(PYPY_VERSION)
    // bpo-36974 added _PyObject_Vectorcall() to Python 3.8.0b1
    return _PyObject_Vectorcall(callable, args, nargsf, kwnames);
#else
    // Slow fallback: materialize the flat argument array into a
    // (posargs tuple, kwargs dict) pair and use PyObject_Call().
    PyObject *posargs = NULL, *kwargs = NULL;
    PyObject *res;
    Py_ssize_t nposargs, nkwargs, i;

    if (nargsf != 0 && args == NULL) {
        PyErr_BadInternalCall();
        goto error;
    }
    if (kwnames != NULL && !PyTuple_Check(kwnames)) {
        PyErr_BadInternalCall();
        goto error;
    }

    nposargs = (Py_ssize_t)PyVectorcall_NARGS(nargsf);
    if (kwnames) {
        nkwargs = PyTuple_GET_SIZE(kwnames);
    }
    else {
        nkwargs = 0;
    }

    posargs = PyTuple_New(nposargs);
    if (posargs == NULL) {
        goto error;
    }
    if (nposargs) {
        for (i=0; i < nposargs; i++) {
            PyTuple_SET_ITEM(posargs, i, Py_NewRef(*args));
            args++;
        }
    }

    if (nkwargs) {
        kwargs = PyDict_New();
        if (kwargs == NULL) {
            goto error;
        }
        // keyword values follow the positional ones in the same flat array
        for (i = 0; i < nkwargs; i++) {
            PyObject *key = PyTuple_GET_ITEM(kwnames, i);
            PyObject *value = *args;
            args++;
            if (PyDict_SetItem(kwargs, key, value) < 0) {
                goto error;
            }
        }
    }
    else {
        kwargs = NULL;
    }

    res = PyObject_Call(callable, posargs, kwargs);
    Py_DECREF(posargs);
    Py_XDECREF(kwargs);
    return res;

error:
    Py_DECREF(posargs);
    Py_XDECREF(kwargs);
    return NULL;
#endif
}
#endif


// gh-106521 added PyObject_GetOptionalAttr() and
// PyObject_GetOptionalAttrString() to Python 3.13.0a1
#if PY_VERSION_HEX < 0x030D00A1
// Attribute lookup that distinguishes "missing" from "error":
// returns 1 with *result set, 0 if the attribute does not exist
// (AttributeError is cleared), -1 on any other error.
static inline int
PyObject_GetOptionalAttr(PyObject *obj, PyObject *attr_name, PyObject **result)
{
    // bpo-32571 added _PyObject_LookupAttr() to Python 3.7.0b1
#if PY_VERSION_HEX >= 0x030700B1 && !defined(PYPY_VERSION)
    return _PyObject_LookupAttr(obj, attr_name, result);
#else
    *result = PyObject_GetAttr(obj, attr_name);
    if (*result != NULL) {
        return 1;
    }
    if (!PyErr_Occurred()) {
        return 0;
    }
    if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
        PyErr_Clear();
        return 0;
    }
    return -1;
#endif
}

static inline int
PyObject_GetOptionalAttrString(PyObject *obj, const char *attr_name, PyObject **result)
{
    PyObject *name_obj;
    int rc;
#if PY_VERSION_HEX >= 0x03000000
    name_obj = PyUnicode_FromString(attr_name);
#else
    name_obj = PyString_FromString(attr_name);
#endif
    if (name_obj == NULL) {
        *result = NULL;
        return -1;
    }
    rc = PyObject_GetOptionalAttr(obj, name_obj, result);
    Py_DECREF(name_obj);
    return rc;
}
#endif


// gh-106307 added PyObject_GetOptionalAttr() and
// PyMapping_GetOptionalItemString() to Python 3.13.0a1
#if PY_VERSION_HEX < 0x030D00A1
// Item lookup with the same 1/0/-1 convention as PyObject_GetOptionalAttr();
// a KeyError is treated as "missing" and cleared.
static inline int
PyMapping_GetOptionalItem(PyObject *obj, PyObject *key, PyObject **result)
{
    *result = PyObject_GetItem(obj, key);
    if (*result) {
        return 1;
    }
    if (!PyErr_ExceptionMatches(PyExc_KeyError)) {
        return -1;
    }
    PyErr_Clear();
    return 0;
}

static inline int
PyMapping_GetOptionalItemString(PyObject *obj, const char *key, PyObject **result)
{
    PyObject *key_obj;
    int rc;
#if PY_VERSION_HEX >= 0x03000000
    key_obj = PyUnicode_FromString(key);
#else
    key_obj = PyString_FromString(key);
#endif
    if (key_obj == NULL) {
        *result = NULL;
        return -1;
    }
    rc = PyMapping_GetOptionalItem(obj, key_obj, result);
    Py_DECREF(key_obj);
    return rc;
}
#endif

// gh-108511 added PyMapping_HasKeyWithError() and
// PyMapping_HasKeyStringWithError() to Python 3.13.0a1
#if PY_VERSION_HEX < 0x030D00A1
// Membership tests that, unlike PyMapping_HasKey(), propagate errors
// (return -1) instead of swallowing them.
static inline int
PyMapping_HasKeyWithError(PyObject *obj, PyObject *key)
{
    PyObject *res;
    int rc = PyMapping_GetOptionalItem(obj, key, &res);
    Py_XDECREF(res);
    return rc;
}

static inline int
PyMapping_HasKeyStringWithError(PyObject *obj, const char *key)
{
    PyObject *res;
    int rc = PyMapping_GetOptionalItemString(obj, key, &res);
    Py_XDECREF(res);
    return rc;
}
#endif


// gh-108511 added PyObject_HasAttrWithError() and
// PyObject_HasAttrStringWithError() to Python 3.13.0a1
#if PY_VERSION_HEX < 0x030D00A1
static inline int
PyObject_HasAttrWithError(PyObject *obj, PyObject *attr)
{
    PyObject *res;
    int rc = PyObject_GetOptionalAttr(obj, attr, &res);
    Py_XDECREF(res);
    return rc;
}

static inline int
PyObject_HasAttrStringWithError(PyObject *obj, const char *attr)
{
    PyObject *res;
    int rc = PyObject_GetOptionalAttrString(obj, attr, &res);
    Py_XDECREF(res);
    return rc;
}
#endif


// gh-106004 added PyDict_GetItemRef() and PyDict_GetItemStringRef()
// to Python 3.13.0a1
#if PY_VERSION_HEX < 0x030D00A1
// Dict lookup returning a strong reference. 1 = found (*result set),
// 0 = not found (*result = NULL), -1 = error (*result = NULL).
static inline int
PyDict_GetItemRef(PyObject *mp, PyObject *key, PyObject **result)
{
#if PY_VERSION_HEX >= 0x03000000
    PyObject *item = PyDict_GetItemWithError(mp, key);
#else
    PyObject *item = _PyDict_GetItemWithError(mp, key);
#endif
    if (item != NULL) {
        *result = Py_NewRef(item);
        return 1;  // found
    }
    if (!PyErr_Occurred()) {
        *result = NULL;
        return 0;  // not found
    }
    *result = NULL;
    return -1;
}

static inline int
PyDict_GetItemStringRef(PyObject *mp, const char *key, PyObject **result)
{
    int res;
#if PY_VERSION_HEX >= 0x03000000
    PyObject *key_obj = PyUnicode_FromString(key);
#else
    PyObject *key_obj = PyString_FromString(key);
#endif
    if (key_obj == NULL) {
        *result = NULL;
        return -1;
    }
    res = PyDict_GetItemRef(mp, key_obj, result);
    Py_DECREF(key_obj);
    return res;
}
#endif


// gh-106307 added PyModule_Add() to Python 3.13.0a1
#if PY_VERSION_HEX < 0x030D00A1
// Like PyModule_AddObjectRef(), but steals a reference to *value* even
// on failure (hence the unconditional Py_XDECREF).
static inline int
PyModule_Add(PyObject *mod, const char *name, PyObject *value)
{
    int res = PyModule_AddObjectRef(mod, name, value);
    Py_XDECREF(value);
    return res;
}
#endif


// gh-108014 added Py_IsFinalizing() to Python 3.13.0a1
// bpo-1856 added _Py_Finalizing to Python 3.2.1b1.
// _Py_IsFinalizing() was added to PyPy 7.3.0.
#if (0x030201B1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030D00A1) \
        && (!defined(PYPY_VERSION_NUM) || PYPY_VERSION_NUM >= 0x7030000)
static inline int Py_IsFinalizing(void)
{
#if PY_VERSION_HEX >= 0x030700A1
    // _Py_IsFinalizing() was added to Python 3.7.0a1.
    return _Py_IsFinalizing();
#else
    return (_Py_Finalizing != NULL);
#endif
}
#endif


// gh-108323 added PyDict_ContainsString() to Python 3.13.0a1
#if PY_VERSION_HEX < 0x030D00A1
static inline int PyDict_ContainsString(PyObject *op, const char *key)
{
    PyObject *key_obj = PyUnicode_FromString(key);
    if (key_obj == NULL) {
        return -1;
    }
    int res = PyDict_Contains(op, key_obj);
    Py_DECREF(key_obj);
    return res;
}
#endif


// gh-108445 added PyLong_AsInt() to Python 3.13.0a1
#if PY_VERSION_HEX < 0x030D00A1
// Convert a Python int to a C int, raising OverflowError when it does
// not fit; returns -1 on error (check PyErr_Occurred() to disambiguate).
static inline int PyLong_AsInt(PyObject *obj)
{
#ifdef PYPY_VERSION
    long value = PyLong_AsLong(obj);
    if (value == -1 && PyErr_Occurred()) {
        return -1;
    }
    if (value < (long)INT_MIN || (long)INT_MAX < value) {
        PyErr_SetString(PyExc_OverflowError,
                        "Python int too large to convert to C int");
        return -1;
    }
    return (int)value;
#else
    return _PyLong_AsInt(obj);
#endif
}
#endif


// gh-107073 added PyObject_VisitManagedDict() to Python 3.13.0a1
#if PY_VERSION_HEX < 0x030D00A1
// GC helpers for objects with a managed instance __dict__.
static inline int
PyObject_VisitManagedDict(PyObject *obj, visitproc visit, void *arg)
{
    PyObject **dict = _PyObject_GetDictPtr(obj);
    if (dict == NULL || *dict == NULL) {
        return -1;
    }
    Py_VISIT(*dict);
    return 0;
}

static inline void
PyObject_ClearManagedDict(PyObject *obj)
{
    PyObject **dict = _PyObject_GetDictPtr(obj);
    if (dict == NULL || *dict == NULL) {
        return;
    }
    Py_CLEAR(*dict);
}
#endif

// gh-108867 added PyThreadState_GetUnchecked() to Python 3.13.0a1
// Python 3.5.2 added _PyThreadState_UncheckedGet().
#if PY_VERSION_HEX >= 0x03050200 && PY_VERSION_HEX < 0x030D00A1
// Return the current thread state without asserting that it is non-NULL.
static inline PyThreadState*
PyThreadState_GetUnchecked(void)
{
    return _PyThreadState_UncheckedGet();
}
#endif


// gh-110289 added PyUnicode_EqualToUTF8() and PyUnicode_EqualToUTF8AndSize()
// to Python 3.13.0a1
#if PY_VERSION_HEX < 0x030D00A1
// Compare *unicode* to a UTF-8 byte string of known length. Returns 1 on
// equality, 0 otherwise. This API cannot report errors: internal failures
// (e.g. memory allocation) are swallowed and treated as "not equal".
static inline int
PyUnicode_EqualToUTF8AndSize(PyObject *unicode, const char *str, Py_ssize_t str_len)
{
    Py_ssize_t len;
    const void *utf8;
    PyObject *exc_type, *exc_value, *exc_tb;
    int res;

    // API cannot report errors so save/restore the exception
    PyErr_Fetch(&exc_type, &exc_value, &exc_tb);

    // Python 3.3.0a1 added PyUnicode_AsUTF8AndSize()
#if PY_VERSION_HEX >= 0x030300A1
    if (PyUnicode_IS_ASCII(unicode)) {
        // ASCII is a subset of UTF-8: compare the raw data directly
        utf8 = PyUnicode_DATA(unicode);
        len = PyUnicode_GET_LENGTH(unicode);
    }
    else {
        utf8 = PyUnicode_AsUTF8AndSize(unicode, &len);
        if (utf8 == NULL) {
            // Memory allocation failure. The API cannot report error,
            // so ignore the exception and return 0.
            res = 0;
            goto done;
        }
    }

    if (len != str_len) {
        res = 0;
        goto done;
    }
    res = (memcmp(utf8, str, (size_t)len) == 0);
#else
    PyObject *bytes = PyUnicode_AsUTF8String(unicode);
    if (bytes == NULL) {
        // Memory allocation failure. The API cannot report error,
        // so ignore the exception and return 0.
        res = 0;
        goto done;
    }

#if PY_VERSION_HEX >= 0x03000000
    len = PyBytes_GET_SIZE(bytes);
    utf8 = PyBytes_AS_STRING(bytes);
#else
    len = PyString_GET_SIZE(bytes);
    utf8 = PyString_AS_STRING(bytes);
#endif
    if (len != str_len) {
        Py_DECREF(bytes);
        res = 0;
        goto done;
    }

    res = (memcmp(utf8, str, (size_t)len) == 0);
    Py_DECREF(bytes);
#endif

done:
    PyErr_Restore(exc_type, exc_value, exc_tb);
    return res;
}

static inline int
PyUnicode_EqualToUTF8(PyObject *unicode, const char *str)
{
    return PyUnicode_EqualToUTF8AndSize(unicode, str, (Py_ssize_t)strlen(str));
}
#endif


// gh-111138 added PyList_Extend() and PyList_Clear() to Python 3.13.0a2
#if PY_VERSION_HEX < 0x030D00A2
// Append all items of *iterable* by assigning to the empty slice at the
// end of the list.
static inline int
PyList_Extend(PyObject *list, PyObject *iterable)
{
    return PyList_SetSlice(list, PY_SSIZE_T_MAX, PY_SSIZE_T_MAX, iterable);
}

// Remove all items (slice-assign NULL over the whole list).
static inline int
PyList_Clear(PyObject *list)
{
    return PyList_SetSlice(list, 0, PY_SSIZE_T_MAX, NULL);
}
#endif


// gh-111262 added PyDict_Pop() and PyDict_PopString() to Python 3.13.0a2
#if PY_VERSION_HEX < 0x030D00A2
// Remove *key* from *dict*. Returns 1 if the key was present (the value
// is stored in *result when result != NULL, otherwise released), 0 if it
// was missing, and -1 on error. KeyError from the underlying pop is
// translated into the 0 ("missing") outcome.
static inline int
PyDict_Pop(PyObject *dict, PyObject *key, PyObject **result)
{
    PyObject *value;

    if (!PyDict_Check(dict)) {
        PyErr_BadInternalCall();
        if (result) {
            *result = NULL;
        }
        return -1;
    }

    // bpo-16991 added _PyDict_Pop() to Python 3.5.0b2.
    // Python 3.6.0b3 changed _PyDict_Pop() first argument type to PyObject*.
    // Python 3.13.0a1 removed _PyDict_Pop().
#if defined(PYPY_VERSION) || PY_VERSION_HEX < 0x030500b2 || PY_VERSION_HEX >= 0x030D0000
    value = PyObject_CallMethod(dict, "pop", "O", key);
#elif PY_VERSION_HEX < 0x030600b3
    value = _PyDict_Pop(_Py_CAST(PyDictObject*, dict), key, NULL);
#else
    value = _PyDict_Pop(dict, key, NULL);
#endif
    if (value == NULL) {
        if (result) {
            *result = NULL;
        }
        if (PyErr_Occurred() && !PyErr_ExceptionMatches(PyExc_KeyError)) {
            return -1;
        }
        PyErr_Clear();
        return 0;
    }
    if (result) {
        *result = value;
    }
    else {
        Py_DECREF(value);
    }
    return 1;
}

static inline int
PyDict_PopString(PyObject *dict, const char *key, PyObject **result)
{
    PyObject *key_obj = PyUnicode_FromString(key);
    if (key_obj == NULL) {
        if (result != NULL) {
            *result = NULL;
        }
        return -1;
    }

    int res = PyDict_Pop(dict, key_obj, result);
    Py_DECREF(key_obj);
    return res;
}
#endif


#if PY_VERSION_HEX < 0x030200A4
// Python 3.2.0a4 added Py_hash_t type
typedef Py_ssize_t Py_hash_t;
#endif


// gh-111545 added Py_HashPointer() to Python 3.13.0a3
#if PY_VERSION_HEX < 0x030D00A3
static inline Py_hash_t Py_HashPointer(const void *ptr)
{
#if PY_VERSION_HEX >= 0x030900A4 && !defined(PYPY_VERSION)
    return _Py_HashPointer(ptr);
#else
    // older _Py_HashPointer() takes a non-const pointer
    return _Py_HashPointer(_Py_CAST(void*, ptr));
#endif
}
#endif


// Python 3.13a4 added a PyTime API.
// Use the private API added to Python 3.5.
#if PY_VERSION_HEX < 0x030D00A4 && PY_VERSION_HEX >= 0x03050000
typedef _PyTime_t PyTime_t;
#define PyTime_MIN _PyTime_MIN
#define PyTime_MAX _PyTime_MAX

// PyTime_t holds a number of nanoseconds; convert to seconds as a double.
static inline double PyTime_AsSecondsDouble(PyTime_t t)
{
    return _PyTime_AsSecondsDouble(t);
}

static inline int PyTime_Monotonic(PyTime_t *result)
{
    return _PyTime_GetMonotonicClockWithInfo(result, NULL);
}

static inline int PyTime_Time(PyTime_t *result)
{
    return _PyTime_GetSystemClockWithInfo(result, NULL);
}

static inline int PyTime_PerfCounter(PyTime_t *result)
{
#if PY_VERSION_HEX >= 0x03070000 && !defined(PYPY_VERSION)
    return _PyTime_GetPerfCounterWithInfo(result, NULL);
#elif PY_VERSION_HEX >= 0x03070000
    // Call time.perf_counter_ns() and convert Python int object to PyTime_t.
    // Cache time.perf_counter_ns() function for best performance.
    static PyObject *func = NULL;
    if (func == NULL) {
        PyObject *mod = PyImport_ImportModule("time");
        if (mod == NULL) {
            return -1;
        }

        func = PyObject_GetAttrString(mod, "perf_counter_ns");
        Py_DECREF(mod);
        if (func == NULL) {
            return -1;
        }
    }

    PyObject *res = PyObject_CallNoArgs(func);
    if (res == NULL) {
        return -1;
    }
    long long value = PyLong_AsLongLong(res);
    Py_DECREF(res);

    if (value == -1 && PyErr_Occurred()) {
        return -1;
    }

    Py_BUILD_ASSERT(sizeof(value) >= sizeof(PyTime_t));
    *result = (PyTime_t)value;
    return 0;
#else
    // Call time.perf_counter() and convert C double to PyTime_t.
    // Cache time.perf_counter() function for best performance.
    static PyObject *func = NULL;
    if (func == NULL) {
        PyObject *mod = PyImport_ImportModule("time");
        if (mod == NULL) {
            return -1;
        }

        func = PyObject_GetAttrString(mod, "perf_counter");
        Py_DECREF(mod);
        if (func == NULL) {
            return -1;
        }
    }

    PyObject *res = PyObject_CallNoArgs(func);
    if (res == NULL) {
        return -1;
    }
    double d = PyFloat_AsDouble(res);
    Py_DECREF(res);

    if (d == -1.0 && PyErr_Occurred()) {
        return -1;
    }

    // Avoid floor() to avoid having to link to libm
    *result = (PyTime_t)(d * 1e9);
    return 0;
#endif
}
#endif


// gh-111389 added hash constants to Python 3.13.0a5. These constants were
// added first as private macros to Python 3.4.0b1 and PyPy 7.3.8.
#if (!defined(PyHASH_BITS) \
     && ((!defined(PYPY_VERSION) && PY_VERSION_HEX >= 0x030400B1) \
         || (defined(PYPY_VERSION) && PY_VERSION_HEX >= 0x03070000 \
             && PYPY_VERSION_NUM >= 0x07030800)))
#  define PyHASH_BITS _PyHASH_BITS
#  define PyHASH_MODULUS _PyHASH_MODULUS
#  define PyHASH_INF _PyHASH_INF
#  define PyHASH_IMAG _PyHASH_IMAG
#endif


// gh-111545 added Py_GetConstant() and Py_GetConstantBorrowed()
// to Python 3.13.0a6
#if PY_VERSION_HEX < 0x030D00A6 && !defined(Py_CONSTANT_NONE)

#define Py_CONSTANT_NONE 0
#define Py_CONSTANT_FALSE 1
#define Py_CONSTANT_TRUE 2
#define Py_CONSTANT_ELLIPSIS 3
#define Py_CONSTANT_NOT_IMPLEMENTED 4
#define Py_CONSTANT_ZERO 5
#define Py_CONSTANT_ONE 6
#define Py_CONSTANT_EMPTY_STR 7
#define Py_CONSTANT_EMPTY_BYTES 8
#define Py_CONSTANT_EMPTY_TUPLE 9

// Return a strong reference to a well-known constant. The non-singleton
// constants (0, 1, "", b"", ()) are created once on first use and cached
// in a function-local static array for the lifetime of the process.
static inline PyObject* Py_GetConstant(unsigned int constant_id)
{
    static PyObject* constants[Py_CONSTANT_EMPTY_TUPLE + 1] = {NULL};

    if (constants[Py_CONSTANT_NONE] == NULL) {
        constants[Py_CONSTANT_NONE] = Py_None;
        constants[Py_CONSTANT_FALSE] = Py_False;
        constants[Py_CONSTANT_TRUE] = Py_True;
        constants[Py_CONSTANT_ELLIPSIS] = Py_Ellipsis;
        constants[Py_CONSTANT_NOT_IMPLEMENTED] = Py_NotImplemented;

        constants[Py_CONSTANT_ZERO] = PyLong_FromLong(0);
        if (constants[Py_CONSTANT_ZERO] == NULL) {
            goto fatal_error;
        }

        constants[Py_CONSTANT_ONE] = PyLong_FromLong(1);
        if (constants[Py_CONSTANT_ONE] == NULL) {
            goto fatal_error;
        }

        constants[Py_CONSTANT_EMPTY_STR] = PyUnicode_FromStringAndSize("", 0);
        if (constants[Py_CONSTANT_EMPTY_STR] == NULL) {
            goto fatal_error;
        }

        constants[Py_CONSTANT_EMPTY_BYTES] = PyBytes_FromStringAndSize("", 0);
        if (constants[Py_CONSTANT_EMPTY_BYTES] == NULL) {
            goto fatal_error;
        }

        constants[Py_CONSTANT_EMPTY_TUPLE] = PyTuple_New(0);
        if (constants[Py_CONSTANT_EMPTY_TUPLE] == NULL) {
            goto fatal_error;
        }
        // goto dance to avoid compiler warnings about Py_FatalError()
        goto init_done;

fatal_error:
        // This case should never happen
        Py_FatalError("Py_GetConstant() failed to get constants");
    }

init_done:
    if (constant_id <= Py_CONSTANT_EMPTY_TUPLE) {
        return Py_NewRef(constants[constant_id]);
    }
    else {
        PyErr_BadInternalCall();
        return NULL;
    }
}

// Borrowed-reference variant: the immediate Py_XDECREF is safe because the
// returned object is kept alive by Py_GetConstant()'s static cache.
static inline PyObject* Py_GetConstantBorrowed(unsigned int constant_id)
{
    PyObject *obj = Py_GetConstant(constant_id);
    Py_XDECREF(obj);
    return obj;
}
#endif


// gh-114329 added PyList_GetItemRef() to Python 3.13.0a4
#if PY_VERSION_HEX < 0x030D00A4
// Like PyList_GetItem() but returns a strong reference (or NULL on error).
static inline PyObject *
PyList_GetItemRef(PyObject *op, Py_ssize_t index)
{
    PyObject *item = PyList_GetItem(op, index);
    Py_XINCREF(item);
    return item;
}
#endif


// gh-112066 added PyDict_SetDefaultRef() to Python 3.13.0a4
#if PY_VERSION_HEX < 0x030D00A4
// dict.setdefault() with explicit result reporting: returns 1 if the key
// was already present, 0 if *default_value* was inserted, -1 on error.
// When result != NULL it receives a strong reference to the final value.
static inline int
PyDict_SetDefaultRef(PyObject *d, PyObject *key, PyObject *default_value,
                     PyObject **result)
{
    PyObject *value;
    if (PyDict_GetItemRef(d, key, &value) < 0) {
        // get error
        if (result) {
            *result = NULL;
        }
        return -1;
    }
    if (value != NULL) {
        // present
        if (result) {
            *result = value;
        }
        else {
            Py_DECREF(value);
        }
        return 1;
    }

    // missing: set the item
    if (PyDict_SetItem(d, key, default_value) < 0) {
        // set error
        if (result) {
            *result = NULL;
        }
        return -1;
    }
    if (result) {
        *result = Py_NewRef(default_value);
    }
    return 0;
}
#endif


#if PY_VERSION_HEX < 0x030D00B3
// Critical sections only exist on the free-threaded 3.13+ build; on older
// versions they reduce to plain braces (the GIL provides the exclusion).
#  define Py_BEGIN_CRITICAL_SECTION(op) {
#  define Py_END_CRITICAL_SECTION() }
#  define Py_BEGIN_CRITICAL_SECTION2(a, b) {
#  define Py_END_CRITICAL_SECTION2() }
#endif


#if PY_VERSION_HEX < 0x030E0000 && PY_VERSION_HEX >= 0x03060000 && !defined(PYPY_VERSION)
// Backport of the public PyUnicodeWriter API (Python 3.14) on top of the
// private _PyUnicodeWriter. The public handle wraps a heap-allocated
// _PyUnicodeWriter.
typedef struct PyUnicodeWriter PyUnicodeWriter;

static inline void PyUnicodeWriter_Discard(PyUnicodeWriter *writer)
{
    _PyUnicodeWriter_Dealloc((_PyUnicodeWriter*)writer);
    PyMem_Free(writer);
}

static inline PyUnicodeWriter* PyUnicodeWriter_Create(Py_ssize_t length)
{
    if (length < 0) {
        PyErr_SetString(PyExc_ValueError,
                        "length must be positive");
        return NULL;
    }

    const size_t size = sizeof(_PyUnicodeWriter);
    PyUnicodeWriter *pub_writer = (PyUnicodeWriter *)PyMem_Malloc(size);
    if (pub_writer == _Py_NULL) {
        PyErr_NoMemory();
        return _Py_NULL;
    }
    _PyUnicodeWriter *writer = (_PyUnicodeWriter *)pub_writer;

    _PyUnicodeWriter_Init(writer);
    // preallocate for *length* chars, assuming ASCII (maxchar 127)
    if (_PyUnicodeWriter_Prepare(writer, length, 127) < 0) {
        PyUnicodeWriter_Discard(pub_writer);
        return NULL;
    }
    writer->overallocate = 1;

    return pub_writer;
}

static inline PyObject* PyUnicodeWriter_Finish(PyUnicodeWriter *writer)
{
    PyObject *str = _PyUnicodeWriter_Finish((_PyUnicodeWriter*)writer);
    assert(((_PyUnicodeWriter*)writer)->buffer == NULL);
    PyMem_Free(writer);
    return str;
}

static inline int
PyUnicodeWriter_WriteChar(PyUnicodeWriter *writer, Py_UCS4 ch)
{
    if (ch > 0x10ffff) {
        PyErr_SetString(PyExc_ValueError,
                        "character must be in range(0x110000)");
        return -1;
    }

    return _PyUnicodeWriter_WriteChar((_PyUnicodeWriter*)writer, ch);
}

// Write str(obj)
static inline int
PyUnicodeWriter_WriteStr(PyUnicodeWriter *writer, PyObject *obj)
{
    PyObject *str = PyObject_Str(obj);
    if (str == NULL) {
        return -1;
    }

    int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str);
    Py_DECREF(str);
    return res;
}

// Write repr(obj)
static inline int
PyUnicodeWriter_WriteRepr(PyUnicodeWriter *writer, PyObject *obj)
{
    PyObject *str = PyObject_Repr(obj);
    if (str == NULL) {
        return -1;
    }

    int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str);
    Py_DECREF(str);
    return res;
}

static inline int
PyUnicodeWriter_WriteUTF8(PyUnicodeWriter *writer, const char *str, Py_ssize_t size)
{
    if (size < 0) {
        // NUL-terminated input: measure it
        size = (Py_ssize_t)strlen(str);
    }

    PyObject *str_obj = PyUnicode_FromStringAndSize(str, size);
    if (str_obj == _Py_NULL) {
        return -1;
    }

    int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj);
    Py_DECREF(str_obj);
    return res;
}

static inline int
PyUnicodeWriter_WriteWideChar(PyUnicodeWriter *writer, const wchar_t *str, Py_ssize_t size)
{
    if (size < 0) {
        size = (Py_ssize_t)wcslen(str);
    }

    PyObject *str_obj = PyUnicode_FromWideChar(str, size);
    if (str_obj == _Py_NULL) {
        return -1;
    }

    int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj);
    Py_DECREF(str_obj);
    return res;
}

static inline int
PyUnicodeWriter_WriteSubstring(PyUnicodeWriter *writer, PyObject *str,
                               Py_ssize_t start, Py_ssize_t end)
{
    if (!PyUnicode_Check(str)) {
        PyErr_Format(PyExc_TypeError, "expect str, not %T", str);
        return -1;
    }
    if (start < 0 || start > end) {
        PyErr_Format(PyExc_ValueError, "invalid start argument");
        return -1;
    }
    if (end > PyUnicode_GET_LENGTH(str)) {
        PyErr_Format(PyExc_ValueError, "invalid end argument");
        return -1;
    }

    return _PyUnicodeWriter_WriteSubstring((_PyUnicodeWriter*)writer, str,
                                           start, end);
}

static inline int
PyUnicodeWriter_Format(PyUnicodeWriter *writer, const char *format, ...)
{
    va_list vargs;
    va_start(vargs, format);
    PyObject *str = PyUnicode_FromFormatV(format, vargs);
    va_end(vargs);
    if (str == _Py_NULL) {
        return -1;
    }

    int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str);
    Py_DECREF(str);
    return res;
}
#endif  // PY_VERSION_HEX < 0x030E0000


// gh-116560 added PyLong_GetSign() to Python 3.14.0a0
#if PY_VERSION_HEX < 0x030E00A0
// Store -1, 0 or 1 in *sign depending on the value of *obj*.
static inline int PyLong_GetSign(PyObject *obj, int *sign)
{
    if (!PyLong_Check(obj)) {
        PyErr_Format(PyExc_TypeError, "expect int, got %s",
                     Py_TYPE(obj)->tp_name);
        return -1;
    }

    *sign = _PyLong_Sign(obj);
    return 0;
}
#endif


// gh-126061 added PyLong_IsPositive/Negative/Zero() to Python in 3.14.0a2
#if PY_VERSION_HEX < 0x030E00A2
static inline int PyLong_IsPositive(PyObject *obj)
{
    if (!PyLong_Check(obj)) {
        PyErr_Format(PyExc_TypeError, "expected int, got %s",
                     Py_TYPE(obj)->tp_name);
        return -1;
    }
    return _PyLong_Sign(obj) == 1;
}

static inline int PyLong_IsNegative(PyObject *obj)
{
    if (!PyLong_Check(obj)) {
        PyErr_Format(PyExc_TypeError, "expected int, got %s",
                     Py_TYPE(obj)->tp_name);
        return -1;
    }
    return _PyLong_Sign(obj) == -1;
}

static inline int PyLong_IsZero(PyObject *obj)
{
    if (!PyLong_Check(obj)) {
        PyErr_Format(PyExc_TypeError, "expected int, got %s",
                     Py_TYPE(obj)->tp_name);
        return -1;
    }
    return _PyLong_Sign(obj) == 0;
}
#endif


// gh-124502 added PyUnicode_Equal() to Python 3.14.0a0
#if PY_VERSION_HEX < 0x030E00A0
// Strict str/str equality: returns 1, 0, or -1 with TypeError set when
// either argument is not a str.
static inline int PyUnicode_Equal(PyObject *str1, PyObject *str2)
{
    if (!PyUnicode_Check(str1)) {
        PyErr_Format(PyExc_TypeError, "first argument must be str, not %s",
                     Py_TYPE(str1)->tp_name);
        return -1;
    }
    if (!PyUnicode_Check(str2)) {
        PyErr_Format(PyExc_TypeError, "second argument must be str, not %s",
                     Py_TYPE(str2)->tp_name);
        return -1;
    }

#if PY_VERSION_HEX >= 0x030d0000 && !defined(PYPY_VERSION)
    // local forward declaration: 3.13 no longer exposes this in headers
    PyAPI_FUNC(int) _PyUnicode_Equal(PyObject *str1, PyObject *str2);

    return _PyUnicode_Equal(str1, str2);
#elif PY_VERSION_HEX >= 0x03060000 && !defined(PYPY_VERSION)
    return _PyUnicode_EQ(str1, str2);
#elif PY_VERSION_HEX >= 0x03090000 && defined(PYPY_VERSION)
    return _PyUnicode_EQ(str1, str2);
#else
    return (PyUnicode_Compare(str1, str2) == 0);
#endif
}
#endif


// gh-121645 added PyBytes_Join() to Python 3.14.0a0
#if PY_VERSION_HEX < 0x030E00A0
static inline PyObject* PyBytes_Join(PyObject *sep, PyObject *iterable)
{
    return _PyBytes_Join(sep, iterable);
}
#endif


#if PY_VERSION_HEX < 0x030E00A0
// Hash *len* raw bytes with the same algorithm used for bytes objects.
static inline Py_hash_t Py_HashBuffer(const void *ptr, Py_ssize_t len)
{
#if PY_VERSION_HEX >= 0x03000000 && !defined(PYPY_VERSION)
    // local forward declaration of the private CPython helper
    PyAPI_FUNC(Py_hash_t) _Py_HashBytes(const void *src, Py_ssize_t len);

    return _Py_HashBytes(ptr, len);
#else
    // Fallback: build a temporary bytes object and hash that.
    Py_hash_t hash;
    PyObject *bytes = PyBytes_FromStringAndSize((const char*)ptr, len);
    if (bytes == NULL) {
        return -1;
    }
    hash = PyObject_Hash(bytes);
    Py_DECREF(bytes);
    return hash;
#endif
}
#endif


#if PY_VERSION_HEX < 0x030E00A0
// Advance *iter* one step. Returns 1 with *item set, 0 when exhausted
// (StopIteration is cleared), -1 on error.
static inline int PyIter_NextItem(PyObject *iter, PyObject **item)
{
    iternextfunc tp_iternext;

    assert(iter != NULL);
    assert(item != NULL);

    tp_iternext = Py_TYPE(iter)->tp_iternext;
    if (tp_iternext == NULL) {
        *item = NULL;
        PyErr_Format(PyExc_TypeError, "expected an iterator, got '%s'",
                     Py_TYPE(iter)->tp_name);
        return -1;
    }

    if ((*item = tp_iternext(iter))) {
        return 1;
    }
    if (!PyErr_Occurred()) {
        return 0;
    }
    if (PyErr_ExceptionMatches(PyExc_StopIteration)) {
        PyErr_Clear();
        return 0;
    }
    return -1;
}
#endif


#if PY_VERSION_HEX < 0x030E00A0
// Fixed-width int <-> PyLong conversion helpers (added in Python 3.14).
static inline PyObject* PyLong_FromInt32(int32_t value)
{
    Py_BUILD_ASSERT(sizeof(long) >= 4);
    return PyLong_FromLong(value);
}

static inline PyObject* PyLong_FromInt64(int64_t value)
{
    Py_BUILD_ASSERT(sizeof(long long) >= 8);
    return PyLong_FromLongLong(value);
}

static inline PyObject* PyLong_FromUInt32(uint32_t value)
{
    Py_BUILD_ASSERT(sizeof(unsigned long) >= 4);
    return PyLong_FromUnsignedLong(value);
}

static inline PyObject* PyLong_FromUInt64(uint64_t value)
{
    Py_BUILD_ASSERT(sizeof(unsigned long long) >= 8);
    return PyLong_FromUnsignedLongLong(value);
}

static inline int PyLong_AsInt32(PyObject *obj, int32_t *pvalue)
{
    Py_BUILD_ASSERT(sizeof(int) == 4);
    int value = PyLong_AsInt(obj);
    if (value == -1 && PyErr_Occurred()) {
        return -1;
    }
    *pvalue = (int32_t)value;
    return 0;
}

static inline int PyLong_AsInt64(PyObject *obj, int64_t *pvalue)
{
    Py_BUILD_ASSERT(sizeof(long long) == 8);
    long long value = PyLong_AsLongLong(obj);
    if (value == -1 && PyErr_Occurred()) {
        return -1;
    }
    *pvalue = (int64_t)value;
    return 0;
}

static inline int PyLong_AsUInt32(PyObject *obj, uint32_t *pvalue)
{
    Py_BUILD_ASSERT(sizeof(long) >= 4);
    unsigned long value = PyLong_AsUnsignedLong(obj);
    if (value == (unsigned long)-1 && PyErr_Occurred()) {
        return -1;
    }
#if SIZEOF_LONG > 4
    // 64-bit long: range-check explicitly before narrowing
    if ((unsigned long)UINT32_MAX < value) {
        PyErr_SetString(PyExc_OverflowError,
                        "Python int too large to convert to C uint32_t");
        return -1;
    }
#endif
    *pvalue = (uint32_t)value;
    return 0;
}

static inline int PyLong_AsUInt64(PyObject *obj, uint64_t *pvalue)
{
    Py_BUILD_ASSERT(sizeof(long long) == 8);
    unsigned long long value = PyLong_AsUnsignedLongLong(obj);
    if (value == (unsigned long long)-1 && PyErr_Occurred()) {
        return -1;
    }
    *pvalue = (uint64_t)value;
    return 0;
}
#endif


// gh-102471 added import and export API for integers to 3.14.0a2.
#if PY_VERSION_HEX < 0x030E00A2 && PY_VERSION_HEX >= 0x03000000 && !defined(PYPY_VERSION)
// Helpers to access PyLongObject internals.
static inline void
_PyLong_SetSignAndDigitCount(PyLongObject *op, int sign, Py_ssize_t size)
{
#if PY_VERSION_HEX >= 0x030C0000
    // 3.12+ packs sign and digit count into long_value.lv_tag
    op->long_value.lv_tag = (uintptr_t)(1 - sign) | ((uintptr_t)(size) << 3);
#elif PY_VERSION_HEX >= 0x030900A4
    Py_SET_SIZE(op, sign * size);
#else
    Py_SIZE(op) = sign * size;
#endif
}

static inline Py_ssize_t
_PyLong_DigitCount(const PyLongObject *op)
{
#if PY_VERSION_HEX >= 0x030C0000
    return (Py_ssize_t)(op->long_value.lv_tag >> 3);
#else
    // pre-3.12: ob_size is negative for negative ints
    return _PyLong_Sign((PyObject*)op) < 0 ? -Py_SIZE(op) : Py_SIZE(op);
#endif
}

static inline digit*
_PyLong_GetDigits(const PyLongObject *op)
{
#if PY_VERSION_HEX >= 0x030C0000
    return (digit*)(op->long_value.ob_digit);
#else
    return (digit*)(op->ob_digit);
#endif
}

typedef struct PyLongLayout {
    uint8_t bits_per_digit;
    uint8_t digit_size;
    int8_t digits_order;
    int8_t digit_endianness;
} PyLongLayout;

typedef struct PyLongExport {
    int64_t value;
    uint8_t negative;
    Py_ssize_t ndigits;
    const void *digits;
    Py_uintptr_t _reserved;
} PyLongExport;

typedef struct PyLongWriter PyLongWriter;

static inline const PyLongLayout*
PyLong_GetNativeLayout(void)
{
    static const PyLongLayout PyLong_LAYOUT = {
        PyLong_SHIFT,
        sizeof(digit),
        -1,  // least significant first
        PY_LITTLE_ENDIAN ? -1 : 1,
    };

    return &PyLong_LAYOUT;
}

// Export *obj* either as an inline int64_t value or, on overflow, as a
// view on its digit array (which keeps a reference in _reserved until
// PyLong_FreeExport() is called).
static inline int
PyLong_Export(PyObject *obj, PyLongExport *export_long)
{
    if (!PyLong_Check(obj)) {
        memset(export_long, 0, sizeof(*export_long));
        PyErr_Format(PyExc_TypeError, "expected int, got %s",
                     Py_TYPE(obj)->tp_name);
        return -1;
    }

    // Fast-path: try to convert to a int64_t
    PyLongObject *self = (PyLongObject*)obj;
    int overflow;
#if SIZEOF_LONG == 8
    long value = PyLong_AsLongAndOverflow(obj, &overflow);
#else
    // Windows has 32-bit long, so use 64-bit long long instead
    long long value = PyLong_AsLongLongAndOverflow(obj, &overflow);
#endif
    Py_BUILD_ASSERT(sizeof(value) == sizeof(int64_t));
    // the function cannot fail since obj is a PyLongObject
    assert(!(value == -1 && PyErr_Occurred()));

    if (!overflow) {
        export_long->value = value;
        export_long->negative = 0;
        export_long->ndigits = 0;
        export_long->digits = 0;
        export_long->_reserved = 0;
    }
    else {
        export_long->value = 0;
        export_long->negative = _PyLong_Sign(obj) < 0;
        export_long->ndigits = _PyLong_DigitCount(self);
        if (export_long->ndigits == 0) {
            export_long->ndigits = 1;
        }
        export_long->digits = _PyLong_GetDigits(self);
        export_long->_reserved = (Py_uintptr_t)Py_NewRef(obj);
    }
    return 0;
}

static inline void
PyLong_FreeExport(PyLongExport *export_long)
{
    PyObject *obj = (PyObject*)export_long->_reserved;

    if (obj) {
        export_long->_reserved = 0;
        Py_DECREF(obj);
    }
}

// Allocate an int with room for *ndigits* digits; the caller fills the
// buffer returned via *digits and then calls PyLongWriter_Finish().
static inline PyLongWriter*
PyLongWriter_Create(int negative, Py_ssize_t ndigits, void **digits)
{
    if (ndigits <= 0) {
        PyErr_SetString(PyExc_ValueError, "ndigits must be positive");
        return NULL;
    }
    assert(digits != NULL);

    PyLongObject *obj = _PyLong_New(ndigits);
    if (obj == NULL) {
        return NULL;
    }
    _PyLong_SetSignAndDigitCount(obj, negative?-1:1, ndigits);

    *digits = _PyLong_GetDigits(obj);
    return (PyLongWriter*)obj;
}

static inline void
PyLongWriter_Discard(PyLongWriter *writer)
{
    PyLongObject *obj = (PyLongObject *)writer;

    assert(Py_REFCNT(obj) == 1);
    Py_DECREF(obj);
}

static inline PyObject*
PyLongWriter_Finish(PyLongWriter *writer)
{
    PyObject *obj = (PyObject *)writer;
    PyLongObject *self = (PyLongObject*)obj;
    Py_ssize_t j = _PyLong_DigitCount(self);
    Py_ssize_t i = j;
    int sign = _PyLong_Sign(obj);

    assert(Py_REFCNT(obj) == 1);

    // Normalize and get singleton if possible
    while (i > 0 && _PyLong_GetDigits(self)[i-1] == 0) {
        --i;
    }
    if (i != j) {
        if (i == 0) {
            sign = 0;
        }
        _PyLong_SetSignAndDigitCount(self, sign, i);
    }
    if (i <= 1) {
        // small values: prefer the interned small-int singleton
        long val = sign * (long)(_PyLong_GetDigits(self)[0]);
        Py_DECREF(obj);
        return PyLong_FromLong(val);
    }

    return obj;
}
#endif


#if PY_VERSION_HEX < 0x030C00A3
// Map the Py_T_* / Py_READONLY structmember names (Python 3.12+) onto the
// legacy T_* / READONLY constants.
#  define Py_T_SHORT T_SHORT
#  define Py_T_INT T_INT
#  define Py_T_LONG T_LONG
#  define Py_T_FLOAT T_FLOAT
#  define Py_T_DOUBLE T_DOUBLE
#  define Py_T_STRING T_STRING
#  define _Py_T_OBJECT T_OBJECT
#  define Py_T_CHAR T_CHAR
#  define Py_T_BYTE T_BYTE
#  define Py_T_UBYTE T_UBYTE
#  define Py_T_USHORT T_USHORT
#  define Py_T_UINT T_UINT
#  define Py_T_ULONG T_ULONG
#  define Py_T_STRING_INPLACE T_STRING_INPLACE
#  define Py_T_BOOL T_BOOL
#  define Py_T_OBJECT_EX T_OBJECT_EX
#  define Py_T_LONGLONG T_LONGLONG
#  define Py_T_ULONGLONG T_ULONGLONG
#  define Py_T_PYSSIZET T_PYSSIZET
#  if PY_VERSION_HEX >= 0x03000000 && !defined(PYPY_VERSION)
#    define _Py_T_NONE T_NONE
#  endif
#  define Py_READONLY READONLY
#  define Py_AUDIT_READ READ_RESTRICTED
#  define _Py_WRITE_RESTRICTED PY_WRITE_RESTRICTED
#endif


// gh-127350 added Py_fopen() and Py_fclose() to Python 3.14a4
#if PY_VERSION_HEX < 0x030E00A4
// fopen() variant that accepts a Python path-like object and raises
// OSError (with the filename attached) on failure.
static inline FILE* Py_fopen(PyObject *path, const char *mode)
{
#if 0x030400A2 <= PY_VERSION_HEX && !defined(PYPY_VERSION)
    // local forward declaration of the private CPython helper
    PyAPI_FUNC(FILE*) _Py_fopen_obj(PyObject *path, const char *mode);

    return _Py_fopen_obj(path, mode);
#else
    FILE *f;
    PyObject *bytes;
#if PY_VERSION_HEX >= 0x03000000
    if (!PyUnicode_FSConverter(path, &bytes)) {
        return NULL;
    }
#else
    if (!PyString_Check(path)) {
        PyErr_SetString(PyExc_TypeError, "except str");
        return NULL;
    }
    bytes = Py_NewRef(path);
#endif
    const char *path_bytes = PyBytes_AS_STRING(bytes);

    f = fopen(path_bytes, mode);
    Py_DECREF(bytes);

    if (f == NULL) {
        PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, path);
        return NULL;
    }
    return f;
#endif
}

static inline int Py_fclose(FILE *file)
{
    return fclose(file);
}
#endif


#if 0x03090000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030E0000 && !defined(PYPY_VERSION)
// NOTE(review): PyConfig_Get()'s spec table continues beyond this chunk;
// only the prefix visible here is reproduced.
static inline PyObject*
PyConfig_Get(const char *name)
{
    typedef enum {
        _PyConfig_MEMBER_INT,
        _PyConfig_MEMBER_UINT,
        _PyConfig_MEMBER_ULONG,
        _PyConfig_MEMBER_BOOL,
        _PyConfig_MEMBER_WSTR,
        _PyConfig_MEMBER_WSTR_OPT,
        _PyConfig_MEMBER_WSTR_LIST,
    } PyConfigMemberType;

    typedef struct {
        const char *name;
        size_t offset;
        PyConfigMemberType type;
        const char *sys_attr;
    } PyConfigSpec;

#define PYTHONCAPI_COMPAT_SPEC(MEMBER, TYPE, sys_attr) \
    {#MEMBER, offsetof(PyConfig, MEMBER), \
     _PyConfig_MEMBER_##TYPE, sys_attr}

    static const PyConfigSpec config_spec[] = {
        PYTHONCAPI_COMPAT_SPEC(argv, WSTR_LIST, "argv"),
        PYTHONCAPI_COMPAT_SPEC(base_exec_prefix, WSTR_OPT, "base_exec_prefix"),
        PYTHONCAPI_COMPAT_SPEC(base_executable, WSTR_OPT, "_base_executable"),
        PYTHONCAPI_COMPAT_SPEC(base_prefix, WSTR_OPT, "base_prefix"),
        PYTHONCAPI_COMPAT_SPEC(bytes_warning, UINT, _Py_NULL),
        PYTHONCAPI_COMPAT_SPEC(exec_prefix, WSTR_OPT, "exec_prefix"),
        PYTHONCAPI_COMPAT_SPEC(executable, WSTR_OPT, "executable"),
        PYTHONCAPI_COMPAT_SPEC(inspect, BOOL, _Py_NULL),
#if 0x030C0000 <= PY_VERSION_HEX
        PYTHONCAPI_COMPAT_SPEC(int_max_str_digits, UINT, _Py_NULL),
#endif
        PYTHONCAPI_COMPAT_SPEC(interactive, BOOL, _Py_NULL),
        PYTHONCAPI_COMPAT_SPEC(module_search_paths, WSTR_LIST, "path"),
        PYTHONCAPI_COMPAT_SPEC(optimization_level, UINT, _Py_NULL),
        PYTHONCAPI_COMPAT_SPEC(parser_debug, BOOL, _Py_NULL),
        PYTHONCAPI_COMPAT_SPEC(platlibdir, WSTR, "platlibdir"),
        PYTHONCAPI_COMPAT_SPEC(prefix, WSTR_OPT, "prefix"),
        PYTHONCAPI_COMPAT_SPEC(pycache_prefix, WSTR_OPT, "pycache_prefix"),
        PYTHONCAPI_COMPAT_SPEC(quiet, BOOL, _Py_NULL),
#if 0x030B0000 <= PY_VERSION_HEX
        PYTHONCAPI_COMPAT_SPEC(stdlib_dir, WSTR_OPT, "_stdlib_dir"),
#endif
        PYTHONCAPI_COMPAT_SPEC(use_environment, BOOL, _Py_NULL),
        PYTHONCAPI_COMPAT_SPEC(verbose, UINT, _Py_NULL),
        PYTHONCAPI_COMPAT_SPEC(warnoptions, WSTR_LIST, "warnoptions"),
        PYTHONCAPI_COMPAT_SPEC(write_bytecode, BOOL, _Py_NULL),
        PYTHONCAPI_COMPAT_SPEC(xoptions, WSTR_LIST, "_xoptions"),
        PYTHONCAPI_COMPAT_SPEC(buffered_stdio, BOOL, _Py_NULL),
        PYTHONCAPI_COMPAT_SPEC(check_hash_pycs_mode, WSTR, _Py_NULL),
#if 0x030B0000 <= PY_VERSION_HEX
        PYTHONCAPI_COMPAT_SPEC(code_debug_ranges, BOOL, _Py_NULL),
#endif
        PYTHONCAPI_COMPAT_SPEC(configure_c_stdio, BOOL, _Py_NULL),
#if 0x030D0000 <= PY_VERSION_HEX
        PYTHONCAPI_COMPAT_SPEC(cpu_count, INT, _Py_NULL),
#endif
        PYTHONCAPI_COMPAT_SPEC(dev_mode, BOOL, _Py_NULL),
        PYTHONCAPI_COMPAT_SPEC(dump_refs, BOOL, _Py_NULL),
#if 0x030B0000 <= PY_VERSION_HEX
        PYTHONCAPI_COMPAT_SPEC(dump_refs_file, WSTR_OPT, _Py_NULL),
#endif
#ifdef Py_GIL_DISABLED
        PYTHONCAPI_COMPAT_SPEC(enable_gil, INT, _Py_NULL),
#endif
        PYTHONCAPI_COMPAT_SPEC(faulthandler, BOOL, _Py_NULL),
        PYTHONCAPI_COMPAT_SPEC(filesystem_encoding, WSTR, _Py_NULL),
        PYTHONCAPI_COMPAT_SPEC(filesystem_errors, WSTR, _Py_NULL),
        PYTHONCAPI_COMPAT_SPEC(hash_seed, ULONG, _Py_NULL),
        PYTHONCAPI_COMPAT_SPEC(home, WSTR_OPT, _Py_NULL),
        PYTHONCAPI_COMPAT_SPEC(import_time, BOOL, _Py_NULL),
PYTHONCAPI_COMPAT_SPEC(install_signal_handlers, BOOL, _Py_NULL), PYTHONCAPI_COMPAT_SPEC(isolated, BOOL, _Py_NULL), #ifdef MS_WINDOWS PYTHONCAPI_COMPAT_SPEC(legacy_windows_stdio, BOOL, _Py_NULL), #endif PYTHONCAPI_COMPAT_SPEC(malloc_stats, BOOL, _Py_NULL), #if 0x030A0000 <= PY_VERSION_HEX PYTHONCAPI_COMPAT_SPEC(orig_argv, WSTR_LIST, "orig_argv"), #endif PYTHONCAPI_COMPAT_SPEC(parse_argv, BOOL, _Py_NULL), PYTHONCAPI_COMPAT_SPEC(pathconfig_warnings, BOOL, _Py_NULL), #if 0x030C0000 <= PY_VERSION_HEX PYTHONCAPI_COMPAT_SPEC(perf_profiling, UINT, _Py_NULL), #endif PYTHONCAPI_COMPAT_SPEC(program_name, WSTR, _Py_NULL), PYTHONCAPI_COMPAT_SPEC(run_command, WSTR_OPT, _Py_NULL), PYTHONCAPI_COMPAT_SPEC(run_filename, WSTR_OPT, _Py_NULL), PYTHONCAPI_COMPAT_SPEC(run_module, WSTR_OPT, _Py_NULL), #if 0x030B0000 <= PY_VERSION_HEX PYTHONCAPI_COMPAT_SPEC(safe_path, BOOL, _Py_NULL), #endif PYTHONCAPI_COMPAT_SPEC(show_ref_count, BOOL, _Py_NULL), PYTHONCAPI_COMPAT_SPEC(site_import, BOOL, _Py_NULL), PYTHONCAPI_COMPAT_SPEC(skip_source_first_line, BOOL, _Py_NULL), PYTHONCAPI_COMPAT_SPEC(stdio_encoding, WSTR, _Py_NULL), PYTHONCAPI_COMPAT_SPEC(stdio_errors, WSTR, _Py_NULL), PYTHONCAPI_COMPAT_SPEC(tracemalloc, UINT, _Py_NULL), #if 0x030B0000 <= PY_VERSION_HEX PYTHONCAPI_COMPAT_SPEC(use_frozen_modules, BOOL, _Py_NULL), #endif PYTHONCAPI_COMPAT_SPEC(use_hash_seed, BOOL, _Py_NULL), PYTHONCAPI_COMPAT_SPEC(user_site_directory, BOOL, _Py_NULL), #if 0x030A0000 <= PY_VERSION_HEX PYTHONCAPI_COMPAT_SPEC(warn_default_encoding, BOOL, _Py_NULL), #endif }; #undef PYTHONCAPI_COMPAT_SPEC const PyConfigSpec *spec; int found = 0; for (size_t i=0; i < sizeof(config_spec) / sizeof(config_spec[0]); i++) { spec = &config_spec[i]; if (strcmp(spec->name, name) == 0) { found = 1; break; } } if (found) { if (spec->sys_attr != NULL) { PyObject *value = PySys_GetObject(spec->sys_attr); if (value == NULL) { PyErr_Format(PyExc_RuntimeError, "lost sys.%s", spec->sys_attr); return NULL; } return Py_NewRef(value); } 
PyAPI_FUNC(const PyConfig*) _Py_GetConfig(void); const PyConfig *config = _Py_GetConfig(); void *member = (char *)config + spec->offset; switch (spec->type) { case _PyConfig_MEMBER_INT: case _PyConfig_MEMBER_UINT: { int value = *(int *)member; return PyLong_FromLong(value); } case _PyConfig_MEMBER_BOOL: { int value = *(int *)member; return PyBool_FromLong(value != 0); } case _PyConfig_MEMBER_ULONG: { unsigned long value = *(unsigned long *)member; return PyLong_FromUnsignedLong(value); } case _PyConfig_MEMBER_WSTR: case _PyConfig_MEMBER_WSTR_OPT: { wchar_t *wstr = *(wchar_t **)member; if (wstr != NULL) { return PyUnicode_FromWideChar(wstr, -1); } else { return Py_NewRef(Py_None); } } case _PyConfig_MEMBER_WSTR_LIST: { const PyWideStringList *list = (const PyWideStringList *)member; PyObject *tuple = PyTuple_New(list->length); if (tuple == NULL) { return NULL; } for (Py_ssize_t i = 0; i < list->length; i++) { PyObject *item = PyUnicode_FromWideChar(list->items[i], -1); if (item == NULL) { Py_DECREF(tuple); return NULL; } PyTuple_SET_ITEM(tuple, i, item); } return tuple; } default: Py_UNREACHABLE(); } } PyErr_Format(PyExc_ValueError, "unknown config option name: %s", name); return NULL; } static inline int PyConfig_GetInt(const char *name, int *value) { PyObject *obj = PyConfig_Get(name); if (obj == NULL) { return -1; } if (!PyLong_Check(obj)) { Py_DECREF(obj); PyErr_Format(PyExc_TypeError, "config option %s is not an int", name); return -1; } int as_int = PyLong_AsInt(obj); Py_DECREF(obj); if (as_int == -1 && PyErr_Occurred()) { PyErr_Format(PyExc_OverflowError, "config option %s value does not fit into a C int", name); return -1; } *value = as_int; return 0; } #endif // PY_VERSION_HEX > 0x03090000 && !defined(PYPY_VERSION) #ifdef __cplusplus } #endif #endif // PYTHONCAPI_COMPAT ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/multidict/_multilib/state.h0000644000175100001660000000562514776033000020404 
0ustar00runnerdocker#ifndef _MULTIDICT_STATE_H #define _MULTIDICT_STATE_H #ifdef __cplusplus extern "C" { #endif /* State of the _multidict module */ typedef struct { PyTypeObject *IStrType; PyTypeObject *MultiDictType; PyTypeObject *CIMultiDictType; PyTypeObject *MultiDictProxyType; PyTypeObject *CIMultiDictProxyType; PyTypeObject *KeysViewType; PyTypeObject *ItemsViewType; PyTypeObject *ValuesViewType; PyTypeObject *KeysIterType; PyTypeObject *ItemsIterType; PyTypeObject *ValuesIterType; PyObject *str_lower; PyObject *str_canonical; } mod_state; static inline mod_state * get_mod_state(PyObject *mod) { mod_state *state = (mod_state *)PyModule_GetState(mod); assert(state != NULL); return state; } static inline mod_state * get_mod_state_by_cls(PyTypeObject *cls) { mod_state *state = (mod_state *)PyType_GetModuleState(cls); assert(state != NULL); return state; } #if PY_VERSION_HEX < 0x030b0000 PyObject * PyType_GetModuleByDef(PyTypeObject *tp, PyModuleDef *def) { PyModuleDef * mod_def; if (!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE)) { goto err; } PyObject *mod = NULL; mod = PyType_GetModule(tp); if (mod == NULL) { PyErr_Clear(); } else { mod_def = PyModule_GetDef(mod); if (mod_def == def) { return mod; } } PyObject *mro = tp->tp_mro; assert(mro != NULL); assert(PyTuple_Check(mro)); assert(PyTuple_GET_SIZE(mro) >= 1); assert(PyTuple_GET_ITEM(mro, 0) == (PyObject *)tp); Py_ssize_t n = PyTuple_GET_SIZE(mro); for (Py_ssize_t i = 1; i < n; i++) { PyObject *super = PyTuple_GET_ITEM(mro, i); if (!PyType_HasFeature((PyTypeObject *)super, Py_TPFLAGS_HEAPTYPE)) { continue; } mod = PyType_GetModule((PyTypeObject*)super); if (mod == NULL) { PyErr_Clear(); } else { mod_def = PyModule_GetDef(mod); if (mod_def == def) { return mod; } } } err: PyErr_Format( PyExc_TypeError, "PyType_GetModuleByDef: No superclass of '%s' has the given module", tp->tp_name); return NULL; } #endif static PyModuleDef multidict_module; static inline int get_mod_state_by_def_checked(PyObject *self, 
mod_state **ret) { PyTypeObject *tp = Py_TYPE(self); PyObject *mod = PyType_GetModuleByDef(tp, &multidict_module); if (mod == NULL) { *ret = NULL; if (PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); return 0; } return -1; } *ret = get_mod_state(mod); return 1; } static inline mod_state * get_mod_state_by_def(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); PyObject *mod = PyType_GetModuleByDef(tp, &multidict_module); assert(mod != NULL); return get_mod_state(mod); } #ifdef __cplusplus } #endif #endif ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/multidict/_multilib/views.h0000644000175100001660000012370614776033000020422 0ustar00runnerdocker#ifndef _MULTIDICT_VIEWS_H #define _MULTIDICT_VIEWS_H #ifdef __cplusplus extern "C" { #endif #include "dict.h" #include "pair_list.h" #include "state.h" typedef struct { PyObject_HEAD MultiDictObject *md; } _Multidict_ViewObject; #define Items_CheckExact(state, obj) Py_IS_TYPE(obj, state->ItemsViewType) #define Keys_CheckExact(state, obj) Py_IS_TYPE(obj, state->KeysViewType) #define Values_CheckExact(state, obj) Py_IS_TYPE(obj, state->ValuesViewType) /********** Base **********/ static inline void _init_view(_Multidict_ViewObject *self, MultiDictObject *md) { Py_INCREF(md); self->md = md; } static inline void multidict_view_dealloc(_Multidict_ViewObject *self) { PyObject_GC_UnTrack(self); Py_XDECREF(self->md); PyObject_GC_Del(self); } static inline int multidict_view_traverse(_Multidict_ViewObject *self, visitproc visit, void *arg) { Py_VISIT(self->md); return 0; } static inline int multidict_view_clear(_Multidict_ViewObject *self) { Py_CLEAR(self->md); return 0; } static inline Py_ssize_t multidict_view_len(_Multidict_ViewObject *self) { return pair_list_len(&self->md->pairs); } static inline PyObject * multidict_view_richcompare(PyObject *self, PyObject *other, int op) { int tmp; Py_ssize_t self_size = PyObject_Length(self); if (self_size < 0) { 
return NULL; } Py_ssize_t size = PyObject_Length(other); if (size < 0) { PyErr_Clear(); Py_RETURN_NOTIMPLEMENTED; } PyObject *iter = NULL; PyObject *item = NULL; switch(op) { case Py_LT: if (self_size >= size) Py_RETURN_FALSE; return PyObject_RichCompare(self, other, Py_LE); case Py_LE: if (self_size > size) { Py_RETURN_FALSE; } iter = PyObject_GetIter(self); if (iter == NULL) { goto fail; } while ((item = PyIter_Next(iter))) { tmp = PySequence_Contains(other, item); if (tmp < 0) { goto fail; } Py_CLEAR(item); if (tmp == 0) { Py_CLEAR(iter); Py_RETURN_FALSE; } } Py_CLEAR(iter); if (PyErr_Occurred()) { goto fail; } Py_RETURN_TRUE; case Py_EQ: if (self_size != size) Py_RETURN_FALSE; return PyObject_RichCompare(self, other, Py_LE); case Py_NE: tmp = PyObject_RichCompareBool(self, other, Py_EQ); if (tmp < 0) goto fail; return PyBool_FromLong(!tmp); case Py_GT: if (self_size <= size) Py_RETURN_FALSE; return PyObject_RichCompare(self, other, Py_GE); case Py_GE: if (self_size < size) { Py_RETURN_FALSE; } iter = PyObject_GetIter(other); if (iter == NULL) { goto fail; } while ((item = PyIter_Next(iter))) { tmp = PySequence_Contains(self, item); if (tmp < 0) { goto fail; } Py_CLEAR(item); if (tmp == 0) { Py_CLEAR(iter); Py_RETURN_FALSE; } } Py_CLEAR(iter); if (PyErr_Occurred()) { goto fail; } Py_RETURN_TRUE; } fail: Py_CLEAR(item); Py_CLEAR(iter); return NULL; } /********** Items **********/ static inline PyObject * multidict_itemsview_new(MultiDictObject *md) { _Multidict_ViewObject *mv = PyObject_GC_New( _Multidict_ViewObject, md->pairs.state->ItemsViewType); if (mv == NULL) { return NULL; } _init_view(mv, md); PyObject_GC_Track(mv); return (PyObject *)mv; } static inline PyObject * multidict_itemsview_iter(_Multidict_ViewObject *self) { return multidict_items_iter_new(self->md); } static inline PyObject * multidict_itemsview_repr(_Multidict_ViewObject *self) { int tmp = Py_ReprEnter((PyObject *)self); if (tmp < 0) { return NULL; } if (tmp > 0) { return 
PyUnicode_FromString("..."); } PyObject *name = PyObject_GetAttrString((PyObject*)Py_TYPE(self), "__name__"); if (name == NULL) { Py_ReprLeave((PyObject *)self); return NULL; } PyObject *ret = pair_list_repr(&self->md->pairs, name, true, true); Py_ReprLeave((PyObject *)self); Py_CLEAR(name); return ret; } static inline int _multidict_itemsview_parse_item(_Multidict_ViewObject *self, PyObject *arg, PyObject **pidentity, PyObject **pkey, PyObject **pvalue) { assert(pidentity != NULL); if (!PyTuple_Check(arg)) { return 0; } Py_ssize_t size = PyTuple_Size(arg); if (size != 2) { return 0; } PyObject *key = Py_NewRef(PyTuple_GET_ITEM(arg, 0)); if (pkey != NULL) { *pkey = Py_NewRef(key); } if (pvalue != NULL) { *pvalue = Py_NewRef(PyTuple_GET_ITEM(arg, 1)); } *pidentity = pair_list_calc_identity(&self->md->pairs, key); Py_DECREF(key); if (*pidentity == NULL) { if (pkey != NULL) { Py_CLEAR(*pkey); } if (pvalue != NULL) { Py_CLEAR(*pvalue); } if (PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); return 0; } else { return -1; } } return 1; } static int _set_add(PyObject *set, PyObject *key, PyObject * value) { PyObject *tpl = PyTuple_Pack(2, key, value); if (tpl == NULL) { return -1; } int tmp = PySet_Add(set, tpl); Py_DECREF(tpl); return tmp; } static inline PyObject * multidict_itemsview_and1(_Multidict_ViewObject *self, PyObject *other) { PyObject *identity = NULL; PyObject *key = NULL; PyObject *key2 = NULL; PyObject *value = NULL; PyObject *value2 = NULL; PyObject *arg = NULL; PyObject *ret = NULL; pair_list_pos_t pos; PyObject *iter = PyObject_GetIter(other); if (iter == NULL) { if (PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); Py_RETURN_NOTIMPLEMENTED; } goto fail; } ret = PySet_New(NULL); if (ret == NULL) { goto fail; } while ((arg = PyIter_Next(iter))) { int tmp = _multidict_itemsview_parse_item(self, arg, &identity, &key, &value); if (tmp < 0) { goto fail; } else if (tmp == 0) { Py_CLEAR(arg); continue; } pair_list_init_pos(&self->md->pairs, 
&pos); while (true) { tmp = pair_list_next_by_identity(&self->md->pairs, &pos, identity, &key2, &value2); if (tmp < 0) { goto fail; } else if (tmp == 0) { break; } else { tmp = PyObject_RichCompareBool(value, value2, Py_EQ); if (tmp < 0) { goto fail; } if (tmp > 0) { if (_set_add(ret, key2, value2) < 0) { goto fail; } } } Py_CLEAR(key2); Py_CLEAR(value2); } Py_CLEAR(arg); Py_CLEAR(identity); Py_CLEAR(key); Py_CLEAR(value); } if (PyErr_Occurred()) { goto fail; } Py_CLEAR(iter); return ret; fail: Py_CLEAR(arg); Py_CLEAR(identity); Py_CLEAR(key); Py_CLEAR(key2); Py_CLEAR(value); Py_CLEAR(value2); Py_CLEAR(iter); Py_CLEAR(ret); return NULL; } static inline PyObject * multidict_itemsview_and2(_Multidict_ViewObject *self, PyObject *other) { PyObject *identity = NULL; PyObject *key = NULL; PyObject *value = NULL; PyObject *value2 = NULL; PyObject *arg = NULL; PyObject *ret = NULL; pair_list_pos_t pos; PyObject *iter = PyObject_GetIter(other); if (iter == NULL) { if (PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); Py_RETURN_NOTIMPLEMENTED; } goto fail; } ret = PySet_New(NULL); if (ret == NULL) { goto fail; } while ((arg = PyIter_Next(iter))) { int tmp = _multidict_itemsview_parse_item(self, arg, &identity, &key, &value); if (tmp < 0) { goto fail; } else if (tmp == 0) { Py_CLEAR(arg); continue; } pair_list_init_pos(&self->md->pairs, &pos); while (true) { tmp = pair_list_next_by_identity(&self->md->pairs, &pos, identity, NULL, &value2); if (tmp < 0) { goto fail; } else if (tmp == 0) { break; } else { tmp = PyObject_RichCompareBool(value, value2, Py_EQ); if (tmp < 0) { goto fail; } if (tmp > 0) { if (_set_add(ret, key, value2) < 0) { goto fail; } } } Py_CLEAR(value2); } Py_CLEAR(arg); Py_CLEAR(identity); Py_CLEAR(key); Py_CLEAR(value); } if (PyErr_Occurred()) { goto fail; } Py_CLEAR(iter); return ret; fail: Py_CLEAR(arg); Py_CLEAR(identity); Py_CLEAR(key); Py_CLEAR(value); Py_CLEAR(value2); Py_CLEAR(iter); Py_CLEAR(ret); return NULL; } static inline PyObject * 
multidict_itemsview_and(PyObject *lft, PyObject *rht) { mod_state * state; int tmp = get_mod_state_by_def_checked(lft, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { tmp = get_mod_state_by_def_checked(rht, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { Py_RETURN_NOTIMPLEMENTED; } } assert(state != NULL); if (Items_CheckExact(state, lft)) { return multidict_itemsview_and1((_Multidict_ViewObject *)lft, rht); } else if (Items_CheckExact(state, rht)) { return multidict_itemsview_and2((_Multidict_ViewObject *)rht, lft); } Py_RETURN_NOTIMPLEMENTED; } static inline PyObject * multidict_itemsview_or1(_Multidict_ViewObject *self, PyObject *other) { PyObject *identity = NULL; PyObject *key = NULL; PyObject *value = NULL; PyObject *value2 = NULL; PyObject *arg = NULL; PyObject *ret = NULL; pair_list_pos_t pos; PyObject *iter = PyObject_GetIter(other); if (iter == NULL) { if (PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); Py_RETURN_NOTIMPLEMENTED; } goto fail; } ret = PySet_New((PyObject *)self); if (ret == NULL) { goto fail; } while ((arg = PyIter_Next(iter))) { int tmp = _multidict_itemsview_parse_item(self, arg, &identity, &key, &value); if (tmp < 0) { goto fail; } else if (tmp == 0) { if (PySet_Add(ret, arg) < 0) { goto fail; } Py_CLEAR(arg); continue; } pair_list_init_pos(&self->md->pairs, &pos); while (true) { tmp = pair_list_next_by_identity(&self->md->pairs, &pos, identity, NULL, &value2); if (tmp < 0) { goto fail; } else if (tmp == 0) { if (PySet_Add(ret, arg) < 0) { goto fail; } break; } else { tmp = PyObject_RichCompareBool(value, value2, Py_EQ); if (tmp < 0) { goto fail; } if (tmp > 0) { Py_CLEAR(value2); break; } } Py_CLEAR(value2); } Py_CLEAR(arg); Py_CLEAR(identity); Py_CLEAR(key); Py_CLEAR(value); } if (PyErr_Occurred()) { goto fail; } Py_CLEAR(iter); return ret; fail: Py_CLEAR(arg); Py_CLEAR(identity); Py_CLEAR(key); Py_CLEAR(value); Py_CLEAR(value2); Py_CLEAR(iter); Py_CLEAR(ret); return NULL; } static inline PyObject * 
multidict_itemsview_or2(_Multidict_ViewObject *self, PyObject *other) { PyObject *identity = NULL; PyObject *iter = NULL; PyObject *key = NULL; PyObject *value = NULL; PyObject *arg = NULL; PyObject *tmp_set = NULL; pair_list_pos_t pos; PyObject *ret = PySet_New(other); if (ret == NULL) { if (PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); Py_RETURN_NOTIMPLEMENTED; } goto fail; } iter = PyObject_GetIter(other); if (iter == NULL) { goto fail; } tmp_set = PySet_New(NULL); if (tmp_set == NULL) { goto fail; } while ((arg = PyIter_Next(iter))) { int tmp = _multidict_itemsview_parse_item(self, arg, &identity, NULL, &value); if (tmp < 0) { goto fail; } else if (tmp > 0) { if (_set_add(tmp_set, identity, value) < 0) { goto fail; } } Py_CLEAR(arg); } if (PyErr_Occurred()) { goto fail; } Py_CLEAR(iter); pair_list_init_pos(&self->md->pairs, &pos); while (true) { int tmp = pair_list_next(&self->md->pairs, &pos, &identity, &key, &value); if (tmp < 0) { goto fail; } else if (tmp == 0) { break; } else { PyObject *tpl = PyTuple_Pack(2, identity, value); if (tpl == NULL) { goto fail; } tmp = PySet_Contains(tmp_set, tpl); if (tmp < 0) { goto fail; } if (tmp == 0) { if (_set_add(ret, key, value) < 0) { goto fail; } } Py_CLEAR(identity); Py_CLEAR(key); Py_CLEAR(value); } } Py_CLEAR(tmp_set); return ret; fail: Py_CLEAR(arg); Py_CLEAR(identity); Py_CLEAR(key); Py_CLEAR(value); Py_CLEAR(iter); Py_CLEAR(ret); Py_CLEAR(tmp_set); return NULL; } static inline PyObject * multidict_itemsview_or(PyObject *lft, PyObject *rht) { mod_state * state; int tmp = get_mod_state_by_def_checked(lft, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { tmp = get_mod_state_by_def_checked(rht, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { Py_RETURN_NOTIMPLEMENTED; } } assert(state != NULL); if (Items_CheckExact(state, lft)) { return multidict_itemsview_or1((_Multidict_ViewObject *)lft, rht); } else if (Items_CheckExact(state, rht)) { return 
multidict_itemsview_or2((_Multidict_ViewObject *)rht, lft); } Py_RETURN_NOTIMPLEMENTED; } static inline PyObject * multidict_itemsview_sub1(_Multidict_ViewObject *self, PyObject *other) { PyObject *arg = NULL; PyObject *identity = NULL; PyObject *key = NULL; PyObject *value = NULL; PyObject *ret = NULL; PyObject *tmp_set = NULL; pair_list_pos_t pos; PyObject *iter = PyObject_GetIter(other); if (iter == NULL) { if (PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); Py_RETURN_NOTIMPLEMENTED; } goto fail; } ret = PySet_New(NULL); if (ret == NULL) { goto fail; } tmp_set = PySet_New(NULL); if (tmp_set == NULL) { goto fail; } while ((arg = PyIter_Next(iter))) { int tmp = _multidict_itemsview_parse_item(self, arg, &identity, NULL, &value); if (tmp < 0) { goto fail; } else if (tmp > 0) { if (_set_add(tmp_set, identity, value) < 0) { goto fail; } } Py_CLEAR(arg); } if (PyErr_Occurred()) { goto fail; } Py_CLEAR(iter); pair_list_init_pos(&self->md->pairs, &pos); while (true) { int tmp = pair_list_next(&self->md->pairs, &pos, &identity, &key, &value); if (tmp < 0) { goto fail; } else if (tmp == 0) { break; } else { PyObject *tpl = PyTuple_Pack(2, identity, value); if (tpl == NULL) { goto fail; } tmp = PySet_Contains(tmp_set, tpl); if (tmp < 0) { goto fail; } if (tmp == 0) { if (_set_add(ret, key, value) < 0) { goto fail; } } Py_CLEAR(identity); Py_CLEAR(key); Py_CLEAR(value); } } Py_CLEAR(tmp_set); return ret; fail: Py_CLEAR(identity); Py_CLEAR(key); Py_CLEAR(value); Py_CLEAR(ret); Py_CLEAR(tmp_set); Py_CLEAR(iter); Py_CLEAR(ret); return NULL; } static inline PyObject * multidict_itemsview_sub2(_Multidict_ViewObject *self, PyObject *other) { PyObject *arg = NULL; PyObject *identity = NULL; PyObject *key = NULL; PyObject *value = NULL; PyObject *value2 = NULL; PyObject *ret = NULL; PyObject *iter = PyObject_GetIter(other); pair_list_pos_t pos; if (iter == NULL) { if (PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); Py_RETURN_NOTIMPLEMENTED; } goto fail; } ret = 
PySet_New(NULL); if (ret == NULL) { goto fail; } while ((arg = PyIter_Next(iter))) { int tmp = _multidict_itemsview_parse_item(self, arg, &identity, NULL, &value); if (tmp < 0) { goto fail; } else if (tmp == 0) { if (PySet_Add(ret, arg) < 0) { goto fail; } Py_CLEAR(arg); continue; } pair_list_init_pos(&self->md->pairs, &pos); while (true) { tmp = pair_list_next_by_identity(&self->md->pairs, &pos, identity, NULL, &value2); if (tmp < 0) { goto fail; } else if (tmp == 0) { if (PySet_Add(ret, arg) < 0) { goto fail; } break; } else { tmp = PyObject_RichCompareBool(value, value2, Py_EQ); if (tmp < 0) { goto fail; } if (tmp > 0) { Py_CLEAR(value2); break; } } Py_CLEAR(value2); } Py_CLEAR(arg); Py_CLEAR(identity); Py_CLEAR(key); Py_CLEAR(value); } if (PyErr_Occurred()) { goto fail; } Py_CLEAR(iter); return ret; fail: Py_CLEAR(arg); Py_CLEAR(identity); Py_CLEAR(key); Py_CLEAR(value); Py_CLEAR(iter); Py_CLEAR(ret); return NULL; } static inline PyObject * multidict_itemsview_sub(PyObject *lft, PyObject *rht) { mod_state * state; int tmp = get_mod_state_by_def_checked(lft, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { tmp = get_mod_state_by_def_checked(rht, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { Py_RETURN_NOTIMPLEMENTED; } } assert(state != NULL); if (Items_CheckExact(state, lft)) { return multidict_itemsview_sub1((_Multidict_ViewObject *)lft, rht); } else if (Items_CheckExact(state, rht)) { return multidict_itemsview_sub2((_Multidict_ViewObject *)rht, lft); } Py_RETURN_NOTIMPLEMENTED; } static inline PyObject * multidict_itemsview_xor(_Multidict_ViewObject *self, PyObject *other) { mod_state * state; int tmp = get_mod_state_by_def_checked((PyObject *)self, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { tmp = get_mod_state_by_def_checked(other, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { Py_RETURN_NOTIMPLEMENTED; } } assert(state != NULL); if (!Items_CheckExact(state, self)) { if (Items_CheckExact(state, other)) { 
return multidict_itemsview_xor((_Multidict_ViewObject *)other, (PyObject *)self); } else { Py_RETURN_NOTIMPLEMENTED; } } PyObject *ret = NULL; PyObject *tmp1 = NULL; PyObject *tmp2 = NULL; PyObject *rht = PySet_New(other); if (rht == NULL) { if (PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); Py_RETURN_NOTIMPLEMENTED; } goto fail; } tmp1 = PyNumber_Subtract((PyObject *)self, rht); if (tmp1 == NULL) { goto fail; } tmp2 = PyNumber_Subtract(rht, (PyObject *)self); if (tmp2 == NULL) { goto fail; } ret = PyNumber_InPlaceOr(tmp1, tmp2); if (ret == NULL) { goto fail; } Py_CLEAR(tmp1); Py_CLEAR(tmp2); Py_CLEAR(rht); return ret; fail: Py_CLEAR(tmp1); Py_CLEAR(tmp2); Py_CLEAR(rht); Py_CLEAR(ret); return NULL; } static inline int multidict_itemsview_contains(_Multidict_ViewObject *self, PyObject *obj) { PyObject *akey = NULL, *aval = NULL, *bkey = NULL, *bval = NULL, *iter = NULL, *item = NULL; int ret1, ret2; if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 2) { return 0; } bkey = PyTuple_GET_ITEM(obj, 0); bval = PyTuple_GET_ITEM(obj, 1); iter = multidict_itemsview_iter(self); if (iter == NULL) { return 0; } while ((item = PyIter_Next(iter)) != NULL) { akey = PyTuple_GET_ITEM(item, 0); aval = PyTuple_GET_ITEM(item, 1); ret1 = PyObject_RichCompareBool(akey, bkey, Py_EQ); if (ret1 < 0) { Py_DECREF(iter); Py_DECREF(item); return -1; } ret2 = PyObject_RichCompareBool(aval, bval, Py_EQ); if (ret2 < 0) { Py_DECREF(iter); Py_DECREF(item); return -1; } if (ret1 > 0 && ret2 > 0) { Py_DECREF(iter); Py_DECREF(item); return 1; } Py_DECREF(item); } Py_DECREF(iter); if (PyErr_Occurred()) { return -1; } return 0; } static inline PyObject * multidict_itemsview_isdisjoint(_Multidict_ViewObject *self, PyObject *other) { PyObject *iter = PyObject_GetIter(other); if (iter == NULL) { return NULL; } PyObject *arg = NULL; PyObject *identity = NULL; PyObject *value = NULL; PyObject *value2 = NULL; pair_list_pos_t pos; while ((arg = PyIter_Next(iter))) { int tmp = 
_multidict_itemsview_parse_item(self, arg, &identity, NULL, &value); if (tmp < 0) { goto fail; } else if (tmp == 0) { Py_CLEAR(arg); continue; } pair_list_init_pos(&self->md->pairs, &pos); while (true) { tmp = pair_list_next_by_identity(&self->md->pairs, &pos, identity, NULL, &value2); if (tmp < 0) { goto fail; } else if (tmp == 0) { Py_CLEAR(value2); break; } else { tmp = PyObject_RichCompareBool(value, value2, Py_EQ); Py_CLEAR(value2); if (tmp < 0) { goto fail; } if (tmp > 0) { Py_CLEAR(iter); Py_CLEAR(arg); Py_CLEAR(identity); Py_CLEAR(value); Py_RETURN_FALSE; } } } Py_CLEAR(arg); Py_CLEAR(identity); Py_CLEAR(value); } Py_CLEAR(iter); if (PyErr_Occurred()) { return NULL; } Py_RETURN_TRUE; fail: Py_CLEAR(iter); Py_CLEAR(arg); Py_CLEAR(identity); Py_CLEAR(value); Py_CLEAR(value2); return NULL; } PyDoc_STRVAR(itemsview_isdisjoint_doc, "Return True if two sets have a null intersection."); static PyMethodDef multidict_itemsview_methods[] = { {"isdisjoint", (PyCFunction)multidict_itemsview_isdisjoint, METH_O, itemsview_isdisjoint_doc}, {NULL, NULL} /* sentinel */ }; static PyType_Slot multidict_itemsview_slots[] = { {Py_tp_dealloc, multidict_view_dealloc}, {Py_tp_repr, multidict_itemsview_repr}, {Py_nb_subtract, multidict_itemsview_sub}, {Py_nb_and, multidict_itemsview_and}, {Py_nb_xor, multidict_itemsview_xor}, {Py_nb_or, multidict_itemsview_or}, {Py_sq_length, multidict_view_len}, {Py_sq_contains, multidict_itemsview_contains}, {Py_tp_getattro, PyObject_GenericGetAttr}, {Py_tp_traverse, multidict_view_traverse}, {Py_tp_clear, multidict_view_clear}, {Py_tp_richcompare, multidict_view_richcompare}, {Py_tp_iter, multidict_itemsview_iter}, {Py_tp_methods, multidict_itemsview_methods}, {0, NULL}, }; static PyType_Spec multidict_itemsview_spec = { .name = "multidict._multidict._ItemsView", .basicsize = sizeof(_Multidict_ViewObject), .flags = (Py_TPFLAGS_DEFAULT #if PY_VERSION_HEX >= 0x030a0000 | Py_TPFLAGS_IMMUTABLETYPE #endif | Py_TPFLAGS_HAVE_GC), .slots = 
multidict_itemsview_slots, }; /********** Keys **********/ static inline PyObject * multidict_keysview_new(MultiDictObject *md) { _Multidict_ViewObject *mv = PyObject_GC_New( _Multidict_ViewObject, md->pairs.state->KeysViewType); if (mv == NULL) { return NULL; } _init_view(mv, md); PyObject_GC_Track(mv); return (PyObject *)mv; } static inline PyObject * multidict_keysview_iter(_Multidict_ViewObject *self) { return multidict_keys_iter_new(self->md); } static inline PyObject * multidict_keysview_repr(_Multidict_ViewObject *self) { PyObject *name = PyObject_GetAttrString((PyObject*)Py_TYPE(self), "__name__"); if (name == NULL) { return NULL; } PyObject *ret = pair_list_repr(&self->md->pairs, name, true, false); Py_CLEAR(name); return ret; } static inline PyObject * multidict_keysview_and1(_Multidict_ViewObject *self, PyObject *other) { PyObject *key = NULL; PyObject *key2 = NULL; PyObject *ret = NULL; PyObject *iter = PyObject_GetIter(other); if (iter == NULL) { if (PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); Py_RETURN_NOTIMPLEMENTED; } goto fail; } ret = PySet_New(NULL); if (ret == NULL) { goto fail; } while ((key = PyIter_Next(iter))) { if (!PyUnicode_Check(key)) { Py_CLEAR(key); continue; } int tmp = pair_list_contains(&self->md->pairs, key, &key2); if (tmp < 0) { goto fail; } if (tmp > 0) { if (PySet_Add(ret, key2) < 0) { goto fail; } } Py_CLEAR(key); Py_CLEAR(key2); } if (PyErr_Occurred()) { goto fail; } Py_CLEAR(iter); return ret; fail: Py_CLEAR(key); Py_CLEAR(key2); Py_CLEAR(iter); Py_CLEAR(ret); return NULL; } static inline PyObject * multidict_keysview_and2(_Multidict_ViewObject *self, PyObject *other) { PyObject *key = NULL; PyObject *ret = NULL; PyObject *iter = PyObject_GetIter(other); if (iter == NULL) { if (PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); Py_RETURN_NOTIMPLEMENTED; } goto fail; } ret = PySet_New(NULL); if (ret == NULL) { goto fail; } while ((key = PyIter_Next(iter))) { if (!PyUnicode_Check(key)) { Py_CLEAR(key); 
continue; } int tmp = pair_list_contains(&self->md->pairs, key, NULL); if (tmp < 0) { goto fail; } if (tmp > 0) { if (PySet_Add(ret, key) < 0) { goto fail; } } Py_CLEAR(key); } if (PyErr_Occurred()) { goto fail; } Py_CLEAR(iter); return ret; fail: Py_CLEAR(key); Py_CLEAR(iter); Py_CLEAR(ret); return NULL; } static inline PyObject * multidict_keysview_and(PyObject *lft, PyObject *rht) { mod_state * state; int tmp = get_mod_state_by_def_checked(lft, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { tmp = get_mod_state_by_def_checked(rht, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { Py_RETURN_NOTIMPLEMENTED; } } assert(state != NULL); if (Keys_CheckExact(state, lft)) { return multidict_keysview_and1((_Multidict_ViewObject *)lft, rht); } else if (Keys_CheckExact(state, rht)) { return multidict_keysview_and2((_Multidict_ViewObject *)rht, lft); } Py_RETURN_NOTIMPLEMENTED; } static inline PyObject * multidict_keysview_or1(_Multidict_ViewObject *self, PyObject *other) { PyObject *key = NULL; PyObject *ret = NULL; PyObject *iter = PyObject_GetIter(other); if (iter == NULL) { if (PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); Py_RETURN_NOTIMPLEMENTED; } goto fail; } ret = PySet_New((PyObject *)self); if (ret == NULL) { goto fail; } while ((key = PyIter_Next(iter))) { if (!PyUnicode_Check(key)) { if (PySet_Add(ret, key) < 0) { goto fail; } Py_CLEAR(key); continue; } int tmp = pair_list_contains(&self->md->pairs, key, NULL); if (tmp < 0) { goto fail; } if (tmp == 0) { if (PySet_Add(ret, key) < 0) { goto fail; } } Py_CLEAR(key); } if (PyErr_Occurred()) { goto fail; } Py_CLEAR(iter); return ret; fail: Py_CLEAR(key); Py_CLEAR(iter); Py_CLEAR(ret); return NULL; } static inline PyObject * multidict_keysview_or2(_Multidict_ViewObject *self, PyObject *other) { PyObject *iter = NULL; PyObject *identity = NULL; PyObject *key = NULL; PyObject *tmp_set = NULL; PyObject *ret = PySet_New(other); if (ret == NULL) { if 
(PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); Py_RETURN_NOTIMPLEMENTED; } goto fail; } iter = PyObject_GetIter(ret); if (iter == NULL) { goto fail; } tmp_set = PySet_New(NULL); if (tmp_set == NULL) { goto fail; } while ((key = PyIter_Next(iter))) { if (!PyUnicode_Check(key)) { Py_CLEAR(key); continue; } identity = pair_list_calc_identity(&self->md->pairs, key); if (identity == NULL) { goto fail; } if (PySet_Add(tmp_set, identity) < 0) { goto fail; } Py_CLEAR(identity); Py_CLEAR(key); } if (PyErr_Occurred()) { goto fail; } Py_CLEAR(iter); pair_list_pos_t pos; pair_list_init_pos(&self->md->pairs, &pos); while (true) { int tmp = pair_list_next(&self->md->pairs, &pos, &identity, &key, NULL); if (tmp < 0) { goto fail; } else if (tmp == 0) { break; } tmp = PySet_Contains(tmp_set, identity); if (tmp < 0) { goto fail; } if (tmp == 0) { if (PySet_Add(ret, key) < 0) { goto fail; } } Py_CLEAR(identity); Py_CLEAR(key); } Py_CLEAR(tmp_set); return ret; fail: Py_CLEAR(identity); Py_CLEAR(key); Py_CLEAR(iter); Py_CLEAR(ret); Py_CLEAR(tmp_set); return NULL; } static inline PyObject * multidict_keysview_or(PyObject *lft, PyObject *rht) { mod_state * state; int tmp = get_mod_state_by_def_checked(lft, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { tmp = get_mod_state_by_def_checked(rht, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { Py_RETURN_NOTIMPLEMENTED; } } assert(state != NULL); if (Keys_CheckExact(state, lft)) { return multidict_keysview_or1((_Multidict_ViewObject *)lft, rht); } else if (Keys_CheckExact(state, rht)) { return multidict_keysview_or2((_Multidict_ViewObject *)rht, lft); } Py_RETURN_NOTIMPLEMENTED; } static inline PyObject * multidict_keysview_sub1(_Multidict_ViewObject *self, PyObject *other) { int tmp; PyObject *key = NULL; PyObject *key2 = NULL; PyObject *ret = NULL; PyObject *iter = PyObject_GetIter(other); if (iter == NULL) { if (PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); Py_RETURN_NOTIMPLEMENTED; } goto 
fail; } ret = PySet_New((PyObject *)self); if (ret == NULL) { goto fail; } while ((key = PyIter_Next(iter))) { if (!PyUnicode_Check(key)) { Py_CLEAR(key); continue; } tmp = pair_list_contains(&self->md->pairs, key, &key2); if (tmp < 0) { goto fail; } if (tmp > 0) { if (PySet_Discard(ret, key2) < 0) { goto fail; } } Py_CLEAR(key); Py_CLEAR(key2); } if (PyErr_Occurred()) { goto fail; } Py_CLEAR(iter); return ret; fail: Py_CLEAR(key); Py_CLEAR(key2); Py_CLEAR(iter); Py_CLEAR(ret); return NULL; } static inline PyObject * multidict_keysview_sub2(_Multidict_ViewObject *self, PyObject *other) { int tmp; PyObject *key = NULL; PyObject *ret = NULL; PyObject *iter = PyObject_GetIter(other); if (iter == NULL) { if (PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); Py_RETURN_NOTIMPLEMENTED; } goto fail; } ret = PySet_New(other); if (ret == NULL) { goto fail; } while ((key = PyIter_Next(iter))) { if (!PyUnicode_Check(key)) { Py_CLEAR(key); continue; } tmp = pair_list_contains(&self->md->pairs, key, NULL); if (tmp < 0) { goto fail; } if (tmp > 0) { if (PySet_Discard(ret, key) < 0) { goto fail; } } Py_CLEAR(key); } if (PyErr_Occurred()) { goto fail; } Py_CLEAR(iter); return ret; fail: Py_CLEAR(key); Py_CLEAR(iter); Py_CLEAR(ret); return NULL; } static inline PyObject * multidict_keysview_sub(PyObject *lft, PyObject *rht) { mod_state * state; int tmp = get_mod_state_by_def_checked(lft, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { tmp = get_mod_state_by_def_checked(rht, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { Py_RETURN_NOTIMPLEMENTED; } } assert(state != NULL); if (Keys_CheckExact(state, lft)) { return multidict_keysview_sub1((_Multidict_ViewObject *)lft, rht); } else if (Keys_CheckExact(state, rht)) { return multidict_keysview_sub2((_Multidict_ViewObject *)rht, lft); } Py_RETURN_NOTIMPLEMENTED; } static inline PyObject * multidict_keysview_xor(_Multidict_ViewObject *self, PyObject *other) { mod_state * state; int tmp = 
get_mod_state_by_def_checked((PyObject *)self, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { tmp = get_mod_state_by_def_checked(other, &state); if (tmp < 0) { return NULL; } else if (tmp == 0) { Py_RETURN_NOTIMPLEMENTED; } } assert(state != NULL); if (!Keys_CheckExact(state, self)) { if (Keys_CheckExact(state, other)) { return multidict_keysview_xor((_Multidict_ViewObject *)other, (PyObject *)self); } else { Py_RETURN_NOTIMPLEMENTED; } } PyObject *ret = NULL; PyObject *tmp1 = NULL; PyObject *tmp2 = NULL; PyObject *rht = PySet_New(other); if (rht == NULL) { if (PyErr_ExceptionMatches(PyExc_TypeError)) { PyErr_Clear(); Py_RETURN_NOTIMPLEMENTED; } goto fail; } tmp1 = PyNumber_Subtract((PyObject *)self, rht); if (tmp1 == NULL) { goto fail; } tmp2 = PyNumber_Subtract(rht, (PyObject *)self); if (tmp2 == NULL) { goto fail; } ret = PyNumber_InPlaceOr(tmp1, tmp2); if (ret == NULL) { goto fail; } Py_CLEAR(tmp1); Py_CLEAR(tmp2); Py_CLEAR(rht); return ret; fail: Py_CLEAR(tmp1); Py_CLEAR(tmp2); Py_CLEAR(rht); Py_CLEAR(ret); return NULL; } static inline int multidict_keysview_contains(_Multidict_ViewObject *self, PyObject *key) { return pair_list_contains(&self->md->pairs, key, NULL); } static inline PyObject * multidict_keysview_isdisjoint(_Multidict_ViewObject *self, PyObject *other) { PyObject *iter = PyObject_GetIter(other); if (iter == NULL) { return NULL; } PyObject *key = NULL; while ((key = PyIter_Next(iter))) { int tmp = pair_list_contains(&self->md->pairs, key, NULL); Py_CLEAR(key); if (tmp < 0) { Py_CLEAR(iter); return NULL; } if (tmp > 0) { Py_CLEAR(iter); Py_RETURN_FALSE; } } Py_CLEAR(iter); if (PyErr_Occurred()) { return NULL; } Py_RETURN_TRUE; } PyDoc_STRVAR(keysview_isdisjoint_doc, "Return True if two sets have a null intersection."); static PyMethodDef multidict_keysview_methods[] = { {"isdisjoint", (PyCFunction)multidict_keysview_isdisjoint, METH_O, keysview_isdisjoint_doc}, {NULL, NULL} /* sentinel */ }; static PyType_Slot 
multidict_keysview_slots[] = { {Py_tp_dealloc, multidict_view_dealloc}, {Py_tp_repr, multidict_keysview_repr}, {Py_nb_subtract, multidict_keysview_sub}, {Py_nb_and, multidict_keysview_and}, {Py_nb_xor, multidict_keysview_xor}, {Py_nb_or, multidict_keysview_or}, {Py_sq_length, multidict_view_len}, {Py_sq_contains, multidict_keysview_contains}, {Py_tp_getattro, PyObject_GenericGetAttr}, {Py_tp_traverse, multidict_view_traverse}, {Py_tp_clear, multidict_view_clear}, {Py_tp_richcompare, multidict_view_richcompare}, {Py_tp_iter, multidict_keysview_iter}, {Py_tp_methods, multidict_keysview_methods}, {0, NULL}, }; static PyType_Spec multidict_keysview_spec = { .name = "multidict._multidict._KeysView", .basicsize = sizeof(_Multidict_ViewObject), .flags = (Py_TPFLAGS_DEFAULT #if PY_VERSION_HEX >= 0x030a0000 | Py_TPFLAGS_IMMUTABLETYPE #endif | Py_TPFLAGS_HAVE_GC), .slots = multidict_keysview_slots, }; /********** Values **********/ static inline PyObject * multidict_valuesview_new(MultiDictObject *md) { _Multidict_ViewObject *mv = PyObject_GC_New( _Multidict_ViewObject, md->pairs.state->ValuesViewType); if (mv == NULL) { return NULL; } _init_view(mv, md); PyObject_GC_Track(mv); return (PyObject *)mv; } static inline PyObject * multidict_valuesview_iter(_Multidict_ViewObject *self) { return multidict_values_iter_new(self->md); } static inline PyObject * multidict_valuesview_repr(_Multidict_ViewObject *self) { int tmp = Py_ReprEnter((PyObject *)self); if (tmp < 0) { return NULL; } if (tmp > 0) { return PyUnicode_FromString("..."); } PyObject *name = PyObject_GetAttrString((PyObject*)Py_TYPE(self), "__name__"); if (name == NULL) { Py_ReprLeave((PyObject *)self); return NULL; } PyObject *ret = pair_list_repr(&self->md->pairs, name, false, true); Py_ReprLeave((PyObject *)self); Py_CLEAR(name); return ret; } static PyType_Slot multidict_valuesview_slots[] = { {Py_tp_dealloc, multidict_view_dealloc}, {Py_tp_repr, multidict_valuesview_repr}, {Py_sq_length, multidict_view_len}, 
{Py_tp_getattro, PyObject_GenericGetAttr}, {Py_tp_traverse, multidict_view_traverse}, {Py_tp_clear, multidict_view_clear}, {Py_tp_iter, multidict_valuesview_iter}, {0, NULL}, }; static PyType_Spec multidict_valuesview_spec = { .name = "multidict._multidict._ValuesView", .basicsize = sizeof(_Multidict_ViewObject), .flags = (Py_TPFLAGS_DEFAULT #if PY_VERSION_HEX >= 0x030a0000 | Py_TPFLAGS_IMMUTABLETYPE #endif | Py_TPFLAGS_HAVE_GC), .slots = multidict_valuesview_slots, }; static inline int multidict_views_init(PyObject *module, mod_state *state) { PyObject * tmp; tmp = PyType_FromModuleAndSpec(module, &multidict_itemsview_spec, NULL); if (tmp == NULL) { return -1; } state->ItemsViewType = (PyTypeObject *)tmp; tmp = PyType_FromModuleAndSpec(module, &multidict_valuesview_spec, NULL); if (tmp == NULL) { return -1; } state->ValuesViewType = (PyTypeObject *)tmp; tmp = PyType_FromModuleAndSpec(module, &multidict_keysview_spec, NULL); if (tmp == NULL) { return -1; } state->KeysViewType = (PyTypeObject *)tmp; return 0; } #ifdef __cplusplus } #endif #endif ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/multidict/py.typed0000644000175100001660000000001714776033000016620 0ustar00runnerdockerPEP-561 marker.././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744320002.8141801 multidict-6.4.3/multidict.egg-info/0000755000175100001660000000000014776033003016620 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320002.0 multidict-6.4.3/multidict.egg-info/PKG-INFO0000644000175100001660000001231314776033002017714 0ustar00runnerdockerMetadata-Version: 2.4 Name: multidict Version: 6.4.3 Summary: multidict implementation Home-page: https://github.com/aio-libs/multidict Author: Andrew Svetlov Author-email: andrew.svetlov@gmail.com License: Apache 2 Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org Project-URL: Chat: Matrix 
Space, https://matrix.to/#/#aio-libs-space:matrix.org Project-URL: CI: GitHub, https://github.com/aio-libs/multidict/actions Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/multidict Project-URL: Docs: Changelog, https://multidict.aio-libs.org/en/latest/changes/ Project-URL: Docs: RTD, https://multidict.aio-libs.org Project-URL: GitHub: issues, https://github.com/aio-libs/multidict/issues Project-URL: GitHub: repo, https://github.com/aio-libs/multidict Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: Apache Software License Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Classifier: Programming Language :: Python :: 3.12 Classifier: Programming Language :: Python :: 3.13 Requires-Python: >=3.9 Description-Content-Type: text/x-rst License-File: LICENSE Requires-Dist: typing-extensions>=4.1.0; python_version < "3.11" Dynamic: license-file ========= multidict ========= .. image:: https://github.com/aio-libs/multidict/actions/workflows/ci-cd.yml/badge.svg :target: https://github.com/aio-libs/multidict/actions :alt: GitHub status for master branch .. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg?flag=pytest :target: https://codecov.io/gh/aio-libs/multidict?flags[]=pytest :alt: Coverage metrics .. image:: https://img.shields.io/pypi/v/multidict.svg :target: https://pypi.org/project/multidict :alt: PyPI .. image:: https://readthedocs.org/projects/multidict/badge/?version=latest :target: https://multidict.aio-libs.org :alt: Read The Docs build status badge .. 
image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json :target: https://codspeed.io/aio-libs/multidict :alt: CodSpeed .. image:: https://img.shields.io/pypi/pyversions/multidict.svg :target: https://pypi.org/project/multidict :alt: Python versions .. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat :target: https://matrix.to/#/%23aio-libs:matrix.org :alt: Matrix Room — #aio-libs:matrix.org .. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat :target: https://matrix.to/#/%23aio-libs-space:matrix.org :alt: Matrix Space — #aio-libs-space:matrix.org Multidict is dict-like collection of *key-value pairs* where key might occur more than once in the container. Introduction ------------ *HTTP Headers* and *URL query string* require specific data structure: *multidict*. It behaves mostly like a regular ``dict`` but it may have several *values* for the same *key* and *preserves insertion ordering*. The *key* is ``str`` (or ``istr`` for case-insensitive dictionaries). ``multidict`` has four multidict classes: ``MultiDict``, ``MultiDictProxy``, ``CIMultiDict`` and ``CIMultiDictProxy``. Immutable proxies (``MultiDictProxy`` and ``CIMultiDictProxy``) provide a dynamic view for the proxied multidict, the view reflects underlying collection changes. They implement the ``collections.abc.Mapping`` interface. Regular mutable (``MultiDict`` and ``CIMultiDict``) classes implement ``collections.abc.MutableMapping`` and allows them to change their own content. *Case insensitive* (``CIMultiDict`` and ``CIMultiDictProxy``) assume the *keys* are case insensitive, e.g.:: >>> dct = CIMultiDict(key='val') >>> 'Key' in dct True >>> dct['Key'] 'val' *Keys* should be ``str`` or ``istr`` instances. 
The library has optional C Extensions for speed. License ------- Apache 2 Library Installation -------------------- .. code-block:: bash $ pip install multidict The library is Python 3 only! PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install ``multidict`` on another operating system (or *Alpine Linux* inside a Docker) the tarball will be used to compile the library from source. It requires a C compiler and Python headers to be installed. To skip the compilation, please use the `MULTIDICT_NO_EXTENSIONS` environment variable, e.g.: .. code-block:: bash $ MULTIDICT_NO_EXTENSIONS=1 pip install multidict Please note, the pure Python (uncompiled) version is about 20-50 times slower depending on the usage scenario!!! For extension development, set the ``MULTIDICT_DEBUG_BUILD`` environment variable to compile the extensions in debug mode: .. code-block:: console $ MULTIDICT_DEBUG_BUILD=1 pip install multidict Changelog --------- See `RTD page `_. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320002.0 multidict-6.4.3/multidict.egg-info/SOURCES.txt0000644000175100001660000000554114776033002020510 0ustar00runnerdocker.coveragerc CHANGES.rst LICENSE MANIFEST.in Makefile README.rst pyproject.toml pytest.ini setup.cfg setup.py CHANGES/.TEMPLATE.rst CHANGES/.gitignore CHANGES/README.rst docs/Makefile docs/benchmark.rst docs/changes.rst docs/conf.py docs/index.rst docs/make.bat docs/multidict.rst docs/spelling_wordlist.txt multidict/__init__.py multidict/_abc.py multidict/_compat.py multidict/_multidict.c multidict/_multidict_py.py multidict/py.typed multidict.egg-info/PKG-INFO multidict.egg-info/SOURCES.txt multidict.egg-info/dependency_links.txt multidict.egg-info/requires.txt multidict.egg-info/top_level.txt multidict/_multilib/dict.h multidict/_multilib/istr.h multidict/_multilib/iter.h multidict/_multilib/pair_list.h multidict/_multilib/parser.h multidict/_multilib/pythoncapi_compat.h 
multidict/_multilib/state.h multidict/_multilib/views.h requirements/ci.txt requirements/dev.txt requirements/doc-spelling.txt requirements/doc.txt requirements/lint.txt requirements/pytest.txt requirements/towncrier.txt requirements/wheel.txt tests/cimultidict-c-extension.pickle.0 tests/cimultidict-c-extension.pickle.1 tests/cimultidict-c-extension.pickle.2 tests/cimultidict-c-extension.pickle.3 tests/cimultidict-c-extension.pickle.4 tests/cimultidict-c-extension.pickle.5 tests/cimultidict-pure-python.pickle.0 tests/cimultidict-pure-python.pickle.1 tests/cimultidict-pure-python.pickle.2 tests/cimultidict-pure-python.pickle.3 tests/cimultidict-pure-python.pickle.4 tests/cimultidict-pure-python.pickle.5 tests/conftest.py tests/gen_pickles.py tests/istr-c-extension.pickle.0 tests/istr-c-extension.pickle.1 tests/istr-c-extension.pickle.2 tests/istr-c-extension.pickle.3 tests/istr-c-extension.pickle.4 tests/istr-c-extension.pickle.5 tests/istr-pure-python.pickle.0 tests/istr-pure-python.pickle.1 tests/istr-pure-python.pickle.2 tests/istr-pure-python.pickle.3 tests/istr-pure-python.pickle.4 tests/istr-pure-python.pickle.5 tests/multidict-c-extension.pickle.0 tests/multidict-c-extension.pickle.1 tests/multidict-c-extension.pickle.2 tests/multidict-c-extension.pickle.3 tests/multidict-c-extension.pickle.4 tests/multidict-c-extension.pickle.5 tests/multidict-pure-python.pickle.0 tests/multidict-pure-python.pickle.1 tests/multidict-pure-python.pickle.2 tests/multidict-pure-python.pickle.3 tests/multidict-pure-python.pickle.4 tests/multidict-pure-python.pickle.5 tests/test_abc.py tests/test_circular_imports.py tests/test_copy.py tests/test_guard.py tests/test_incorrect_args.py tests/test_istr.py tests/test_leaks.py tests/test_multidict.py tests/test_multidict_benchmarks.py tests/test_mutable_multidict.py tests/test_mypy.py tests/test_pickle.py tests/test_types.py tests/test_update.py tests/test_version.py tests/test_views_benchmarks.py tests/isolated/multidict_extend_dict.py 
tests/isolated/multidict_extend_multidict.py tests/isolated/multidict_extend_tuple.py tests/isolated/multidict_update_multidict.py././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320002.0 multidict-6.4.3/multidict.egg-info/dependency_links.txt0000644000175100001660000000000114776033002022665 0ustar00runnerdocker ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320002.0 multidict-6.4.3/multidict.egg-info/requires.txt0000644000175100001660000000006514776033002021220 0ustar00runnerdocker [:python_version < "3.11"] typing-extensions>=4.1.0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320002.0 multidict-6.4.3/multidict.egg-info/top_level.txt0000644000175100001660000000001214776033002021342 0ustar00runnerdockermultidict ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/pyproject.toml0000644000175100001660000000067714776033000016053 0ustar00runnerdocker[build-system] requires = ["setuptools >= 40"] build-backend = "setuptools.build_meta" [tool.cibuildwheel] test-requires = "-r requirements/pytest.txt" test-command = 'pytest -m "not leaks" --no-cov {project}/tests' # don't build PyPy wheels, install from source instead skip = "pp*" free-threaded-support = true [tool.cibuildwheel.linux] before-all = "yum install -y libffi-devel || apk add --upgrade libffi-dev || apt-get install libffi-dev" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/pytest.ini0000644000175100001660000000332114776033000015155 0ustar00runnerdocker[pytest] addopts = # `pytest-xdist`: # --numprocesses=auto # NOTE: the plugin disabled because it's slower with so few tests # --numprocesses=0 # Show 10 slowest invocations: --durations=10 # Report all the things == -rxXs: -ra # Show values of the local vars in errors/tracebacks: --showlocals # Autocollect and invoke the doctests 
from all modules: # https://docs.pytest.org/en/stable/doctest.html --doctest-modules # Pre-load the `pytest-cov` plugin early: -p pytest_cov # `pytest-cov`: --cov --cov-config=.coveragerc --cov-context=test # Fail on config parsing warnings: # --strict-config # Fail on non-existing markers: # * Deprecated since v6.2.0 but may be reintroduced later covering a # broader scope: # --strict # * Exists since v4.5.0 (advised to be used instead of `--strict`): --strict-markers doctest_optionflags = ALLOW_UNICODE ELLIPSIS # Marks tests with an empty parameterset as xfail(run=False) empty_parameter_set_mark = xfail faulthandler_timeout = 30 filterwarnings = error # https://docs.pytest.org/en/stable/usage.html#creating-junitxml-format-files junit_duration_report = call # xunit1 contains more metadata than xunit2 so it's better for CI UIs: junit_family = xunit1 junit_logging = all junit_log_passing_tests = true junit_suite_name = multidict_test_suite # A mapping of markers to their descriptions allowed in strict mode: markers = leaks: memory leak tests minversion = 3.8.2 # Optimize pytest's lookup by restricting potentially deep dir tree scan: norecursedirs = build dist docs multidict.egg-info requirements venv virtualenv .cache .eggs .git .github .tox *.egg testpaths = tests/ xfail_strict = true ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1744320002.80418 multidict-6.4.3/requirements/0000755000175100001660000000000014776033003015653 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/requirements/ci.txt0000644000175100001660000000002314776033000016777 0ustar00runnerdocker-e . 
-r pytest.txt ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/requirements/dev.txt0000644000175100001660000000011014776033000017157 0ustar00runnerdocker-r ci.txt -r lint.txt -r towncrier.txt -r doc.txt pyperformance==1.11.0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/requirements/doc-spelling.txt0000644000175100001660000000014414776033000020770 0ustar00runnerdocker-r doc.txt sphinxcontrib-spelling==8.0.1; platform_system!="Windows" # We only use it in Travis CI ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/requirements/doc.txt0000644000175100001660000000014614776033000017157 0ustar00runnerdocker-r towncrier.txt sphinx==8.2.3 pyenchant==3.2.2 sphinxcontrib-spelling==8.0.1 sphinxcontrib-towncrier ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/requirements/lint.txt0000644000175100001660000000011114776033000017350 0ustar00runnerdocker-r ci.txt -r towncrier.txt black==25.1.0 isort==6.0.1 pre-commit==4.2.0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/requirements/pytest.txt0000644000175100001660000000043214776033000017740 0ustar00runnerdockerobjgraph==3.6.2 pytest==8.3.5; platform_python_implementation != 'PyPy' pytest < 8.2.2; platform_python_implementation == 'PyPy' # FIXME: Drop conditionals once the regression is gone. See https://github.com/pytest-dev/pytest/issues/13312. 
pytest-codspeed==3.2.0 pytest-cov==6.0.0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/requirements/towncrier.txt0000644000175100001660000000002314776033000020420 0ustar00runnerdockertowncrier==23.11.0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/requirements/wheel.txt0000644000175100001660000000000614776033000017511 0ustar00runnerdocker-e . ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1744320002.81518 multidict-6.4.3/setup.cfg0000644000175100001660000000324514776033003014755 0ustar00runnerdocker[bdist_wheel] universal = 0 [metadata] name = multidict version = attr: multidict.__version__ url = https://github.com/aio-libs/multidict project_urls = Chat: Matrix = https://matrix.to/#/#aio-libs:matrix.org Chat: Matrix Space = https://matrix.to/#/#aio-libs-space:matrix.org CI: GitHub = https://github.com/aio-libs/multidict/actions Code of Conduct = https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md Coverage: codecov = https://codecov.io/github/aio-libs/multidict Docs: Changelog = https://multidict.aio-libs.org/en/latest/changes/ Docs: RTD = https://multidict.aio-libs.org GitHub: issues = https://github.com/aio-libs/multidict/issues GitHub: repo = https://github.com/aio-libs/multidict description = multidict implementation long_description = file: README.rst long_description_content_type = text/x-rst author = Andrew Svetlov author_email = andrew.svetlov@gmail.com license = Apache 2 license_files = LICENSE classifiers = Development Status :: 5 - Production/Stable Intended Audience :: Developers License :: OSI Approved :: Apache Software License Programming Language :: Python Programming Language :: Python :: 3 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Python :: 3.12 Programming Language :: Python :: 
3.13 [options] python_requires = >= 3.9 install_requires = typing-extensions >= 4.1.0; python_version < '3.11' packages = multidict [isort] multi_line_output = 3 include_trailing_comma = True force_grid_wrap = 0 use_parentheses = True known_first_party = multidict known_third_party = pytest [egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/setup.py0000644000175100001660000000176114776033000014644 0ustar00runnerdockerimport os import platform import sys from setuptools import Extension, setup NO_EXTENSIONS = bool(os.environ.get("MULTIDICT_NO_EXTENSIONS")) DEBUG_BUILD = bool(os.environ.get("MULTIDICT_DEBUG_BUILD")) if sys.implementation.name != "cpython": NO_EXTENSIONS = True CFLAGS = ["-O0", "-g3", "-UNDEBUG"] if DEBUG_BUILD else ["-O3"] if platform.system() != "Windows": CFLAGS.extend( [ "-std=c99", "-Wall", "-Wsign-compare", "-Wconversion", "-fno-strict-aliasing", "-pedantic", ] ) extensions = [ Extension( "multidict._multidict", ["multidict/_multidict.c"], extra_compile_args=CFLAGS, ), ] if not NO_EXTENSIONS: print("*********************") print("* Accelerated build *") print("*********************") setup(ext_modules=extensions) else: print("*********************") print("* Pure Python build *") print("*********************") setup() ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1744320002.81318 multidict-6.4.3/tests/0000755000175100001660000000000014776033003014272 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/cimultidict-c-extension.pickle.00000644000175100001660000000012114776033000022354 0ustar00runnerdockercmultidict._multidict CIMultiDict p0 ((lp1 (Va p2 L1L tp3 a(g2 L2L tp4 atp5 Rp6 .././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 
multidict-6.4.3/tests/cimultidict-c-extension.pickle.10000644000175100001660000000010714776033000022361 0ustar00runnerdockercmultidict._multidict CIMultiDict q(]q((XaqKtq(hKtqetqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/cimultidict-c-extension.pickle.20000644000175100001660000000010614776033000022361 0ustar00runnerdockercmultidict._multidict CIMultiDict q]q(XaqKqhKqeqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/cimultidict-c-extension.pickle.30000644000175100001660000000010614776033000022362 0ustar00runnerdockercmultidict._multidict CIMultiDict q]q(XaqKqhKqeqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/cimultidict-c-extension.pickle.40000644000175100001660000000011114776033000022357 0ustar00runnerdocker>multidict._multidict CIMultiDict](aKhKeR.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/cimultidict-c-extension.pickle.50000644000175100001660000000011114776033000022360 0ustar00runnerdocker>multidict._multidict CIMultiDict](aKhKeR.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/cimultidict-pure-python.pickle.00000644000175100001660000000012414776033000022415 0ustar00runnerdockercmultidict._multidict_py CIMultiDict p0 ((lp1 (Va p2 L1L tp3 a(g2 L2L tp4 atp5 Rp6 .././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/cimultidict-pure-python.pickle.10000644000175100001660000000011214776033000022413 0ustar00runnerdockercmultidict._multidict_py CIMultiDict q(]q((XaqKtq(hKtqetqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 
multidict-6.4.3/tests/cimultidict-pure-python.pickle.20000644000175100001660000000011114776033000022413 0ustar00runnerdockercmultidict._multidict_py CIMultiDict q]q(XaqKqhKqeqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/cimultidict-pure-python.pickle.30000644000175100001660000000011114776033000022414 0ustar00runnerdockercmultidict._multidict_py CIMultiDict q]q(XaqKqhKqeqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/cimultidict-pure-python.pickle.40000644000175100001660000000011414776033000022420 0ustar00runnerdockerAmultidict._multidict_py CIMultiDict](aKhKeR.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/cimultidict-pure-python.pickle.50000644000175100001660000000011414776033000022421 0ustar00runnerdockerAmultidict._multidict_py CIMultiDict](aKhKeR.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/conftest.py0000644000175100001660000001535714776033000016501 0ustar00runnerdockerfrom __future__ import annotations import argparse import pickle from dataclasses import dataclass from functools import cached_property from importlib import import_module from types import ModuleType from typing import Callable, Type, Union import pytest from multidict import ( CIMultiDict, MultiDict, MultiDictProxy, MultiMapping, MutableMultiMapping, ) C_EXT_MARK = pytest.mark.c_extension @dataclass(frozen=True) class MultidictImplementation: """A facade for accessing importable multidict module variants. An instance essentially represents a c-extension or a pure-python module. The actual underlying module is accessed dynamically through a property and is cached. It also has a text tag depending on what variant it is, and a string representation suitable for use in Pytest's test IDs via parametrization. 
""" is_pure_python: bool """A flag showing whether this is a pure-python module or a C-extension.""" @cached_property def tag(self) -> str: """Return a text representation of the pure-python attribute.""" return "pure-python" if self.is_pure_python else "c-extension" @cached_property def imported_module(self) -> ModuleType: """Return a loaded importable containing a multidict variant.""" importable_module = "_multidict_py" if self.is_pure_python else "_multidict" return import_module(f"multidict.{importable_module}") def __str__(self) -> str: """Render the implementation facade instance as a string.""" return f"{self.tag}-module" @pytest.fixture( scope="session", params=( pytest.param( MultidictImplementation(is_pure_python=False), marks=C_EXT_MARK, ), MultidictImplementation(is_pure_python=True), ), ids=str, ) def multidict_implementation(request: pytest.FixtureRequest) -> MultidictImplementation: """Return a multidict variant facade.""" return request.param # type: ignore[no-any-return] @pytest.fixture(scope="session") def multidict_module( multidict_implementation: MultidictImplementation, ) -> ModuleType: """Return a pre-imported module containing a multidict variant.""" return multidict_implementation.imported_module @pytest.fixture( scope="session", params=("MultiDict", "CIMultiDict"), ids=("case-sensitive", "case-insensitive"), ) def any_multidict_class_name(request: pytest.FixtureRequest) -> str: """Return a class name of a mutable multidict implementation.""" return request.param # type: ignore[no-any-return] @pytest.fixture(scope="session") def any_multidict_class( any_multidict_class_name: str, multidict_module: ModuleType, ) -> Type[MutableMultiMapping[str]]: """Return a class object of a mutable multidict implementation.""" return getattr(multidict_module, any_multidict_class_name) # type: ignore[no-any-return] @pytest.fixture(scope="session") def case_sensitive_multidict_class( multidict_module: ModuleType, ) -> Type[MultiDict[str]]: """Return a 
case-sensitive mutable multidict class.""" return multidict_module.MultiDict # type: ignore[no-any-return] @pytest.fixture(scope="session") def case_insensitive_multidict_class( multidict_module: ModuleType, ) -> Type[CIMultiDict[str]]: """Return a case-insensitive mutable multidict class.""" return multidict_module.CIMultiDict # type: ignore[no-any-return] @pytest.fixture(scope="session") def case_insensitive_str_class(multidict_module: ModuleType) -> Type[str]: """Return a case-insensitive string class.""" return multidict_module.istr # type: ignore[no-any-return] @pytest.fixture(scope="session") def any_multidict_proxy_class_name(any_multidict_class_name: str) -> str: """Return a class name of an immutable multidict implementation.""" return f"{any_multidict_class_name}Proxy" @pytest.fixture(scope="session") def any_multidict_proxy_class( any_multidict_proxy_class_name: str, multidict_module: ModuleType, ) -> Type[MultiMapping[str]]: """Return an immutable multidict implementation class object.""" return getattr(multidict_module, any_multidict_proxy_class_name) # type: ignore[no-any-return] @pytest.fixture(scope="session") def case_sensitive_multidict_proxy_class( multidict_module: ModuleType, ) -> Type[MutableMultiMapping[str]]: """Return a case-sensitive immutable multidict class.""" return multidict_module.MultiDictProxy # type: ignore[no-any-return] @pytest.fixture(scope="session") def case_insensitive_multidict_proxy_class( multidict_module: ModuleType, ) -> Type[MutableMultiMapping[str]]: """Return a case-insensitive immutable multidict class.""" return multidict_module.CIMultiDictProxy # type: ignore[no-any-return] @pytest.fixture(scope="session") def multidict_getversion_callable( multidict_module: ModuleType, ) -> Callable[[Union[MultiDict[object], MultiDictProxy[object]]], int]: """Return a ``getversion()`` function for current implementation.""" return multidict_module.getversion # type: ignore[no-any-return] def pytest_addoption( parser: 
pytest.Parser, pluginmanager: pytest.PytestPluginManager, ) -> None: """Define a new ``--c-extensions`` flag. This lets the callers deselect tests executed against the C-extension version of the ``multidict`` implementation. """ del pluginmanager parser.addoption( "--c-extensions", # disabled with `--no-c-extensions` action=argparse.BooleanOptionalAction, default=True, dest="c_extensions", help="Test C-extensions (on by default)", ) def pytest_collection_modifyitems( session: pytest.Session, config: pytest.Config, items: list[pytest.Item], ) -> None: """Deselect tests against C-extensions when requested via CLI.""" test_c_extensions = config.getoption("--c-extensions") is True if test_c_extensions: return selected_tests: list[pytest.Item] = [] deselected_tests: list[pytest.Item] = [] for item in items: c_ext = item.get_closest_marker(C_EXT_MARK.name) is not None target_items_list = deselected_tests if c_ext else selected_tests target_items_list.append(item) config.hook.pytest_deselected(items=deselected_tests) items[:] = selected_tests def pytest_configure(config: pytest.Config) -> None: """Declare the C-extension marker in config.""" config.addinivalue_line( "markers", f"{C_EXT_MARK.name}: tests running against the C-extension implementation.", ) def pytest_generate_tests(metafunc: pytest.Metafunc) -> None: if "pickle_protocol" in metafunc.fixturenames: metafunc.parametrize( "pickle_protocol", list(range(pickle.HIGHEST_PROTOCOL + 1)), scope="session" ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/gen_pickles.py0000644000175100001660000000232214776033000017123 0ustar00runnerdockerimport pickle from importlib import import_module from pathlib import Path from typing import Union from multidict import CIMultiDict, MultiDict, istr TESTS_DIR = Path(__file__).parent.resolve() _MD_Classes = Union[type[MultiDict[int]], type[CIMultiDict[int]]] def write(tag: str, cls: _MD_Classes, proto: int) -> None: d 
= cls([("a", 1), ("a", 2)]) file_basename = f"{cls.__name__.lower()}-{tag}" with (TESTS_DIR / f"{file_basename}.pickle.{proto}").open("wb") as f: pickle.dump(d, f, proto) def write_istr(tag: str, cls: type[istr], proto: int) -> None: s = cls("str") file_basename = f"{cls.__name__.lower()}-{tag}" with (TESTS_DIR / f"{file_basename}.pickle.{proto}").open("wb") as f: pickle.dump(s, f, proto) def generate() -> None: _impl_map = { "c-extension": "_multidict", "pure-python": "_multidict_py", } for proto in range(pickle.HIGHEST_PROTOCOL + 1): for tag, impl_name in _impl_map.items(): impl = import_module(f"multidict.{impl_name}") for cls in impl.CIMultiDict, impl.MultiDict: write(tag, cls, proto) write_istr(tag, impl.istr, proto) if __name__ == "__main__": generate() ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744320002.8141801 multidict-6.4.3/tests/isolated/0000755000175100001660000000000014776033003016076 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/isolated/multidict_extend_dict.py0000644000175100001660000000111514776033000023013 0ustar00runnerdockerimport gc import sys from typing import Any import objgraph # type: ignore[import-untyped] from multidict import MultiDict class NoLeakDict(dict[str, Any]): """A subclassed dict to make it easier to test for leaks.""" def _run_isolated_case() -> None: md: MultiDict[str] = MultiDict() for _ in range(100): md.update(NoLeakDict()) del md gc.collect() leaked = len(objgraph.by_type("NoLeakDict")) print(f"{leaked} instances of NoLeakDict not collected by GC") sys.exit(1 if leaked else 0) if __name__ == "__main__": _run_isolated_case() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/isolated/multidict_extend_multidict.py0000644000175100001660000000071514776033000024073 0ustar00runnerdockerimport gc import sys import objgraph # type: 
ignore[import-untyped] from multidict import MultiDict def _run_isolated_case() -> None: md: MultiDict[str] = MultiDict() for _ in range(100): md.extend(MultiDict()) del md gc.collect() leaked = len(objgraph.by_type("MultiDict")) print(f"{leaked} instances of MultiDict not collected by GC") sys.exit(1 if leaked else 0) if __name__ == "__main__": _run_isolated_case() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/isolated/multidict_extend_tuple.py0000644000175100001660000000112714776033000023224 0ustar00runnerdockerimport gc import sys from typing import Any import objgraph # type: ignore[import-untyped] from multidict import MultiDict class NotLeakTuple(tuple[Any, ...]): """A subclassed tuple to make it easier to test for leaks.""" def _run_isolated_case() -> None: md: MultiDict[str] = MultiDict() for _ in range(100): md.extend(NotLeakTuple()) del md gc.collect() leaked = len(objgraph.by_type("NotLeakTuple")) print(f"{leaked} instances of NotLeakTuple not collected by GC") sys.exit(1 if leaked else 0) if __name__ == "__main__": _run_isolated_case() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/isolated/multidict_update_multidict.py0000644000175100001660000000071514776033000024066 0ustar00runnerdockerimport gc import sys import objgraph # type: ignore[import-untyped] from multidict import MultiDict def _run_isolated_case() -> None: md: MultiDict[str] = MultiDict() for _ in range(100): md.extend(MultiDict()) del md gc.collect() leaked = len(objgraph.by_type("MultiDict")) print(f"{leaked} instances of MultiDict not collected by GC") sys.exit(1 if leaked else 0) if __name__ == "__main__": _run_isolated_case() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/istr-c-extension.pickle.00000644000175100001660000000006014776033000021025 
0ustar00runnerdockercmultidict._multidict istr p0 (Vstr p1 tp2 Rp3 .././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/istr-c-extension.pickle.10000644000175100001660000000005714776033000021034 0ustar00runnerdockercmultidict._multidict istr q(XstrqtqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/istr-c-extension.pickle.20000644000175100001660000000006014776033000021027 0ustar00runnerdockercmultidict._multidict istr qXstrqqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/istr-c-extension.pickle.30000644000175100001660000000006014776033000021030 0ustar00runnerdockercmultidict._multidict istr qXstrqqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/istr-c-extension.pickle.40000644000175100001660000000006614776033000021037 0ustar00runnerdocker+multidict._multidictistrstrR.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/istr-c-extension.pickle.50000644000175100001660000000006614776033000021040 0ustar00runnerdocker+multidict._multidictistrstrR.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/istr-pure-python.pickle.00000644000175100001660000000014714776033000021071 0ustar00runnerdockerccopy_reg _reconstructor p0 (cmultidict._multidict_py istr p1 c__builtin__ unicode p2 Vstr p3 tp4 Rp5 .././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/istr-pure-python.pickle.10000644000175100001660000000014414776033000021067 0ustar00runnerdockerccopy_reg _reconstructor q(cmultidict._multidict_py istr qc__builtin__ unicode qXstrqtqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 
mtime=1744320000.0 multidict-6.4.3/tests/istr-pure-python.pickle.20000644000175100001660000000006314776033000021070 0ustar00runnerdockercmultidict._multidict_py istr qXstrqqq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/istr-pure-python.pickle.30000644000175100001660000000006314776033000021071 0ustar00runnerdockercmultidict._multidict_py istr qXstrqqq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/istr-pure-python.pickle.40000644000175100001660000000007114776033000021071 0ustar00runnerdocker.multidict._multidict_pyistrstr.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/istr-pure-python.pickle.50000644000175100001660000000007114776033000021072 0ustar00runnerdocker.multidict._multidict_pyistrstr.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/multidict-c-extension.pickle.00000644000175100001660000000011714776033000022045 0ustar00runnerdockercmultidict._multidict MultiDict p0 ((lp1 (Va p2 L1L tp3 a(g2 L2L tp4 atp5 Rp6 .././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/multidict-c-extension.pickle.10000644000175100001660000000010514776033000022043 0ustar00runnerdockercmultidict._multidict MultiDict q(]q((XaqKtq(hKtqetqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/multidict-c-extension.pickle.20000644000175100001660000000010414776033000022043 0ustar00runnerdockercmultidict._multidict MultiDict q]q(XaqKqhKqeqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/multidict-c-extension.pickle.30000644000175100001660000000010414776033000022044 0ustar00runnerdockercmultidict._multidict MultiDict 
q]q(XaqKqhKqeqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/multidict-c-extension.pickle.40000644000175100001660000000010714776033000022050 0ustar00runnerdocker<multidict._multidict MultiDict](aKhKeR.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/multidict-c-extension.pickle.50000644000175100001660000000010714776033000022051 0ustar00runnerdocker<multidict._multidict MultiDict](aKhKeR.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/multidict-pure-python.pickle.00000644000175100001660000000012214776033000022077 0ustar00runnerdockercmultidict._multidict_py MultiDict p0 ((lp1 (Va p2 L1L tp3 a(g2 L2L tp4 atp5 Rp6 .././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/multidict-pure-python.pickle.10000644000175100001660000000011014776033000022075 0ustar00runnerdockercmultidict._multidict_py MultiDict q(]q((XaqKtq(hKtqetqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/multidict-pure-python.pickle.20000644000175100001660000000010714776033000022104 0ustar00runnerdockercmultidict._multidict_py MultiDict q]q(XaqKqhKqeqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/multidict-pure-python.pickle.30000644000175100001660000000010714776033000022105 0ustar00runnerdockercmultidict._multidict_py MultiDict q]q(XaqKqhKqeqRq.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/multidict-pure-python.pickle.40000644000175100001660000000011214776033000022102 0ustar00runnerdocker?multidict._multidict_py MultiDict](aKhKeR.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 
multidict-6.4.3/tests/multidict-pure-python.pickle.50000644000175100001660000000011214776033000022103 0ustar00runnerdocker?multidict._multidict_py MultiDict](aKhKeR.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/test_abc.py0000644000175100001660000000166214776033000016432 0ustar00runnerdockerfrom collections.abc import Mapping, MutableMapping from multidict import ( MultiDict, MultiDictProxy, MultiMapping, MutableMultiMapping, ) def test_abc_inheritance() -> None: assert issubclass(MultiMapping, Mapping) assert not issubclass(MultiMapping, MutableMapping) assert issubclass(MutableMultiMapping, Mapping) assert issubclass(MutableMultiMapping, MutableMapping) def test_multidict_inheritance(any_multidict_class: type[MultiDict[str]]) -> None: assert issubclass(any_multidict_class, MultiMapping) assert issubclass(any_multidict_class, MutableMultiMapping) def test_proxy_inheritance( any_multidict_proxy_class: type[MultiDictProxy[str]], ) -> None: assert issubclass(any_multidict_proxy_class, MultiMapping) assert not issubclass(any_multidict_proxy_class, MutableMultiMapping) def test_generic_type_in_runtime() -> None: MultiMapping[str] MutableMultiMapping[str] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/test_circular_imports.py0000644000175100001660000000575714776033000021277 0ustar00runnerdocker"""Tests for circular imports in all local packages and modules. This ensures all internal packages can be imported right away without any need to import some other module before doing so. 
This module is based on the idea that pytest uses for self-testing: * https://github.com/sanitizers/octomachinery/blob/be18b54/tests/circular_imports_test.py # noqa: E501 * https://github.com/pytest-dev/pytest/blob/d18c75b/testing/test_meta.py * https://twitter.com/codewithanthony/status/1229445110510735361 """ from __future__ import annotations import os import pkgutil import subprocess import sys from itertools import chain from pathlib import Path from types import ModuleType from typing import Generator import pytest import multidict def _find_all_importables(pkg: ModuleType) -> list[str]: """Find all importables in the project. Return them in order. """ return sorted( set( chain.from_iterable( _discover_path_importables(Path(p), pkg.__name__) for p in pkg.__path__ ), ), ) def _discover_path_importables( pkg_pth: Path, pkg_name: str, ) -> Generator[str, None, None]: """Yield all importables under a given path and package.""" yield pkg_name for dir_path, _d, file_names in os.walk(pkg_pth): pkg_dir_path = Path(dir_path) if pkg_dir_path.parts[-1] == "__pycache__": continue rel_pt = pkg_dir_path.relative_to(pkg_pth) pkg_pref = ".".join((pkg_name,) + rel_pt.parts) yield from ( pkg_path for _, pkg_path, _ in pkgutil.walk_packages( (str(pkg_dir_path),), prefix=f"{pkg_pref}.", ) ) @pytest.fixture(params=_find_all_importables(multidict)) def import_path(request: pytest.FixtureRequest) -> str: """Return an importable from the multidict package.""" importable_module: str = request.param if importable_module == "multidict._multidict": request.applymarker(pytest.mark.c_extension) return importable_module def test_no_warnings(import_path: str) -> None: """Verify that importing modules and packages doesn't explode. This is seeking for any import errors including ones caused by circular imports. 
""" imp_cmd = ( # fmt: off sys.executable, "-I", "-W", "error", "-c", f"import {import_path!s}", # fmt: on ) subprocess.check_call(imp_cmd) @pytest.mark.c_extension def test_c_extension_preferred_by_default(monkeypatch: pytest.MonkeyPatch) -> None: """Verify that the C-extension is exposed by default.""" monkeypatch.delenv("MULTIDICT_NO_EXTENSIONS", raising=False) imp_cmd = ( # fmt: off sys.executable, "-I", "-W", "error", "-c", "import multidict; raise SystemExit(int(" "multidict.istr.__module__ != 'multidict._multidict' " "or multidict.USE_EXTENSIONS is not True))", # fmt: on ) subprocess.check_call(imp_cmd) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/test_copy.py0000644000175100001660000000223114776033000016650 0ustar00runnerdockerimport copy from typing import Union from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy _MD_Classes = Union[type[MultiDict[int]], type[CIMultiDict[int]]] _MDP_Classes = Union[type[MultiDictProxy[int]], type[CIMultiDictProxy[int]]] def test_copy(any_multidict_class: _MD_Classes) -> None: d = any_multidict_class() d["foo"] = 6 d2 = d.copy() d2["foo"] = 7 assert d["foo"] == 6 assert d2["foo"] == 7 def test_copy_proxy( any_multidict_class: _MD_Classes, any_multidict_proxy_class: _MDP_Classes ) -> None: d = any_multidict_class() d["foo"] = 6 p = any_multidict_proxy_class(d) d2 = p.copy() d2["foo"] = 7 assert d["foo"] == 6 assert p["foo"] == 6 assert d2["foo"] == 7 def test_copy_std_copy(any_multidict_class: _MD_Classes) -> None: d = any_multidict_class() d["foo"] = 6 d2 = copy.copy(d) d2["foo"] = 7 assert d["foo"] == 6 assert d2["foo"] == 7 def test_ci_multidict_clone(any_multidict_class: _MD_Classes) -> None: d = any_multidict_class(foo=6) d2 = any_multidict_class(d) d2["foo"] = 7 assert d["foo"] == 6 assert d2["foo"] == 7 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 
multidict-6.4.3/tests/test_guard.py0000644000175100001660000000141314776033000017001 0ustar00runnerdockerimport pytest from multidict import MultiDict def test_guard_items( case_sensitive_multidict_class: type[MultiDict[str]], ) -> None: md = case_sensitive_multidict_class({"a": "b"}) it = iter(md.items()) md["a"] = "c" with pytest.raises(RuntimeError): next(it) def test_guard_keys( case_sensitive_multidict_class: type[MultiDict[str]], ) -> None: md = case_sensitive_multidict_class({"a": "b"}) it = iter(md.keys()) md["a"] = "c" with pytest.raises(RuntimeError): next(it) def test_guard_values( case_sensitive_multidict_class: type[MultiDict[str]], ) -> None: md = case_sensitive_multidict_class({"a": "b"}) it = iter(md.values()) md["a"] = "c" with pytest.raises(RuntimeError): next(it) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/test_incorrect_args.py0000644000175100001660000000653714776033000020717 0ustar00runnerdocker"""Test passing invalid arguments to the methods of the MultiDict class.""" from dataclasses import dataclass from typing import cast import pytest from multidict import MultiDict @dataclass(frozen=True) class InvalidTestedMethodArgs: """A set of arguments passed to methods under test.""" test_id: str positional: tuple[object, ...] 
keyword: dict[str, object] def __str__(self) -> str: """Render a test identifier as a string.""" return self.test_id @pytest.fixture( scope="module", params=( InvalidTestedMethodArgs("no_args", (), {}), InvalidTestedMethodArgs("too_many_args", ("a", "b", "c"), {}), InvalidTestedMethodArgs("wrong_kwarg", (), {"wrong": 1}), InvalidTestedMethodArgs( "wrong_kwarg_and_too_many_args", ("a",), {"wrong": 1}, ), ), ids=str, ) def tested_method_args( request: pytest.FixtureRequest, ) -> InvalidTestedMethodArgs: """Return an instance of a parameter set.""" return cast(InvalidTestedMethodArgs, request.param) @pytest.fixture(scope="module") def multidict_object( any_multidict_class: type[MultiDict[int]], ) -> MultiDict[int]: return any_multidict_class([("a", 1), ("a", 2)]) def test_getall_args( multidict_object: MultiDict[int], tested_method_args: InvalidTestedMethodArgs, ) -> None: with pytest.raises(TypeError, match=r".*argument.*"): multidict_object.getall( *tested_method_args.positional, **tested_method_args.keyword, ) def test_getone_args( multidict_object: MultiDict[int], tested_method_args: InvalidTestedMethodArgs, ) -> None: with pytest.raises(TypeError, match=r".*argument.*"): multidict_object.getone( *tested_method_args.positional, **tested_method_args.keyword, ) def test_get_args( multidict_object: MultiDict[int], tested_method_args: InvalidTestedMethodArgs, ) -> None: with pytest.raises(TypeError, match=r".*argument.*"): multidict_object.get( *tested_method_args.positional, **tested_method_args.keyword, ) def test_setdefault_args( multidict_object: MultiDict[int], tested_method_args: InvalidTestedMethodArgs, ) -> None: with pytest.raises(TypeError, match=r".*argument.*"): multidict_object.setdefault( *tested_method_args.positional, **tested_method_args.keyword, ) def test_popone_args( multidict_object: MultiDict[int], tested_method_args: InvalidTestedMethodArgs, ) -> None: with pytest.raises(TypeError, match=r".*argument.*"): multidict_object.popone( 
*tested_method_args.positional, **tested_method_args.keyword, ) def test_pop_args( multidict_object: MultiDict[int], tested_method_args: InvalidTestedMethodArgs, ) -> None: with pytest.raises(TypeError, match=r".*argument.*"): multidict_object.pop( *tested_method_args.positional, **tested_method_args.keyword, ) def test_popall_args( multidict_object: MultiDict[int], tested_method_args: InvalidTestedMethodArgs, ) -> None: with pytest.raises(TypeError, match=r".*argument.*"): multidict_object.popall( *tested_method_args.positional, **tested_method_args.keyword, ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/test_istr.py0000644000175100001660000000406414776033000016665 0ustar00runnerdockerimport gc import sys from typing import Callable, Type import pytest IMPLEMENTATION = getattr(sys, "implementation") # to suppress mypy error GIL_ENABLED = getattr(sys, "_is_gil_enabled", lambda: True)() def test_ctor(case_insensitive_str_class: Type[str]) -> None: s = case_insensitive_str_class() assert "" == s def test_ctor_str(case_insensitive_str_class: Type[str]) -> None: s = case_insensitive_str_class("aBcD") assert "aBcD" == s def test_ctor_istr(case_insensitive_str_class: Type[str]) -> None: s = case_insensitive_str_class("A") s2 = case_insensitive_str_class(s) assert "A" == s assert s == s2 def test_ctor_buffer(case_insensitive_str_class: Type[str]) -> None: s = case_insensitive_str_class(b"aBc") assert "b'aBc'" == s def test_ctor_repr(case_insensitive_str_class: Type[str]) -> None: s = case_insensitive_str_class(None) assert "None" == s def test_str(case_insensitive_str_class: Type[str]) -> None: s = case_insensitive_str_class("aBcD") s1 = str(s) assert s1 == "aBcD" assert type(s1) is str def test_eq(case_insensitive_str_class: Type[str]) -> None: s1 = "Abc" s2 = case_insensitive_str_class(s1) assert s1 == s2 @pytest.fixture def create_istrs(case_insensitive_str_class: Type[str]) -> Callable[[], None]: 
"""Make a callable populating memory with a few ``istr`` objects.""" def _create_strs() -> None: case_insensitive_str_class("foobarbaz") istr2 = case_insensitive_str_class() case_insensitive_str_class(istr2) return _create_strs @pytest.mark.skipif( IMPLEMENTATION.name != "cpython", reason="PyPy has different GC implementation", ) @pytest.mark.skipif( not GIL_ENABLED, reason="free threading has different GC implementation", ) def test_leak(create_istrs: Callable[[], None]) -> None: gc.collect() cnt = len(gc.get_objects()) for _ in range(10000): create_istrs() gc.collect() cnt2 = len(gc.get_objects()) assert abs(cnt - cnt2) < 10 # on other GC impls these numbers are not equal ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/test_leaks.py0000644000175100001660000000142014776033000016774 0ustar00runnerdockerimport pathlib import platform import subprocess import sys import pytest IS_PYPY = platform.python_implementation() == "PyPy" @pytest.mark.parametrize( ("script"), ( "multidict_extend_dict.py", "multidict_extend_multidict.py", "multidict_extend_tuple.py", "multidict_update_multidict.py", ), ) @pytest.mark.leaks @pytest.mark.skipif(IS_PYPY, reason="leak testing is not supported on PyPy") def test_leak(script: str) -> None: """Run isolated leak test script and check for leaks.""" leak_test_script = pathlib.Path(__file__).parent.joinpath("isolated", script) subprocess.run( [sys.executable, "-u", str(leak_test_script)], stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True, ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/test_multidict.py0000644000175100001660000012402014776033000017675 0ustar00runnerdockerfrom __future__ import annotations import gc import operator import platform import sys import weakref from collections import deque from collections.abc import Callable, Iterable, Iterator, KeysView, Mapping from types import 
ModuleType from typing import TypeVar, Union, cast import pytest import multidict from multidict import ( CIMultiDict, MultiDict, MultiDictProxy, MultiMapping, MutableMultiMapping, istr, ) _T = TypeVar("_T") IS_PYPY = platform.python_implementation() == "PyPy" def chained_callable( module: ModuleType, callables: Iterable[str], ) -> Callable[..., MultiMapping[int | str] | MutableMultiMapping[int | str]]: """ Return callable that will get and call all given objects in module in exact order. """ def chained_call( *args: object, **kwargs: object, ) -> MultiMapping[int | str] | MutableMultiMapping[int | str]: callable_chain = (getattr(module, name) for name in callables) first_callable = next(callable_chain) value = first_callable(*args, **kwargs) for element in callable_chain: value = element(value) return cast( Union[ MultiMapping[Union[int, str]], MutableMultiMapping[Union[int, str]], ], value, ) return chained_call @pytest.fixture def cls( # type: ignore[misc] request: pytest.FixtureRequest, multidict_module: ModuleType, ) -> Callable[..., MultiMapping[int | str] | MutableMultiMapping[int | str]]: """Make a callable from multidict module, requested by name.""" return chained_callable(multidict_module, request.param) def test_exposed_names(any_multidict_class_name: str) -> None: assert any_multidict_class_name in multidict.__all__ @pytest.mark.parametrize( ("cls", "key_cls"), ( (("MultiDict",), str), ( ("MultiDict", "MultiDictProxy"), str, ), ), indirect=["cls"], ) def test__iter__types( cls: type[MultiDict[Union[str, int]]], key_cls: type[str], ) -> None: d = cls([("key", "one"), ("key2", "two"), ("key", 3)]) for i in d: assert type(i) is key_cls, (type(i), key_cls) def test_proxy_copy( any_multidict_class: type[MultiDict[str]], any_multidict_proxy_class: type[MultiDictProxy[str]], ) -> None: d1 = any_multidict_class(key="value", a="b") p1 = any_multidict_proxy_class(d1) d2 = p1.copy() assert d1 == d2 assert d1 is not d2 def test_multidict_subclassing( 
any_multidict_class: type[MultiDict[str]], ) -> None: class DummyMultidict(any_multidict_class): # type: ignore[valid-type,misc] pass def test_multidict_proxy_subclassing( any_multidict_proxy_class: type[MultiDictProxy[str]], ) -> None: class DummyMultidictProxy( any_multidict_proxy_class, # type: ignore[valid-type,misc] ): pass class BaseMultiDictTest: def test_instantiate__empty(self, cls: type[MutableMultiMapping[str]]) -> None: d = cls() empty: Mapping[str, str] = {} assert d == empty assert len(d) == 0 assert list(d.keys()) == [] assert list(d.values()) == [] assert list(d.items()) == [] assert cls() != list() # type: ignore[comparison-overlap] with pytest.raises(TypeError, match=r"3 were given"): cls(("key1", "value1"), ("key2", "value2")) # type: ignore[call-arg] # noqa: E501 @pytest.mark.parametrize("arg0", ([("key", "value1")], {"key": "value1"})) def test_instantiate__from_arg0( self, cls: type[MultiDict[str]], arg0: Union[list[tuple[str, str]], dict[str, str]], ) -> None: d = cls(arg0) assert d == {"key": "value1"} assert len(d) == 1 assert list(d.keys()) == ["key"] assert list(d.values()) == ["value1"] assert list(d.items()) == [("key", "value1")] def test_instantiate__with_kwargs( self, cls: type[MultiDict[str]], ) -> None: d = cls([("key", "value1")], key2="value2") assert d == {"key": "value1", "key2": "value2"} assert len(d) == 2 assert sorted(d.keys()) == ["key", "key2"] assert sorted(d.values()) == ["value1", "value2"] assert sorted(d.items()) == [("key", "value1"), ("key2", "value2")] def test_instantiate__from_generator( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]] ) -> None: d = cls((str(i), i) for i in range(2)) assert d == {"0": 0, "1": 1} assert len(d) == 2 assert sorted(d.keys()) == ["0", "1"] assert sorted(d.values()) == [0, 1] assert sorted(d.items()) == [("0", 0), ("1", 1)] def test_instantiate__from_list_of_lists( self, cls: type[MutableMultiMapping[str]], ) -> None: # Should work at runtime, but won't type check. 
d = cls([["key", "value1"]]) # type: ignore[call-arg] assert d == {"key": "value1"} def test_instantiate__from_list_of_custom_pairs( self, cls: type[MultiDict[str]], ) -> None: class Pair: def __len__(self) -> int: return 2 def __getitem__(self, pos: int) -> str: return ("key", "value1")[pos] # Works at runtime, but won't type check. d = cls([Pair()]) # type: ignore[list-item] assert d == {"key": "value1"} def test_getone(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")], key="value2") assert d.getone("key") == "value1" assert d.get("key") == "value1" assert d["key"] == "value1" with pytest.raises(KeyError, match="key2"): d["key2"] with pytest.raises(KeyError, match="key2"): d.getone("key2") assert d.getone("key2", "default") == "default" def test_call_with_kwargs(self, cls: type[MultiDict[str]]) -> None: d = cls([("present", "value")]) assert d.getall(default="missing", key="notfound") == "missing" def test__iter__( self, cls: Union[ type[MultiDict[Union[str, int]]], type[CIMultiDict[Union[str, int]]], ], ) -> None: d = cls([("key", "one"), ("key2", "two"), ("key", 3)]) assert list(d) == ["key", "key2", "key"] def test__contains( self, cls: Union[ type[MultiDict[Union[str, int]]], type[CIMultiDict[Union[str, int]]], ], ) -> None: d = cls([("key", "one"), ("key2", "two"), ("key", 3)]) assert list(d) == ["key", "key2", "key"] assert "key" in d assert "key2" in d assert "foo" not in d assert 42 not in d # type: ignore[comparison-overlap] def test_keys__contains( self, cls: Union[ type[MultiDict[Union[str, int]]], type[CIMultiDict[Union[str, int]]], ], ) -> None: d = cls([("key", "one"), ("key2", "two"), ("key", 3)]) assert list(d.keys()) == ["key", "key2", "key"] assert "key" in d.keys() assert "key2" in d.keys() assert "foo" not in d.keys() assert 42 not in d.keys() # type: ignore[comparison-overlap] def test_values__contains( self, cls: Union[ type[MultiDict[Union[str, int]]], type[CIMultiDict[Union[str, int]]], ], ) -> None: d = cls([("key", 
"one"), ("key", "two"), ("key", 3)]) assert list(d.values()) == ["one", "two", 3] assert "one" in d.values() assert "two" in d.values() assert 3 in d.values() assert "foo" not in d.values() def test_items__contains( self, cls: Union[ type[MultiDict[Union[str, int]]], type[CIMultiDict[Union[str, int]]], ], ) -> None: d = cls([("key", "one"), ("key", "two"), ("key", 3)]) assert list(d.items()) == [("key", "one"), ("key", "two"), ("key", 3)] assert ("key", "one") in d.items() assert ("key", "two") in d.items() assert ("key", 3) in d.items() assert ("foo", "bar") not in d.items() assert (42, 3) not in d.items() # type: ignore[comparison-overlap] assert 42 not in d.items() # type: ignore[comparison-overlap] def test_cannot_create_from_unaccepted( self, cls: type[MutableMultiMapping[str]], ) -> None: with pytest.raises(ValueError, match="multidict update sequence element"): cls([(1, 2, 3)]) # type: ignore[call-arg] def test_keys_is_set_less(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert d.keys() < {"key", "key2"} @pytest.mark.parametrize( ("contents", "expected"), ( ([("key", "value1")], True), ([("key", "value1"), ("key2", "value2")], True), ([("key", "value1"), ("key2", "value2"), ("key3", "value3")], False), ([("key", "value1"), ("key3", "value3")], False), ), ) def test_keys_is_set_less_equal( self, cls: type[MultiDict[str]], contents: list[tuple[str, str]], expected: bool, ) -> None: d = cls(contents) result = d.keys() <= {"key", "key2"} assert result is expected def test_keys_is_set_equal(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert d.keys() == {"key"} def test_items_is_set_equal(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert d.items() == {("key", "value1")} def test_keys_is_set_greater(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) assert d.keys() > {"key"} @pytest.mark.parametrize( ("set_", "expected"), ( ({"key"}, True), 
({"key", "key2"}, True), ({"key", "key2", "key3"}, False), ({"key3"}, False), ), ) def test_keys_is_set_greater_equal( self, cls: type[MultiDict[str]], set_: set[str], expected: bool ) -> None: d = cls([("key", "value1"), ("key2", "value2")]) result = d.keys() >= set_ assert result is expected def test_keys_less_than_not_implemented(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) sentinel_operation_result = object() class RightOperand: def __gt__(self, other: KeysView[str]) -> object: assert isinstance(other, KeysView) return sentinel_operation_result assert (d.keys() < RightOperand()) is sentinel_operation_result def test_keys_less_than_or_equal_not_implemented( self, cls: type[MultiDict[str]] ) -> None: d = cls([("key", "value1")]) sentinel_operation_result = object() class RightOperand: def __ge__(self, other: KeysView[str]) -> object: assert isinstance(other, KeysView) return sentinel_operation_result assert (d.keys() <= RightOperand()) is sentinel_operation_result def test_keys_greater_than_not_implemented(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) sentinel_operation_result = object() class RightOperand: def __lt__(self, other: KeysView[str]) -> object: assert isinstance(other, KeysView) return sentinel_operation_result assert (d.keys() > RightOperand()) is sentinel_operation_result def test_keys_greater_than_or_equal_not_implemented( self, cls: type[MultiDict[str]] ) -> None: d = cls([("key", "value1")]) sentinel_operation_result = object() class RightOperand: def __le__(self, other: KeysView[str]) -> object: assert isinstance(other, KeysView) return sentinel_operation_result assert (d.keys() >= RightOperand()) is sentinel_operation_result def test_keys_is_set_not_equal(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert d.keys() != {"key2"} def test_keys_not_equal_unrelated_type(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert d.keys() != "other" # type: 
ignore[comparison-overlap] def test_eq(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert {"key": "value1"} == d def test_eq2(self, cls: type[MultiDict[str]]) -> None: d1 = cls([("key", "value1")]) d2 = cls([("key2", "value1")]) assert d1 != d2 def test_eq3(self, cls: type[MultiDict[str]]) -> None: d1 = cls([("key", "value1")]) d2 = cls() assert d1 != d2 def test_eq_other_mapping_contains_more_keys( self, cls: type[MultiDict[str]], ) -> None: d1 = cls(foo="bar") d2 = dict(foo="bar", bar="baz") assert d1 != d2 def test_eq_bad_mapping_len( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]] ) -> None: class BadMapping(Mapping[str, int]): def __getitem__(self, key: str) -> int: return 1 # pragma: no cover # `len()` fails earlier def __iter__(self) -> Iterator[str]: yield "a" # pragma: no cover # `len()` fails earlier def __len__(self) -> int: return 1 // 0 d1 = cls(a=1) d2 = BadMapping() with pytest.raises(ZeroDivisionError): d1 == d2 def test_eq_bad_mapping_getitem( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]], ) -> None: class BadMapping(Mapping[str, int]): def __getitem__(self, key: str) -> int: return 1 // 0 def __iter__(self) -> Iterator[str]: yield "a" # pragma: no cover # foreign objects no iterated def __len__(self) -> int: return 1 d1 = cls(a=1) d2 = BadMapping() with pytest.raises(ZeroDivisionError): d1 == d2 def test_ne(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert d != {"key": "another_value"} def test_and(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert {"key"} == d.keys() & {"key", "key2"} def test_and2(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert {"key"} == {"key", "key2"} & d.keys() def test_bitwise_and_not_implemented(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) sentinel_operation_result = object() class RightOperand: def __rand__(self, other: KeysView[str]) -> object: assert 
isinstance(other, KeysView) return sentinel_operation_result assert d.keys() & RightOperand() is sentinel_operation_result def test_bitwise_and_iterable_not_set(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert {"key"} == d.keys() & ["key", "key2"] def test_or(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert {"key", "key2"} == d.keys() | {"key2"} def test_or2(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert {"key", "key2"} == {"key2"} | d.keys() def test_bitwise_or_not_implemented(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) sentinel_operation_result = object() class RightOperand: def __ror__(self, other: KeysView[str]) -> object: assert isinstance(other, KeysView) return sentinel_operation_result assert d.keys() | RightOperand() is sentinel_operation_result def test_bitwise_or_iterable_not_set(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")]) assert {"key", "key2"} == d.keys() | ["key2"] def test_sub(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) assert {"key"} == d.keys() - {"key2"} def test_sub2(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) assert {"key3"} == {"key", "key2", "key3"} - d.keys() def test_sub_not_implemented(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) sentinel_operation_result = object() class RightOperand: def __rsub__(self, other: KeysView[str]) -> object: assert isinstance(other, KeysView) return sentinel_operation_result assert d.keys() - RightOperand() is sentinel_operation_result def test_sub_iterable_not_set(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) assert {"key"} == d.keys() - ["key2"] def test_xor(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) assert {"key", "key3"} == d.keys() ^ 
{"key2", "key3"} def test_xor2(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) assert {"key", "key3"} == {"key2", "key3"} ^ d.keys() def test_xor_not_implemented(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) sentinel_operation_result = object() class RightOperand: def __rxor__(self, other: KeysView[str]) -> object: assert isinstance(other, KeysView) return sentinel_operation_result assert d.keys() ^ RightOperand() is sentinel_operation_result def test_xor_iterable_not_set(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1"), ("key2", "value2")]) assert {"key", "key3"} == d.keys() ^ ["key2", "key3"] @pytest.mark.parametrize( ("key", "value", "expected"), (("key2", "v", True), ("key", "value1", False)), ) def test_isdisjoint( self, cls: type[MultiDict[str]], key: str, value: str, expected: bool ) -> None: d = cls([("key", "value1")]) assert d.items().isdisjoint({(key, value)}) is expected assert d.keys().isdisjoint({key}) is expected def test_repr_aiohttp_issue_410(self, cls: type[MutableMultiMapping[str]]) -> None: d = cls() try: raise Exception pytest.fail("Should never happen") # pragma: no cover except Exception as e: repr(d) assert sys.exc_info()[1] == e # noqa: PT017 @pytest.mark.parametrize( "op", (operator.or_, operator.and_, operator.sub, operator.xor), ) @pytest.mark.parametrize("other", ({"other"},)) def test_op_issue_aiohttp_issue_410( self, cls: type[MultiDict[str]], op: Callable[[object, object], object], other: set[str], ) -> None: d = cls([("key", "value")]) try: raise Exception pytest.fail("Should never happen") # pragma: no cover except Exception as e: op(d.keys(), other) assert sys.exc_info()[1] == e # noqa: PT017 def test_weakref(self, cls: type[MutableMultiMapping[str]]) -> None: called = False def cb(wr: object) -> None: nonlocal called called = True d = cls() wr = weakref.ref(d, cb) del d gc.collect() assert called del wr def 
test_iter_length_hint_keys( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]], ) -> None: md = cls(a=1, b=2) it = iter(md.keys()) assert it.__length_hint__() == 2 # type: ignore[attr-defined] def test_iter_length_hint_items( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]], ) -> None: md = cls(a=1, b=2) it = iter(md.items()) assert it.__length_hint__() == 2 # type: ignore[attr-defined] def test_iter_length_hint_values( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]], ) -> None: md = cls(a=1, b=2) it = iter(md.values()) assert it.__length_hint__() == 2 def test_ctor_list_arg_and_kwds( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]], ) -> None: arg = [("a", 1)] obj = cls(arg, b=2) assert list(obj.items()) == [("a", 1), ("b", 2)] assert arg == [("a", 1)] def test_ctor_tuple_arg_and_kwds( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]], ) -> None: arg = (("a", 1),) obj = cls(arg, b=2) assert list(obj.items()) == [("a", 1), ("b", 2)] assert arg == (("a", 1),) def test_ctor_deque_arg_and_kwds( self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]], ) -> None: arg = deque([("a", 1)]) obj = cls(arg, b=2) assert list(obj.items()) == [("a", 1), ("b", 2)] assert arg == deque([("a", 1)]) class TestMultiDict(BaseMultiDictTest): @pytest.fixture( params=[ ("MultiDict",), ("MultiDict", "MultiDictProxy"), ], ) def cls( # type: ignore[misc] self, request: pytest.FixtureRequest, multidict_module: ModuleType, ) -> Callable[..., MultiMapping[int | str] | MutableMultiMapping[int | str]]: """Make a case-sensitive multidict class/proxy constructor.""" return chained_callable(multidict_module, request.param) def test__repr__(self, cls: type[MultiDict[str]]) -> None: d = cls() _cls = type(d) assert str(d) == "<%s()>" % _cls.__name__ d = cls([("key", "one"), ("key", "two")]) assert str(d) == "<%s('key': 'one', 'key': 'two')>" % _cls.__name__ def test__repr___recursive( self, any_multidict_class: 
type[MultiDict[object]] ) -> None: d = any_multidict_class() _cls = type(d) d = any_multidict_class() d["key"] = d assert str(d) == "<%s('key': ...)>" % _cls.__name__ def test_getall(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")], key="value2") assert d != {"key": "value1"} assert len(d) == 2 assert d.getall("key") == ["value1", "value2"] with pytest.raises(KeyError, match="some_key"): d.getall("some_key") default = object() assert d.getall("some_key", default) is default def test_preserve_stable_ordering( self, cls: type[MultiDict[Union[str, int]]], ) -> None: d = cls([("a", 1), ("b", "2"), ("a", 3)]) s = "&".join("{}={}".format(k, v) for k, v in d.items()) assert s == "a=1&b=2&a=3" def test_get(self, cls: type[MultiDict[int]]) -> None: d = cls([("a", 1), ("a", 2)]) assert d["a"] == 1 def test_items__repr__(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")], key="value2") expected = "<_ItemsView('key': 'value1', 'key': 'value2')>" assert repr(d.items()) == expected def test_items__repr__recursive( self, any_multidict_class: type[MultiDict[object]] ) -> None: d = any_multidict_class() d["key"] = d.items() expected = "<_ItemsView('key': <_ItemsView('key': ...)>)>" assert repr(d.items()) == expected def test_keys__repr__(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")], key="value2") assert repr(d.keys()) == "<_KeysView('key', 'key')>" def test_values__repr__(self, cls: type[MultiDict[str]]) -> None: d = cls([("key", "value1")], key="value2") assert repr(d.values()) == "<_ValuesView('value1', 'value2')>" def test_values__repr__recursive( self, any_multidict_class: type[MultiDict[object]] ) -> None: d = any_multidict_class() d["key"] = d.values() assert repr(d.values()) == "<_ValuesView(<_ValuesView(...)>)>" class TestCIMultiDict(BaseMultiDictTest): @pytest.fixture( params=[ ("CIMultiDict",), ("CIMultiDict", "CIMultiDictProxy"), ], ) def cls( # type: ignore[misc] self, request: pytest.FixtureRequest, 
multidict_module: ModuleType, ) -> Callable[..., MultiMapping[int | str] | MutableMultiMapping[int | str]]: """Make a case-insensitive multidict class/proxy constructor.""" return chained_callable(multidict_module, request.param) def test_basics(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "value1")], KEY="value2") assert d.getone("key") == "value1" assert d.get("key") == "value1" assert d.get("key2", "val") == "val" assert d["key"] == "value1" assert "key" in d with pytest.raises(KeyError, match="key2"): d["key2"] with pytest.raises(KeyError, match="key2"): d.getone("key2") def test_from_md_and_kwds(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "value1")]) d2 = cls(d, KEY="value2") assert list(d2.items()) == [("KEY", "value1"), ("KEY", "value2")] def test_getall(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "value1")], KEY="value2") assert not d == {"KEY": "value1"} assert len(d) == 2 assert d.getall("key") == ["value1", "value2"] with pytest.raises(KeyError, match="some_key"): d.getall("some_key") def test_get(self, cls: type[CIMultiDict[int]]) -> None: d = cls([("A", 1), ("a", 2)]) assert 1 == d["a"] def test__repr__(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "value1")], key="value2") _cls = type(d) expected = "<%s('KEY': 'value1', 'key': 'value2')>" % _cls.__name__ assert str(d) == expected def test_items__repr__(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "value1")], key="value2") expected = "<_ItemsView('KEY': 'value1', 'key': 'value2')>" assert repr(d.items()) == expected def test_keys__repr__(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "value1")], key="value2") assert repr(d.keys()) == "<_KeysView('KEY', 'key')>" def test_values__repr__(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "value1")], key="value2") assert repr(d.values()) == "<_ValuesView('value1', 'value2')>" def test_items_iter_of_iter(self, cls: type[CIMultiDict[str]]) -> None: d = 
cls([("KEY", "value1")], key="value2") it = iter(d.items()) assert iter(it) is it def test_keys_iter_of_iter(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "value1")], key="value2") it = iter(d.keys()) assert iter(it) is it def test_values_iter_of_iter(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "value1")], key="value2") it = iter(d.values()) assert iter(it) is it @pytest.mark.parametrize( ("arg", "expected"), ( pytest.param({"key"}, {"KEY"}, id="ok"), pytest.param({"key", 123}, {"KEY"}, id="non-str"), ), ) def test_keys_case_insensitive_and( self, cls: type[CIMultiDict[str]], arg: set[_T], expected: set[_T] ) -> None: d = cls([("KEY", "one")]) assert d.keys() & arg == expected @pytest.mark.parametrize( ("arg", "expected"), ( pytest.param(["key"], {"key"}, id="ok"), pytest.param(["key", 123], {"key"}, id="non-str"), ), ) def test_keys_case_insensitive_rand( self, cls: type[CIMultiDict[str]], arg: list[_T], expected: set[_T] ) -> None: d = cls([("KEY", "one")]) assert type(arg) is list assert arg & d.keys() == expected @pytest.mark.parametrize( ("arg", "expected"), ( pytest.param({"key", "other"}, {"KEY", "other"}, id="ok"), pytest.param({"key", "other", 123}, {"KEY", "other", 123}, id="non-str"), ), ) def test_keys_case_insensitive_or( self, cls: type[CIMultiDict[str]], arg: set[_T], expected: set[_T] ) -> None: d = cls([("KEY", "one")]) assert d.keys() | arg == expected @pytest.mark.parametrize( ("arg", "expected"), ( pytest.param(["key", "other"], {"key", "other"}, id="ok"), pytest.param(["key", "other", 123], {"key", "other", 123}, id="non-str"), ), ) def test_keys_case_insensitive_ror( self, cls: type[CIMultiDict[str]], arg: list[_T], expected: set[_T] ) -> None: d = cls([("KEY", "one")]) assert type(arg) is list assert arg | d.keys() == expected @pytest.mark.parametrize( ("arg", "expected"), ( pytest.param({"key", "other"}, {"KEY2"}, id="ok"), pytest.param({"key", "other", 123}, {"KEY2"}, id="non-str"), ), ) def 
test_keys_case_insensitive_sub( self, cls: type[CIMultiDict[str]], arg: set[_T], expected: set[_T] ) -> None: d = cls([("KEY", "one"), ("KEY2", "two")]) assert d.keys() - arg == expected @pytest.mark.parametrize( ("arg", "expected"), ( pytest.param(["key", "other"], {"other"}, id="ok"), pytest.param(["key", "other", 123], {"other", 123}, id="non-str"), ), ) def test_keys_case_insensitive_rsub( self, cls: type[CIMultiDict[str]], arg: list[_T], expected: set[_T] ) -> None: d = cls([("KEY", "one"), ("KEY2", "two")]) assert type(arg) is list assert arg - d.keys() == expected @pytest.mark.parametrize( ("arg", "expected"), ( pytest.param(["key", "other"], {"KEY2", "other"}, id="ok"), pytest.param(["key", "other", 123], {"KEY2", "other", 123}, id="non-str"), ), ) def test_keys_case_insensitive_xor( self, cls: type[CIMultiDict[str]], arg: list[_T], expected: set[_T] ) -> None: d = cls([("KEY", "one"), ("KEY2", "two")]) assert d.keys() ^ arg == expected @pytest.mark.parametrize( ("arg", "expected"), ( pytest.param(["key", "other"], {"KEY2", "other"}, id="ok"), pytest.param(["key", "other", 123], {"KEY2", "other", 123}, id="non-str"), ), ) def test_keys_case_insensitive_rxor( self, cls: type[CIMultiDict[str]], arg: list[_T], expected: set[_T] ) -> None: d = cls([("KEY", "one"), ("KEY2", "two")]) assert arg ^ d.keys() == expected @pytest.mark.parametrize( ("arg", "expected"), ( pytest.param({"key"}, False, id="ok"), pytest.param({123}, True, id="non-str"), ), ) def test_keys_case_insensitive_isdisjoint( self, cls: type[CIMultiDict[str]], arg: set[_T], expected: bool ) -> None: d = cls([("KEY", "one")]) assert d.keys().isdisjoint(arg) == expected def test_keys_case_insensitive_not_iterable( self, cls: type[CIMultiDict[str]] ) -> None: d = cls([("KEY", "one"), ("KEY2", "two")]) with pytest.raises(TypeError): 123 & d.keys() # type: ignore[operator] with pytest.raises(TypeError): d.keys() & 123 # type: ignore[operator] with pytest.raises(TypeError): 123 | d.keys() # type: 
ignore[operator] with pytest.raises(TypeError): d.keys() | 123 # type: ignore[operator] with pytest.raises(TypeError): 123 ^ d.keys() # type: ignore[operator] with pytest.raises(TypeError): d.keys() ^ 123 # type: ignore[operator] with pytest.raises(TypeError): d.keys() - 123 # type: ignore[operator] with pytest.raises(TypeError): 123 - d.keys() # type: ignore[operator] @pytest.mark.parametrize( "param", ( pytest.param("non-tuple", id="not-tuple"), pytest.param(("key2", "two", "three"), id="not-2-elems"), pytest.param((123, "two"), id="not-str"), ), ) def test_items_case_insensitive_parse_item( self, cls: type[CIMultiDict[str]], param: _T ) -> None: d = cls([("KEY", "one")]) assert d.items() | {param} == {("KEY", "one"), param} @pytest.mark.parametrize( ("arg", "expected"), ( pytest.param({("key", "one")}, {("KEY", "one")}, id="ok"), pytest.param( {("key", "one"), (123, "two")}, {("KEY", "one")}, id="non-str", ), pytest.param( {("key", "one"), ("key", "two")}, {("KEY", "one")}, id="nonequal-value", ), ), ) def test_items_case_insensitive_and( self, cls: type[CIMultiDict[str]], arg: set[_T], expected: set[_T] ) -> None: d = cls([("KEY", "one")]) assert d.items() & arg == expected @pytest.mark.parametrize( ("arg", "expected"), ( pytest.param([("key", "one")], {("key", "one")}, id="ok"), pytest.param( [("key", "one"), (123, "two")], {("key", "one")}, id="non-str", ), pytest.param( [("key", "one"), ("key", "two")], {("key", "one")}, id="nonequal-value", ), ), ) def test_items_case_insensitive_rand( self, cls: type[CIMultiDict[str]], arg: list[_T], expected: set[_T] ) -> None: d = cls([("KEY", "one")]) assert type(arg) is list assert arg & d.items() == expected def test_items_case_insensitive_or(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "one")]) assert d.items() | {("key", "one"), ("other", "two")} == { ("KEY", "one"), ("other", "two"), } def test_items_case_insensitive_ror(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "one"), ("KEY2", 
"three")]) assert [("key", "one"), ("other", "two")] | d.items() == { ("key", "one"), ("other", "two"), ("KEY2", "three"), } @pytest.mark.parametrize( ("arg", "expected"), ( pytest.param( {("key", "one"), ("other", "three")}, {("KEY2", "two")}, id="ok" ), pytest.param( {("key", "one"), (123, "three")}, {("KEY2", "two")}, id="non-str" ), ), ) def test_items_case_insensitive_sub( self, cls: type[CIMultiDict[str]], arg: set[_T], expected: set[_T] ) -> None: d = cls([("KEY", "one"), ("KEY2", "two")]) assert d.items() - arg == expected @pytest.mark.parametrize( ("arg", "expected"), ( pytest.param( [("key", "one"), ("other", "three")], {("other", "three")}, id="ok" ), pytest.param( [("key", "one"), (123, "three")], {(123, "three")}, id="non-str" ), ), ) def test_items_case_insensitive_rsub( self, cls: type[CIMultiDict[str]], arg: set[_T], expected: set[_T] ) -> None: d = cls([("KEY", "one"), ("KEY2", "two")]) assert arg - d.items() == expected @pytest.mark.parametrize( ("arg", "expected"), ( pytest.param( {("key", "one"), ("other", "three")}, {("KEY2", "two"), ("other", "three")}, id="ok", ), pytest.param( {("key", "one"), (123, "three")}, {("KEY2", "two"), (123, "three")}, id="non-str", ), ), ) def test_items_case_insensitive_xor( self, cls: type[CIMultiDict[str]], arg: set[_T], expected: set[_T] ) -> None: d = cls([("KEY", "one"), ("KEY2", "two")]) assert d.items() ^ arg == expected def test_items_case_insensitive_rxor(self, cls: type[CIMultiDict[str]]) -> None: d = cls([("KEY", "one"), ("KEY2", "two")]) assert [("key", "one"), ("other", "three")] ^ d.items() == { ("KEY2", "two"), ("other", "three"), } def test_items_case_insensitive_non_iterable( self, cls: type[CIMultiDict[str]] ) -> None: d = cls([("KEY", "one")]) with pytest.raises(TypeError): d.items() & None # type: ignore[operator] with pytest.raises(TypeError): None & d.items() # type: ignore[operator] with pytest.raises(TypeError): d.items() | None # type: ignore[operator] with pytest.raises(TypeError): None | 
d.items() # type: ignore[operator] with pytest.raises(TypeError): d.items() ^ None # type: ignore[operator] with pytest.raises(TypeError): None ^ d.items() # type: ignore[operator] with pytest.raises(TypeError): d.items() - None # type: ignore[operator] with pytest.raises(TypeError): None - d.items() # type: ignore[operator] @pytest.mark.parametrize( ("arg", "expected"), ( pytest.param({("key", "one")}, False, id="ok"), pytest.param({(123, "one")}, True, id="non-str"), ), ) def test_items_case_insensitive_isdisjoint( self, cls: type[CIMultiDict[str]], arg: set[_T], expected: bool ) -> None: d = cls([("KEY", "one")]) assert d.items().isdisjoint(arg) == expected def test_create_multidict_from_existing_multidict_new_pairs() -> None: """Test creating a MultiDict from an existing one does not mutate the original.""" original = MultiDict([("h1", "header1"), ("h2", "header2"), ("h3", "header3")]) new = MultiDict(original, h4="header4") assert "h4" in new assert "h4" not in original def test_convert_multidict_to_cimultidict_and_back( case_sensitive_multidict_class: type[MultiDict[str]], case_insensitive_multidict_class: type[CIMultiDict[str]], case_insensitive_str_class: type[istr], ) -> None: """Test conversion from MultiDict to CIMultiDict.""" start_as_md = case_sensitive_multidict_class( [("KEY", "value1"), ("key2", "value2")] ) assert start_as_md.get("KEY") == "value1" assert start_as_md["KEY"] == "value1" assert start_as_md.get("key2") == "value2" assert start_as_md["key2"] == "value2" start_as_cimd = case_insensitive_multidict_class( [("KEY", "value1"), ("key2", "value2")] ) assert start_as_cimd.get("key") == "value1" assert start_as_cimd["key"] == "value1" assert start_as_cimd.get("key2") == "value2" assert start_as_cimd["key2"] == "value2" converted_to_ci = case_insensitive_multidict_class(start_as_md) assert converted_to_ci.get("key") == "value1" assert converted_to_ci["key"] == "value1" assert converted_to_ci.get("key2") == "value2" assert converted_to_ci["key2"] 
== "value2" converted_to_md = case_sensitive_multidict_class(converted_to_ci) assert all(type(k) is case_insensitive_str_class for k in converted_to_ci.keys()) assert converted_to_md.get("KEY") == "value1" assert converted_to_md["KEY"] == "value1" assert converted_to_md.get("key2") == "value2" assert converted_to_md["key2"] == "value2" def test_convert_multidict_to_cimultidict_eq( case_sensitive_multidict_class: type[MultiDict[str]], case_insensitive_multidict_class: type[CIMultiDict[str]], ) -> None: """Test compare after conversion from MultiDict to CIMultiDict.""" original = case_sensitive_multidict_class( [("h1", "header1"), ("h2", "header2"), ("h3", "header3")] ) assert case_insensitive_multidict_class( original ) == case_insensitive_multidict_class( [("H1", "header1"), ("H2", "header2"), ("H3", "header3")] ) @pytest.mark.skipif(IS_PYPY, reason="getrefcount is not supported on PyPy") def test_extend_does_not_alter_refcount( case_sensitive_multidict_class: type[MultiDict[str]], ) -> None: """Test that extending a MultiDict with a MultiDict does not alter the refcount of the original.""" original = case_sensitive_multidict_class([("h1", "header1")]) new = case_sensitive_multidict_class([("h2", "header2")]) original_refcount = sys.getrefcount(original) new.extend(original) assert sys.getrefcount(original) == original_refcount @pytest.mark.skipif(IS_PYPY, reason="getrefcount is not supported on PyPy") def test_update_does_not_alter_refcount( case_sensitive_multidict_class: type[MultiDict[str]], ) -> None: """Test that updating a MultiDict with a MultiDict does not alter the refcount of the original.""" original = case_sensitive_multidict_class([("h1", "header1")]) new = case_sensitive_multidict_class([("h2", "header2")]) original_refcount = sys.getrefcount(original) new.update(original) assert sys.getrefcount(original) == original_refcount @pytest.mark.skipif(IS_PYPY, reason="getrefcount is not supported on PyPy") def test_init_does_not_alter_refcount( 
case_sensitive_multidict_class: type[MultiDict[str]], ) -> None: """Test that initializing a MultiDict with a MultiDict does not alter the refcount of the original.""" original = case_sensitive_multidict_class([("h1", "header1")]) original_refcount = sys.getrefcount(original) case_sensitive_multidict_class(original) assert sys.getrefcount(original) == original_refcount def test_subclassed_multidict( any_multidict_class: type[MultiDict[str]], ) -> None: """Test that subclassed MultiDicts work as expected.""" class SubclassedMultiDict(any_multidict_class): # type: ignore[valid-type, misc] """Subclassed MultiDict.""" d1 = SubclassedMultiDict([("key", "value1")]) d2 = SubclassedMultiDict([("key", "value2")]) d3 = SubclassedMultiDict([("key", "value1")]) assert d1 != d2 assert d1 == d3 assert d1 == SubclassedMultiDict([("key", "value1")]) assert d1 != SubclassedMultiDict([("key", "value2")]) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 multidict-6.4.3/tests/test_multidict_benchmarks.py0000644000175100001660000003775014776033000022107 0ustar00runnerdocker"""codspeed benchmarks for multidict.""" from typing import Dict, Type, Union from pytest_codspeed import BenchmarkFixture from multidict import ( CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr, ) # Note that this benchmark should not be refactored to use pytest.mark.parametrize # since each benchmark name should be unique. 
_SENTINEL = object() def test_multidict_insert_str( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: md = any_multidict_class() items = [str(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md[i] = i def test_cimultidict_insert_istr( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: md = case_insensitive_multidict_class() items = [istr(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md[i] = i def test_multidict_add_str( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: base_md = any_multidict_class() items = [str(i) for i in range(100)] @benchmark def _run() -> None: for _ in range(100): md = base_md.copy() for i in items: md.add(i, i) def test_cimultidict_add_istr( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: base_md = case_insensitive_multidict_class() items = [istr(i) for i in range(100)] @benchmark def _run() -> None: for j in range(100): md = base_md.copy() for i in items: md.add(i, i) def test_multidict_pop_str( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: md_base = any_multidict_class((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100)] @benchmark def _run() -> None: md = md_base.copy() for i in items: md.pop(i) def test_cimultidict_pop_istr( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: md_base = case_insensitive_multidict_class((istr(i), istr(i)) for i in range(100)) items = [istr(i) for i in range(100)] @benchmark def _run() -> None: md = md_base.copy() for i in items: md.pop(i) def test_multidict_popitem_str( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: md_base = any_multidict_class((str(i), str(i)) for i in range(100)) @benchmark def _run() -> None: md = md_base.copy() for _ in range(100): 
md.popitem() def test_multidict_clear_str( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: md = any_multidict_class((str(i), str(i)) for i in range(100)) @benchmark def _run() -> None: md.clear() def test_multidict_update_str( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: md = any_multidict_class((str(i), str(i)) for i in range(100)) items = {str(i): str(i) for i in range(100, 200)} @benchmark def _run() -> None: md.update(items) def test_cimultidict_update_istr( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: md = case_insensitive_multidict_class((istr(i), istr(i)) for i in range(100)) items: Dict[Union[str, istr], istr] = {istr(i): istr(i) for i in range(100, 200)} @benchmark def _run() -> None: md.update(items) def test_multidict_update_str_with_kwargs( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: md = any_multidict_class((str(i), str(i)) for i in range(100)) items = {str(i): str(i) for i in range(100, 200)} kwargs = {str(i): str(i) for i in range(200, 300)} @benchmark def _run() -> None: md.update(items, **kwargs) def test_cimultidict_update_istr_with_kwargs( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: md = case_insensitive_multidict_class((istr(i), istr(i)) for i in range(100)) items: Dict[Union[str, istr], istr] = {istr(i): istr(i) for i in range(100, 200)} kwargs = {str(i): istr(i) for i in range(200, 300)} @benchmark def _run() -> None: md.update(items, **kwargs) def test_multidict_extend_str( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: base_md = any_multidict_class((str(i), str(i)) for i in range(100)) items = {str(i): str(i) for i in range(200)} @benchmark def _run() -> None: for j in range(100): md = base_md.copy() md.extend(items) def test_cimultidict_extend_istr( benchmark: BenchmarkFixture, 
case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: base_md = case_insensitive_multidict_class((istr(i), istr(i)) for i in range(100)) items = {istr(i): istr(i) for i in range(200)} @benchmark def _run() -> None: for _ in range(100): md = base_md.copy() md.extend(items) def test_multidict_extend_str_with_kwargs( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: base_md = any_multidict_class((str(i), str(i)) for i in range(100)) items = {str(i): str(i) for i in range(200)} kwargs = {str(i): str(i) for i in range(200, 300)} @benchmark def _run() -> None: for j in range(100): md = base_md.copy() md.extend(items, **kwargs) def test_cimultidict_extend_istr_with_kwargs( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: base_md = case_insensitive_multidict_class((istr(i), istr(i)) for i in range(100)) items = {istr(i): istr(i) for i in range(200)} kwargs = {str(i): istr(i) for i in range(200, 300)} @benchmark def _run() -> None: for _ in range(100): md = base_md.copy() md.extend(items, **kwargs) def test_multidict_delitem_str( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: md_base = any_multidict_class((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100)] @benchmark def _run() -> None: md = md_base.copy() for i in items: del md[i] def test_cimultidict_delitem_istr( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: md_base = case_insensitive_multidict_class((istr(i), istr(i)) for i in range(100)) items = [istr(i) for i in range(100)] @benchmark def _run() -> None: md = md_base.copy() for i in items: del md[i] def test_multidict_getall_str_hit( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: md = any_multidict_class((f"key{j}", str(f"{i}-{j}")) for i in range(20) for j in range(5)) @benchmark def _run() -> None: md.getall("key0") def 
test_multidict_getall_str_miss( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: md = any_multidict_class((f"key{j}", str(f"{i}-{j}")) for i in range(20) for j in range(5)) @benchmark def _run() -> None: md.getall("miss", ()) def test_cimultidict_getall_istr_hit( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: all_istr = istr("key0") md = case_insensitive_multidict_class((f"key{j}", istr(f"{i}-{j}")) for i in range(20) for j in range(5)) @benchmark def _run() -> None: md.getall(all_istr) def test_cimultidict_getall_istr_miss( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: miss_istr = istr("miss") md = case_insensitive_multidict_class((istr(f"key{j}"), istr(f"{i}-{j}")) for i in range(20) for j in range(5)) @benchmark def _run() -> None: md.getall(miss_istr, ()) def test_multidict_fetch( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: md = any_multidict_class((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md[i] def test_cimultidict_fetch_istr( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: md = case_insensitive_multidict_class((istr(i), istr(i)) for i in range(100)) items = [istr(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md[i] def test_multidict_get_hit( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: md = any_multidict_class((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md.get(i) def test_multidict_get_miss( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: md = any_multidict_class((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100, 200)] @benchmark def _run() -> None: for i in items: 
md.get(i) def test_cimultidict_get_istr_hit( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: md = case_insensitive_multidict_class((istr(i), istr(i)) for i in range(100)) items = [istr(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md.get(i) def test_cimultidict_get_istr_miss( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: md = case_insensitive_multidict_class((istr(i), istr(i)) for i in range(100)) items = [istr(i) for i in range(100, 200)] @benchmark def _run() -> None: for i in items: md.get(i) def test_multidict_get_hit_with_default( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: md = any_multidict_class((str(i), str(i)) for i in range(100)) items = [str(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md.get(i, _SENTINEL) def test_cimultidict_get_istr_hit_with_default( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: md = case_insensitive_multidict_class((istr(i), istr(i)) for i in range(100)) items = [istr(i) for i in range(100)] @benchmark def _run() -> None: for i in items: md.get(i, _SENTINEL) def test_cimultidict_get_istr_with_default_miss( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: md = case_insensitive_multidict_class((istr(i), istr(i)) for i in range(100)) items = [istr(i) for i in range(100, 200)] @benchmark def _run() -> None: for i in items: md.get(i, _SENTINEL) def test_multidict_repr( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: items = [str(i) for i in range(100)] md = any_multidict_class([(i, i) for i in items]) @benchmark def _run() -> None: repr(md) def test_create_empty_multidict( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: @benchmark def _run() -> None: any_multidict_class() def 
test_create_multidict_with_items( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: items = [(str(i), str(i)) for i in range(100)] @benchmark def _run() -> None: any_multidict_class(items) def test_create_cimultidict_with_items_istr( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: items = [(istr(i), istr(i)) for i in range(100)] @benchmark def _run() -> None: case_insensitive_multidict_class(items) def test_create_multidict_with_dict( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: dct = {str(i): str(i) for i in range(100)} @benchmark def _run() -> None: any_multidict_class(dct) def test_create_cimultidict_with_dict_istr( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: dct = {istr(i): istr(i) for i in range(100)} @benchmark def _run() -> None: case_insensitive_multidict_class(dct) def test_create_multidict_with_items_with_kwargs( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: items = [(str(i), str(i)) for i in range(100)] kwargs = {str(i): str(i) for i in range(100)} @benchmark def _run() -> None: any_multidict_class(items, **kwargs) def test_create_cimultidict_with_items_istr_with_kwargs( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: items = [(istr(i), istr(i)) for i in range(100)] kwargs = {str(i): istr(i) for i in range(100)} @benchmark def _run() -> None: case_insensitive_multidict_class(items, **kwargs) def test_create_empty_multidictproxy(benchmark: BenchmarkFixture) -> None: md: MultiDict[str] = MultiDict() @benchmark def _run() -> None: MultiDictProxy(md) def test_create_multidictproxy(benchmark: BenchmarkFixture) -> None: items = [(str(i), str(i)) for i in range(100)] md: MultiDict[str] = MultiDict(items) @benchmark def _run() -> None: MultiDictProxy(md) def test_create_empty_cimultidictproxy( benchmark: 
BenchmarkFixture, ) -> None: md: CIMultiDict[istr] = CIMultiDict() @benchmark def _run() -> None: CIMultiDictProxy(md) def test_create_cimultidictproxy( benchmark: BenchmarkFixture, ) -> None: items = [(istr(i), istr(i)) for i in range(100)] md = CIMultiDict(items) @benchmark def _run() -> None: CIMultiDictProxy(md) def test_create_from_existing_cimultidict( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: existing = case_insensitive_multidict_class((istr(i), istr(i)) for i in range(5)) @benchmark def _run() -> None: case_insensitive_multidict_class(existing) def test_copy_from_existing_cimultidict( benchmark: BenchmarkFixture, case_insensitive_multidict_class: Type[CIMultiDict[istr]], ) -> None: existing = case_insensitive_multidict_class((istr(i), istr(i)) for i in range(5)) @benchmark def _run() -> None: existing.copy() def test_iterate_multidict( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: items = [(str(i), str(i)) for i in range(100)] md = any_multidict_class(items) @benchmark def _run() -> None: for _ in md: pass def test_iterate_multidict_keys( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: items = [(str(i), str(i)) for i in range(100)] md = any_multidict_class(items) @benchmark def _run() -> None: for _ in md.keys(): pass def test_iterate_multidict_values( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: items = [(str(i), str(i)) for i in range(100)] md = any_multidict_class(items) @benchmark def _run() -> None: for _ in md.values(): pass def test_iterate_multidict_items( benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]] ) -> None: items = [(str(i), str(i)) for i in range(100)] md = any_multidict_class(items) @benchmark def _run() -> None: for _, _ in md.items(): pass ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0 
multidict-6.4.3/tests/test_mutable_multidict.py0000644000175100001660000005151414776033000021415 0ustar00runnerdocker
import string
import sys
from typing import Union

import pytest

from multidict import CIMultiDict, CIMultiDictProxy, MultiDictProxy, istr


class TestMutableMultiDict:
    """Mutation behaviour of the case-sensitive multidict fixtures."""

    def test_copy(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        # copy() must produce an equal but distinct object.
        d1 = case_sensitive_multidict_class(key="value", a="b")
        d2 = d1.copy()
        assert d1 == d2
        assert d1 is not d2

    def test__repr__(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class()
        assert str(d) == "<%s()>" % case_sensitive_multidict_class.__name__
        d = case_sensitive_multidict_class([("key", "one"), ("key", "two")])
        expected = (
            f"<{case_sensitive_multidict_class.__name__}"
            "('key': 'one', 'key': 'two')>"
        )
        assert str(d) == expected

    def test_getall(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class([("key", "value1")], key="value2")
        assert len(d) == 2
        assert d.getall("key") == ["value1", "value2"]
        with pytest.raises(KeyError, match="some_key"):
            d.getall("some_key")
        default = object()
        # identity check: the exact default object must come back on a miss
        assert d.getall("some_key", default) is default

    def test_add(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class()
        assert d == {}
        d["key"] = "one"
        assert d == {"key": "one"}
        assert d.getall("key") == ["one"]
        d["key"] = "two"
        assert d == {"key": "two"}
        assert d.getall("key") == ["two"]
        # add() appends a duplicate entry, unlike __setitem__ which replaces
        d.add("key", "one")
        assert 2 == len(d)
        assert d.getall("key") == ["two", "one"]
        d.add("foo", "bar")
        assert 3 == len(d)
        assert d.getall("foo") == ["bar"]

    def test_extend(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[Union[str, int]]],
    ) -> None:
        d = case_sensitive_multidict_class()
        assert d == {}
        d.extend([("key", "one"), ("key", "two")], key=3, foo="bar")
        assert d != {"key": "one", "foo": "bar"}
        assert 4 == len(d)
        itms = d.items()
        # we can't guarantee order of kwargs
        assert ("key", "one") in itms
        assert ("key", "two") in itms
        assert ("key", 3) in itms
        assert ("foo", "bar") in itms
        other = case_sensitive_multidict_class(bar="baz")
        assert other == {"bar": "baz"}
        d.extend(other)
        assert ("bar", "baz") in d.items()
        d.extend({"foo": "moo"})
        assert ("foo", "moo") in d.items()
        d.extend()
        assert 6 == len(d)
        with pytest.raises(TypeError):
            d.extend("foo", "bar")  # type: ignore[arg-type, call-arg]

    def test_extend_from_proxy(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
        case_sensitive_multidict_proxy_class: type[MultiDictProxy[str]],
    ) -> None:
        d = case_sensitive_multidict_class([("a", "a"), ("b", "b")])
        proxy = case_sensitive_multidict_proxy_class(d)
        d2 = case_sensitive_multidict_class()
        d2.extend(proxy)
        assert [("a", "a"), ("b", "b")] == list(d2.items())

    def test_clear(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class([("key", "one")], key="two", foo="bar")
        d.clear()
        assert d == {}
        assert list(d.items()) == []

    def test_del(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class([("key", "one"), ("key", "two")], foo="bar")
        assert list(d.keys()) == ["key", "key", "foo"]
        # del removes *all* entries for the key
        del d["key"]
        assert d == {"foo": "bar"}
        assert list(d.items()) == [("foo", "bar")]
        with pytest.raises(KeyError, match="key"):
            del d["key"]

    def test_set_default(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class([("key", "one"), ("key", "two")], foo="bar")
        # setdefault returns the first existing value for a present key
        assert "one" == d.setdefault("key", "three")
        assert "three" == d.setdefault("otherkey", "three")
        assert "otherkey" in d
        assert "three" == d["otherkey"]

    def test_popitem(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class()
        d.add("key", "val1")
        d.add("key", "val2")
        # popitem removes the most recently added entry (LIFO)
        assert ("key", "val2") == d.popitem()
        assert [("key", "val1")] == list(d.items())

    def test_popitem_empty_multidict(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class()
        with pytest.raises(KeyError):
            d.popitem()

    def test_pop(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class()
        d.add("key", "val1")
        d.add("key", "val2")
        # pop removes only the *first* entry for the key
        assert "val1" == d.pop("key")
        assert {"key": "val2"} == d

    def test_pop2(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class()
        d.add("key", "val1")
        d.add("key2", "val2")
        d.add("key", "val3")
        assert "val1" == d.pop("key")
        assert [("key2", "val2"), ("key", "val3")] == list(d.items())

    def test_pop_default(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class(other="val")
        assert "default" == d.pop("key", "default")
        assert "other" in d

    def test_pop_raises(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class(other="val")
        with pytest.raises(KeyError, match="key"):
            d.pop("key")
        assert "other" in d

    def test_replacement_order(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        # __setitem__ replaces the first duplicate in place and drops the rest
        d = case_sensitive_multidict_class()
        d.add("key1", "val1")
        d.add("key2", "val2")
        d.add("key1", "val3")
        d.add("key2", "val4")
        d["key1"] = "val"
        expected = [("key1", "val"), ("key2", "val2"), ("key2", "val4")]
        assert expected == list(d.items())

    def test_nonstr_key(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class()
        with pytest.raises(TypeError):
            d[1] = "val"  # type: ignore[index]

    def test_istr_key(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
        case_insensitive_str_class: type[str],
    ) -> None:
        # the stored key keeps its istr type even in a case-sensitive dict
        d = case_sensitive_multidict_class()
        d[case_insensitive_str_class("1")] = "val"
        assert type(list(d.keys())[0]) is case_insensitive_str_class

    def test_str_derived_key(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        class A(str):
            pass

        d = case_sensitive_multidict_class()
        d[A("1")] = "val"
        assert type(list(d.keys())[0]) is A

    def test_istr_key_add(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
        case_insensitive_str_class: type[str],
    ) -> None:
        d = case_sensitive_multidict_class()
        d.add(case_insensitive_str_class("1"), "val")
        assert type(list(d.keys())[0]) is case_insensitive_str_class

    def test_str_derived_key_add(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        class A(str):
            pass

        d = case_sensitive_multidict_class()
        d.add(A("1"), "val")
        assert type(list(d.keys())[0]) is A

    def test_popall(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class()
        d.add("key1", "val1")
        d.add("key2", "val2")
        d.add("key1", "val3")
        ret = d.popall("key1")
        assert ["val1", "val3"] == ret
        assert {"key2": "val2"} == d

    def test_popall_default(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class()
        assert "val" == d.popall("key", "val")

    def test_popall_key_error(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_sensitive_multidict_class()
        with pytest.raises(KeyError, match="key"):
            d.popall("key")

    def test_large_multidict_resizing(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[int]],
    ) -> None:
        # grow past internal resize thresholds, then shrink back to one entry
        SIZE = 1024
        d = case_sensitive_multidict_class()
        for i in range(SIZE):
            d["key" + str(i)] = i

        for i in range(SIZE - 1):
            del d["key" + str(i)]

        assert {"key" + str(SIZE - 1): SIZE - 1} == d

    def test_update(
        self,
        case_sensitive_multidict_class: type[CIMultiDict[Union[str, int]]],
    ) -> None:
        d = case_sensitive_multidict_class()
        assert d == {}
        d.update([("key", "one"), ("key", "two")], key=3, foo="bar")
        assert d != {"key": "one", "foo": "bar"}
        assert 4 == len(d)
        itms = d.items()
        # we can't guarantee order of kwargs
        assert ("key", "one") in itms
        assert ("key", "two") in itms
        assert ("key", 3) in itms
        assert ("foo", "bar") in itms
        other = case_sensitive_multidict_class(bar="baz")
        assert other == {"bar": "baz"}
        d.update(other)
        assert ("bar", "baz") in d.items()
        d.update({"foo": "moo"})
        assert ("foo", "moo") in d.items()
        d.update()
        # update replaces existing keys, so only "bar" was a net addition
        assert 5 == len(d)
        with pytest.raises(TypeError):
            d.update("foo", "bar")  # type: ignore[arg-type, call-arg]


class TestCIMutableMultiDict:
    """Mutation behaviour of the case-insensitive multidict fixtures."""

    def test_getall(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_insensitive_multidict_class([("KEY", "value1")], KEY="value2")
        assert d != {"KEY": "value1"}
        assert len(d) == 2
        assert d.getall("key") == ["value1", "value2"]
        with pytest.raises(KeyError, match="some_key"):
            d.getall("some_key")

    def test_ctor(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_insensitive_multidict_class(k1="v1")
        # lookup is case-insensitive but the original key case is preserved
        assert "v1" == d["K1"]
        assert ("k1", "v1") in d.items()

    def test_setitem(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_insensitive_multidict_class()
        d["k1"] = "v1"
        assert "v1" == d["K1"]
        assert ("k1", "v1") in d.items()

    def test_delitem(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_insensitive_multidict_class()
        d["k1"] = "v1"
        assert "K1" in d
        del d["k1"]
        assert "K1" not in d

    def test_copy(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d1 = case_insensitive_multidict_class(key="KEY", a="b")
        d2 = d1.copy()
        assert d1 == d2
        assert d1.items() == d2.items()
        assert d1 is not d2

    def test__repr__(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_insensitive_multidict_class()
        assert str(d) == "<%s()>" % case_insensitive_multidict_class.__name__
        d = case_insensitive_multidict_class([("KEY", "one"), ("KEY", "two")])
        expected = (
            f"<{case_insensitive_multidict_class.__name__}"
            "('KEY': 'one', 'KEY': 'two')>"
        )
        assert str(d) == expected

    def test_add(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_insensitive_multidict_class()
        assert d == {}
        d["KEY"] = "one"
        assert ("KEY", "one") in d.items()
        assert d == case_insensitive_multidict_class({"Key": "one"})
        assert d.getall("key") == ["one"]
        d["KEY"] = "two"
        assert ("KEY", "two") in d.items()
        assert d == case_insensitive_multidict_class({"Key": "two"})
        assert d.getall("key") == ["two"]
        d.add("KEY", "one")
        assert ("KEY", "one") in d.items()
        assert 2 == len(d)
        assert d.getall("key") == ["two", "one"]
        d.add("FOO", "bar")
        assert ("FOO", "bar") in d.items()
        assert 3 == len(d)
        assert d.getall("foo") == ["bar"]
        # add() also accepts keyword form
        d.add(key="test", value="test")
        assert ("test", "test") in d.items()
        assert 4 == len(d)
        assert d.getall("test") == ["test"]

    def test_extend(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[Union[str, int]]],
    ) -> None:
        d = case_insensitive_multidict_class()
        assert d == {}
        d.extend([("KEY", "one"), ("key", "two")], key=3, foo="bar")
        assert 4 == len(d)
        itms = d.items()
        # we can't guarantee order of kwargs
        assert ("KEY", "one") in itms
        assert ("key", "two") in itms
        assert ("key", 3) in itms
        assert ("foo", "bar") in itms
        other = case_insensitive_multidict_class(Bar="baz")
        assert other == {"Bar": "baz"}
        d.extend(other)
        assert ("Bar", "baz") in d.items()
        assert "bar" in d
        d.extend({"Foo": "moo"})
        assert ("Foo", "moo") in d.items()
        assert "foo" in d
        d.extend()
        assert 6 == len(d)
        with pytest.raises(TypeError):
            d.extend("foo", "bar")  # type: ignore[arg-type, call-arg]

    def test_extend_from_proxy(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
        case_insensitive_multidict_proxy_class: type[CIMultiDictProxy[str]],
    ) -> None:
        d = case_insensitive_multidict_class([("a", "a"), ("b", "b")])
        proxy = case_insensitive_multidict_proxy_class(d)
        d2 = case_insensitive_multidict_class()
        d2.extend(proxy)
        assert [("a", "a"), ("b", "b")] == list(d2.items())

    def test_clear(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_insensitive_multidict_class([("KEY", "one")], key="two", foo="bar")
        d.clear()
        assert d == {}
        assert list(d.items()) == []

    def test_del(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_insensitive_multidict_class(
            [("KEY", "one"), ("key", "two")],
            foo="bar",
        )
        # deletion is case-insensitive: removes both "KEY" and "key" entries
        del d["key"]
        assert d == {"foo": "bar"}
        assert list(d.items()) == [("foo", "bar")]
        with pytest.raises(KeyError, match="key"):
            del d["key"]

    def test_set_default(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_insensitive_multidict_class(
            [("KEY", "one"), ("key", "two")],
            foo="bar",
        )
        assert "one" == d.setdefault("key", "three")
        assert "three" == d.setdefault("otherkey", "three")
        assert "otherkey" in d
        assert ("otherkey", "three") in d.items()
        assert "three" == d["OTHERKEY"]

    def test_popitem(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_insensitive_multidict_class()
        d.add("KEY", "val1")
        d.add("key", "val2")
        pair = d.popitem()
        assert ("key", "val2") == pair
        assert isinstance(pair[0], str)
        assert [("KEY", "val1")] == list(d.items())

    def test_popitem_empty_multidict(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_insensitive_multidict_class()
        with pytest.raises(KeyError):
            d.popitem()

    def test_pop(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_insensitive_multidict_class()
        d.add("KEY", "val1")
        d.add("key", "val2")
        assert "val1" == d.pop("KEY")
        assert {"key": "val2"} == d

    def test_pop_lowercase(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_insensitive_multidict_class()
        d.add("KEY", "val1")
        d.add("key", "val2")
        # case-insensitive pop: "key" matches the "KEY" entry first
        assert "val1" == d.pop("key")
        assert {"key": "val2"} == d

    def test_pop_default(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_insensitive_multidict_class(OTHER="val")
        assert "default" == d.pop("key", "default")
        assert "other" in d

    def test_pop_raises(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d = case_insensitive_multidict_class(OTHER="val")
        with pytest.raises(KeyError, match="KEY"):
            d.pop("KEY")
        assert "other" in d

    def test_extend_with_istr(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
        case_insensitive_str_class: type[istr],
    ) -> None:
        us = case_insensitive_str_class("aBc")
        d = case_insensitive_multidict_class()
        d.extend([(us, "val")])
        assert [("aBc", "val")] == list(d.items())

    def test_copy_istr(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
        case_insensitive_str_class: type[istr],
    ) -> None:
        d = case_insensitive_multidict_class({case_insensitive_str_class("Foo"): "bar"})
        d2 = d.copy()
        assert d == d2

    def test_eq(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        d1 = case_insensitive_multidict_class(Key="val")
        d2 = case_insensitive_multidict_class(KEY="val")
        assert d1 == d2

    @pytest.mark.skipif(
        sys.implementation.name == "pypy",
        reason="getsizeof() is not implemented on PyPy",
    )
    def test_sizeof(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        md = case_insensitive_multidict_class()
        s1 = sys.getsizeof(md)
        for i in string.ascii_lowercase:
            for j in string.ascii_uppercase:
                md[i + j] = i + j
        # multidict should be resized
        s2 = sys.getsizeof(md)
        assert s2 > s1

    @pytest.mark.skipif(
        sys.implementation.name == "pypy",
        reason="getsizeof() is not implemented on PyPy",
    )
    def test_min_sizeof(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        md = case_insensitive_multidict_class()
        assert sys.getsizeof(md) < 1024

    def test_issue_620_items(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        # https://github.com/aio-libs/multidict/issues/620
        d = case_insensitive_multidict_class({"a": "123, 456", "b": "789"})
        before_mutation_items = d.items()
        d["c"] = "000"
        # This causes an error on pypy.
        list(before_mutation_items)

    def test_issue_620_keys(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        # https://github.com/aio-libs/multidict/issues/620
        d = case_insensitive_multidict_class({"a": "123, 456", "b": "789"})
        before_mutation_keys = d.keys()
        d["c"] = "000"
        # This causes an error on pypy.
        list(before_mutation_keys)

    def test_issue_620_values(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
    ) -> None:
        # https://github.com/aio-libs/multidict/issues/620
        d = case_insensitive_multidict_class({"a": "123, 456", "b": "789"})
        before_mutation_values = d.values()
        d["c"] = "000"
        # This causes an error on pypy.
        list(before_mutation_values)

    def test_keys_type(
        self,
        case_insensitive_multidict_class: type[CIMultiDict[str]],
        case_insensitive_str_class: type[istr],
    ) -> None:
        # every key coming out of a CI multidict must be the istr type,
        # regardless of how it went in
        d = case_insensitive_multidict_class(
            [
                ("KEY", "one"),
            ]
        )
        d["k2"] = "2"
        d.extend(k3="3")
        for k in d:
            assert type(k) is case_insensitive_str_class
        for k in d.keys():
            assert type(k) is case_insensitive_str_class
        for k, v in d.items():
            assert type(k) is case_insensitive_str_class
        k, v = d.popitem()
        assert type(k) is case_insensitive_str_class
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744320000.0
multidict-6.4.3/tests/test_mypy.py0000644000175100001660000001637614776033000016703 0ustar00runnerdocker
# This file doesn't use test parametrization because mypy doesn't know anything about it
# Concrete types are required import multidict def test_classes_not_abstract() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) multidict.MultiDictProxy(d1) multidict.CIMultiDictProxy(d2) def test_getitem() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) key = multidict.istr("a") assert d1["a"] == "b" assert d2["a"] == "b" assert d3["a"] == "b" assert d4["a"] == "b" assert d1[key] == "b" assert d2[key] == "b" assert d3[key] == "b" assert d4[key] == "b" def test_get() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) key = multidict.istr("a") assert d1.get("a") == "b" assert d2.get("a") == "b" assert d3.get("a") == "b" assert d4.get("a") == "b" assert d1.get(key) == "b" assert d2.get(key) == "b" assert d3.get(key) == "b" assert d4.get(key) == "b" def test_get_default() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) key = multidict.istr("b") assert d1.get("b", "d") == "d" assert d2.get("b", "d") == "d" assert d3.get("b", "d") == "d" assert d4.get("b", "d") == "d" assert d1.get(key, "d") == "d" assert d2.get(key, "d") == "d" assert d3.get(key, "d") == "d" assert d4.get(key, "d") == "d" def test_getone() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) key = multidict.istr("a") assert d1.getone("a") == "b" assert d2.getone("a") == "b" assert 
d3.getone("a") == "b" assert d4.getone("a") == "b" assert d1.getone(key) == "b" assert d2.getone(key) == "b" assert d3.getone(key) == "b" assert d4.getone(key) == "b" def test_getone_default() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) key = multidict.istr("b") assert d1.getone("b", 1) == 1 assert d2.getone("b", 1) == 1 assert d3.getone("b", 1) == 1 assert d4.getone("b", 1) == 1 assert d1.getone(key, 1) == 1 assert d2.getone(key, 1) == 1 assert d3.getone(key, 1) == 1 assert d4.getone(key, 1) == 1 def test_getall() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) key = multidict.istr("a") assert d1.getall("a") == ["b"] assert d2.getall("a") == ["b"] assert d3.getall("a") == ["b"] assert d4.getall("a") == ["b"] assert d1.getall(key) == ["b"] assert d2.getall(key) == ["b"] assert d3.getall(key) == ["b"] assert d4.getall(key) == ["b"] def test_getall_default() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) key = multidict.istr("b") assert d1.getall("b", 1) == 1 assert d2.getall("b", 1) == 1 assert d3.getall("b", 1) == 1 assert d4.getall("b", 1) == 1 assert d1.getall(key, 1) == 1 assert d2.getall(key, 1) == 1 assert d3.getall(key, 1) == 1 assert d4.getall(key, 1) == 1 def test_copy() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) assert d1.copy() == d1 assert d2.copy() == d2 assert d3.copy() == d1 assert d4.copy() == d2 def 
test_iter() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) d3 = multidict.MultiDictProxy(d1) d4 = multidict.CIMultiDictProxy(d2) for i in d1: i.lower() # str-specific class for i in d2: i.lower() # str-specific class for i in d3: i.lower() # str-specific class for i in d4: i.lower() # str-specific class def test_setitem() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) key = multidict.istr("a") d1["a"] = "b" d2["a"] = "b" d1[key] = "b" d2[key] = "b" def test_delitem() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) del d1["a"] del d2["a"] key = multidict.istr("a") d3: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d4: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) del d3[key] del d4[key] def test_additem() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) key = multidict.istr("a") d1.add("a", "b") d2.add("a", "b") d1.add(key, "b") d2.add(key, "b") def test_extend_mapping() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) key = multidict.istr("a") d1.extend({"a": "b"}) d2.extend({"a": "b"}) d1.extend({key: "b"}) d2.extend({key: "b"}) def test_update_mapping() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) key = multidict.istr("a") d1.update({"a": "b"}) d2.update({"a": "b"}) d1.update({key: "b"}) d2.update({key: "b"}) def test_popone() -> None: d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"}) d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"}) assert d1.popone("a") == "b" 
def test_popall() -> None:
    """popall() removes and returns all values for str and istr keys."""
    d1: multidict.MultiDict[str] = multidict.MultiDict({"a": "b"})
    d2: multidict.CIMultiDict[str] = multidict.CIMultiDict({"a": "b"})
    assert d1.popall("a") == ["b"]
    assert d2.popall("a") == ["b"]

    key = multidict.istr("a")
    # Rebuild: the previous popall() calls emptied both dicts.
    d1 = multidict.MultiDict({"a": "b"})
    d2 = multidict.CIMultiDict({"a": "b"})
    assert d1.popall(key) == ["b"]
    assert d2.popall(key) == ["b"]


# --- tar member boundary: multidict-6.4.3/tests/test_pickle.py ---

import pickle
from pathlib import Path
from typing import TYPE_CHECKING

import pytest

from multidict import MultiDict, MultiDictProxy, istr

if TYPE_CHECKING:
    from conftest import MultidictImplementation

# Directory holding the pre-generated pickle fixture files.
here = Path(__file__).resolve().parent


def test_pickle(
    any_multidict_class: type[MultiDict[int]], pickle_protocol: int
) -> None:
    """A multidict round-trips through pickle, keeping type and contents."""
    d = any_multidict_class([("a", 1), ("a", 2)])
    pbytes = pickle.dumps(d, pickle_protocol)
    obj = pickle.loads(pbytes)
    assert d == obj
    assert isinstance(obj, any_multidict_class)


def test_pickle_proxy(
    any_multidict_class: type[MultiDict[int]],
    any_multidict_proxy_class: type[MultiDictProxy[int]],
) -> None:
    """Proxies are read-only views and must refuse pickling."""
    d = any_multidict_class([("a", 1), ("a", 2)])
    proxy = any_multidict_proxy_class(d)
    with pytest.raises(TypeError):
        pickle.dumps(proxy)


def test_pickle_istr(
    case_insensitive_str_class: type[istr], pickle_protocol: int
) -> None:
    """An istr round-trips through pickle, keeping type and value."""
    s = case_insensitive_str_class("str")
    pbytes = pickle.dumps(s, pickle_protocol)
    obj = pickle.loads(pbytes)
    assert s == obj
    assert isinstance(obj, case_insensitive_str_class)


def test_load_from_file(
    any_multidict_class: type[MultiDict[int]],
    multidict_implementation: "MultidictImplementation",
    pickle_protocol: int,
) -> None:
    """Pickles produced by earlier releases still load correctly."""
    multidict_class_name = any_multidict_class.__name__
    pickle_file_basename = "-".join(
        (
            multidict_class_name.lower(),
            multidict_implementation.tag,
        )
    )
    d = any_multidict_class([("a", 1), ("a", 2)])
    fname = f"{pickle_file_basename}.pickle.{pickle_protocol}"
    p = here / fname
    with p.open("rb") as f:
        obj = pickle.load(f)
    assert d == obj
    assert isinstance(obj, any_multidict_class)
def test_load_istr_from_file(
    case_insensitive_str_class: type[istr],
    multidict_implementation: "MultidictImplementation",
    pickle_protocol: int,
) -> None:
    """istr pickles produced by earlier releases still load correctly."""
    istr_class_name = case_insensitive_str_class.__name__
    pickle_file_basename = "-".join(
        (
            istr_class_name.lower(),
            multidict_implementation.tag,
        )
    )
    s = case_insensitive_str_class("str")
    fname = f"{pickle_file_basename}.pickle.{pickle_protocol}"
    p = here / fname
    with p.open("rb") as f:
        obj = pickle.load(f)
    assert s == obj
    assert isinstance(obj, case_insensitive_str_class)


# --- tar member boundary: multidict-6.4.3/tests/test_types.py ---

import types

import pytest


def test_proxies(multidict_module: types.ModuleType) -> None:
    """CIMultiDictProxy subclasses MultiDictProxy."""
    assert issubclass(
        multidict_module.CIMultiDictProxy,
        multidict_module.MultiDictProxy,
    )


def test_dicts(multidict_module: types.ModuleType) -> None:
    """CIMultiDict subclasses MultiDict."""
    assert issubclass(multidict_module.CIMultiDict, multidict_module.MultiDict)


def test_proxy_not_inherited_from_dict(multidict_module: types.ModuleType) -> None:
    """The proxy type is not a MultiDict subclass."""
    assert not issubclass(multidict_module.MultiDictProxy, multidict_module.MultiDict)


def test_dict_not_inherited_from_proxy(multidict_module: types.ModuleType) -> None:
    """MultiDict is not a proxy subclass."""
    assert not issubclass(multidict_module.MultiDict, multidict_module.MultiDictProxy)


def test_multidict_proxy_copy_type(multidict_module: types.ModuleType) -> None:
    """Copying a MultiDictProxy yields a mutable MultiDict."""
    d = multidict_module.MultiDict(key="val")
    p = multidict_module.MultiDictProxy(d)
    assert isinstance(p.copy(), multidict_module.MultiDict)
def test_cimultidict_proxy_copy_type(multidict_module: types.ModuleType) -> None:
    """Copying a CIMultiDictProxy yields a mutable CIMultiDict."""
    d = multidict_module.CIMultiDict(key="val")
    p = multidict_module.CIMultiDictProxy(d)
    assert isinstance(p.copy(), multidict_module.CIMultiDict)


def test_create_multidict_proxy_from_nonmultidict(
    multidict_module: types.ModuleType,
) -> None:
    """MultiDictProxy rejects a plain dict argument."""
    with pytest.raises(TypeError):
        multidict_module.MultiDictProxy({})


def test_create_multidict_proxy_from_cimultidict(
    multidict_module: types.ModuleType,
) -> None:
    """MultiDictProxy accepts a CIMultiDict (its subclass) as source."""
    d = multidict_module.CIMultiDict(key="val")
    p = multidict_module.MultiDictProxy(d)
    assert p == d


def test_create_multidict_proxy_from_multidict_proxy_from_mdict(
    multidict_module: types.ModuleType,
) -> None:
    """A proxy may wrap another proxy and compare equal to it."""
    d = multidict_module.MultiDict(key="val")
    p = multidict_module.MultiDictProxy(d)
    assert p == d
    p2 = multidict_module.MultiDictProxy(p)
    assert p2 == p


def test_create_cimultidict_proxy_from_cimultidict_proxy_from_ci(
    multidict_module: types.ModuleType,
) -> None:
    """A CI proxy may wrap another CI proxy and compare equal to it."""
    d = multidict_module.CIMultiDict(key="val")
    p = multidict_module.CIMultiDictProxy(d)
    assert p == d
    p2 = multidict_module.CIMultiDictProxy(p)
    assert p2 == p


def test_create_cimultidict_proxy_from_nonmultidict(
    multidict_module: types.ModuleType,
) -> None:
    """CIMultiDictProxy rejects a plain dict with a descriptive message."""
    with pytest.raises(
        TypeError,
        match=(
            "ctor requires CIMultiDict or CIMultiDictProxy instance, "
            "not "
        ),
    ):
        multidict_module.CIMultiDictProxy({})


def test_create_ci_multidict_proxy_from_multidict(
    multidict_module: types.ModuleType,
) -> None:
    """CIMultiDictProxy rejects a case-sensitive MultiDict source."""
    d = multidict_module.MultiDict(key="val")
    with pytest.raises(
        TypeError,
        match=(
            "ctor requires CIMultiDict or CIMultiDictProxy instance, "
            "not "
        ),
    ):
        multidict_module.CIMultiDictProxy(d)


def test_generic_alias(multidict_module: types.ModuleType) -> None:
    """Subscripting each class produces the expected types.GenericAlias."""
    assert multidict_module.MultiDict[int] == types.GenericAlias(
        multidict_module.MultiDict, (int,)
    )
    assert multidict_module.MultiDictProxy[int] == types.GenericAlias(
        multidict_module.MultiDictProxy, (int,)
    )
    assert multidict_module.CIMultiDict[int] == types.GenericAlias(
        multidict_module.CIMultiDict, (int,)
    )
    assert multidict_module.CIMultiDictProxy[int] == types.GenericAlias(
        multidict_module.CIMultiDictProxy, (int,)
    )
# --- tar member boundary: multidict-6.4.3/tests/test_update.py ---

from collections import deque
from typing import Union

from multidict import CIMultiDict, MultiDict

# Either flavor of mutable multidict class, parameterized over int values.
_MD_Classes = Union[type[MultiDict[int]], type[CIMultiDict[int]]]


def test_update_replace(any_multidict_class: _MD_Classes) -> None:
    """update() overwrites duplicate keys in place, preserving order."""
    obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
    obj2 = any_multidict_class([("a", 4), ("b", 5), ("a", 6)])
    obj1.update(obj2)
    expected = [("a", 4), ("b", 5), ("a", 6), ("c", 10)]
    assert list(obj1.items()) == expected


def test_update_append(any_multidict_class: _MD_Classes) -> None:
    """Extra duplicate values from the source are appended at the end."""
    obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
    obj2 = any_multidict_class([("a", 4), ("a", 5), ("a", 6)])
    obj1.update(obj2)
    expected = [("a", 4), ("b", 2), ("a", 5), ("c", 10), ("a", 6)]
    assert list(obj1.items()) == expected


def test_update_remove(any_multidict_class: _MD_Classes) -> None:
    """Surplus duplicates of an updated key are removed."""
    obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
    obj2 = any_multidict_class([("a", 4)])
    obj1.update(obj2)
    expected = [("a", 4), ("b", 2), ("c", 10)]
    assert list(obj1.items()) == expected


def test_update_replace_seq(any_multidict_class: _MD_Classes) -> None:
    """update() from a pairs sequence behaves like update() from a dict."""
    obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
    obj2 = [("a", 4), ("b", 5), ("a", 6)]
    obj1.update(obj2)
    expected = [("a", 4), ("b", 5), ("a", 6), ("c", 10)]
    assert list(obj1.items()) == expected


def test_update_replace_seq2(any_multidict_class: _MD_Classes) -> None:
    """A pairs sequence and keyword arguments combine in one update()."""
    obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
    obj1.update([("a", 4)], b=5, a=6)
    expected = [("a", 4), ("b", 5), ("a", 6), ("c", 10)]
    assert list(obj1.items()) == expected


def test_update_append_seq(any_multidict_class: _MD_Classes) -> None:
    """Appending duplicates works for a pairs-sequence source too."""
    obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
    obj2 = [("a", 4), ("a", 5), ("a", 6)]
    obj1.update(obj2)
    expected = [("a", 4), ("b", 2), ("a", 5), ("c", 10), ("a", 6)]
    assert list(obj1.items()) == expected


def test_update_remove_seq(any_multidict_class: _MD_Classes) -> None:
    """Removing surplus duplicates works for a pairs-sequence source too."""
    obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
    obj2 = [("a", 4)]
    obj1.update(obj2)
    expected = [("a", 4), ("b", 2), ("c", 10)]
    assert list(obj1.items()) == expected


def test_update_md(case_sensitive_multidict_class: type[CIMultiDict[str]]) -> None:
    """Keyword update collapses duplicate values of the same key."""
    d = case_sensitive_multidict_class()
    d.add("key", "val1")
    d.add("key", "val2")
    d.add("key2", "val3")
    d.update(key="val")
    assert [("key", "val"), ("key2", "val3")] == list(d.items())


def test_update_istr_ci_md(
    case_insensitive_multidict_class: type[CIMultiDict[str]],
    case_insensitive_str_class: type[str],
) -> None:
    """Case-insensitive update matches keys added with differing case."""
    d = case_insensitive_multidict_class()
    d.add(case_insensitive_str_class("KEY"), "val1")
    d.add("key", "val2")
    d.add("key2", "val3")
    d.update({case_insensitive_str_class("key"): "val"})
    assert [("key", "val"), ("key2", "val3")] == list(d.items())


def test_update_ci_md(case_insensitive_multidict_class: type[CIMultiDict[str]]) -> None:
    """Case-insensitive keyword update replaces the stored key's casing."""
    d = case_insensitive_multidict_class()
    d.add("KEY", "val1")
    d.add("key", "val2")
    d.add("key2", "val3")
    d.update(Key="val")
    assert [("Key", "val"), ("key2", "val3")] == list(d.items())


def test_update_list_arg_and_kwds(any_multidict_class: _MD_Classes) -> None:
    """update() must not mutate a list argument passed alongside kwargs."""
    obj = any_multidict_class()
    arg = [("a", 1)]
    obj.update(arg, b=2)
    assert list(obj.items()) == [("a", 1), ("b", 2)]
    assert arg == [("a", 1)]


def test_update_tuple_arg_and_kwds(any_multidict_class: _MD_Classes) -> None:
    """update() must not mutate a tuple argument passed alongside kwargs."""
    obj = any_multidict_class()
    arg = (("a", 1),)
    obj.update(arg, b=2)
    assert list(obj.items()) == [("a", 1), ("b", 2)]
    assert arg == (("a", 1),)


def test_update_deque_arg_and_kwds(any_multidict_class: _MD_Classes) -> None:
    """update() must not mutate a deque argument passed alongside kwargs."""
    obj = any_multidict_class()
    arg = deque([("a", 1)])
    obj.update(arg, b=2)
    assert list(obj.items()) == [("a", 1), ("b", 2)]
    assert arg == deque([("a", 1)])
# --- tar member boundary: multidict-6.4.3/tests/test_version.py ---

from collections.abc import Callable
from typing import TypeVar, Union

import pytest

from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy

_T = TypeVar("_T")
# Any multidict flavor, mutable or proxy.
_MD_Types = Union[
    MultiDict[_T], CIMultiDict[_T], MultiDictProxy[_T], CIMultiDictProxy[_T]
]
# Signature of the implementation-specific getversion() helper.
GetVersion = Callable[[_MD_Types[_T]], int]


def test_getversion_bad_param(multidict_getversion_callable: GetVersion[str]) -> None:
    """getversion() rejects non-multidict arguments."""
    with pytest.raises(TypeError):
        multidict_getversion_callable(1)  # type: ignore[arg-type]


def test_ctor(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """Every freshly constructed multidict gets a distinct version."""
    m1 = any_multidict_class()
    v1 = multidict_getversion_callable(m1)
    m2 = any_multidict_class()
    v2 = multidict_getversion_callable(m2)
    assert v1 != v2


def test_add(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """add() bumps the version."""
    m = any_multidict_class()
    v = multidict_getversion_callable(m)
    m.add("key", "val")
    assert multidict_getversion_callable(m) > v


def test_delitem(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """Successful __delitem__ bumps the version."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    del m["key"]
    assert multidict_getversion_callable(m) > v


def test_delitem_not_found(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """A failed __delitem__ leaves the version untouched."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    with pytest.raises(KeyError):
        del m["notfound"]
    assert multidict_getversion_callable(m) == v


def test_setitem(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """Replacing an existing key via __setitem__ bumps the version."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    m["key"] = "val2"
    assert multidict_getversion_callable(m) > v


def test_setitem_not_found(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """Inserting a new key via __setitem__ bumps the version."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    m["notfound"] = "val2"
    assert multidict_getversion_callable(m) > v


def test_clear(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """clear() bumps the version."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    m.clear()
    assert multidict_getversion_callable(m) > v


def test_setdefault(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """setdefault() that inserts a new key bumps the version."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    m.setdefault("key2", "val2")
    assert multidict_getversion_callable(m) > v


def test_popone(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """A successful popone() bumps the version."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    m.popone("key")
    assert multidict_getversion_callable(m) > v


def test_popone_default(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """popone() that only returns the default leaves the version untouched."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    m.popone("key2", "default")
    assert multidict_getversion_callable(m) == v


def test_popone_key_error(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """popone() that raises KeyError leaves the version untouched."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    with pytest.raises(KeyError):
        m.popone("key2")
    assert multidict_getversion_callable(m) == v


def test_pop(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """A successful pop() bumps the version."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    m.pop("key")
    assert multidict_getversion_callable(m) > v


def test_pop_default(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """pop() that only returns the default leaves the version untouched."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    m.pop("key2", "default")
    assert multidict_getversion_callable(m) == v


def test_pop_key_error(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """pop() that raises KeyError leaves the version untouched."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    with pytest.raises(KeyError):
        m.pop("key2")
    assert multidict_getversion_callable(m) == v


def test_popall(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """A successful popall() bumps the version."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    m.popall("key")
    assert multidict_getversion_callable(m) > v


def test_popall_default(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """popall() that only returns the default leaves the version untouched."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    m.popall("key2", "default")
    assert multidict_getversion_callable(m) == v


def test_popall_key_error(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """popall() that raises KeyError leaves the version untouched."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    with pytest.raises(KeyError):
        m.popall("key2")
    assert multidict_getversion_callable(m) == v


def test_popitem(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """A successful popitem() bumps the version."""
    m = any_multidict_class()
    m.add("key", "val")
    v = multidict_getversion_callable(m)
    m.popitem()
    assert multidict_getversion_callable(m) > v


def test_popitem_key_error(
    any_multidict_class: type[MultiDict[str]],
    multidict_getversion_callable: GetVersion[str],
) -> None:
    """popitem() on an empty multidict leaves the version untouched."""
    m = any_multidict_class()
    v = multidict_getversion_callable(m)
    with pytest.raises(KeyError):
        m.popitem()
    assert multidict_getversion_callable(m) == v
# --- tar member boundary: multidict-6.4.3/tests/test_views_benchmarks.py ---

"""codspeed benchmarks for multidict views."""

from typing import Type

from pytest_codspeed import BenchmarkFixture

from multidict import MultiDict


def test_keys_view_equals(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark keys() view equality on identical contents."""
    md1: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    md2: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})

    @benchmark
    def _run() -> None:
        assert md1.keys() == md2.keys()


def test_keys_view_not_equals(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark keys() view inequality on overlapping contents."""
    md1: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    md2: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(20, 120)})

    @benchmark
    def _run() -> None:
        assert md1.keys() != md2.keys()


def test_keys_view_more(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark keys() proper-superset comparison against a set."""
    md: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    s = {str(i) for i in range(50)}

    @benchmark
    def _run() -> None:
        assert md.keys() > s


def test_keys_view_more_or_equal(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark keys() superset-or-equal comparison against a set."""
    md: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    s = {str(i) for i in range(100)}

    @benchmark
    def _run() -> None:
        assert md.keys() >= s


def test_keys_view_less(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark keys() proper-subset comparison against a set."""
    md: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    s = {str(i) for i in range(150)}

    @benchmark
    def _run() -> None:
        assert md.keys() < s


def test_keys_view_less_or_equal(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark keys() subset-or-equal comparison against a set."""
    md: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    s = {str(i) for i in range(100)}

    @benchmark
    def _run() -> None:
        assert md.keys() <= s


def test_keys_view_and(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark keys() intersection between two views."""
    md1: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    md2: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(50, 150)})

    @benchmark
    def _run() -> None:
        assert len(md1.keys() & md2.keys()) == 50


def test_keys_view_or(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark keys() union between two views."""
    md1: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    md2: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(50, 150)})

    @benchmark
    def _run() -> None:
        assert len(md1.keys() | md2.keys()) == 150


def test_keys_view_sub(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark keys() difference between two views."""
    md1: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    md2: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(50, 150)})

    @benchmark
    def _run() -> None:
        assert len(md1.keys() - md2.keys()) == 50


def test_keys_view_xor(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark keys() symmetric difference between two views."""
    md1: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    md2: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(50, 150)})

    @benchmark
    def _run() -> None:
        assert len(md1.keys() ^ md2.keys()) == 100


def test_keys_view_is_disjoint(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark keys().isdisjoint() on non-overlapping views."""
    md1: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    md2: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100, 200)})

    @benchmark
    def _run() -> None:
        assert md1.keys().isdisjoint(md2.keys())


def test_keys_view_repr(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark repr() of a keys() view."""
    md: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})

    @benchmark
    def _run() -> None:
        repr(md.keys())


def test_items_view_equals(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark items() view equality on identical contents."""
    md1: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    md2: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})

    @benchmark
    def _run() -> None:
        assert md1.items() == md2.items()


def test_items_view_not_equals(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark items() view inequality on overlapping contents."""
    md1: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    md2: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(20, 120)})

    @benchmark
    def _run() -> None:
        assert md1.items() != md2.items()


def test_items_view_more(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark items() proper-superset comparison against a set."""
    md: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    s = {(str(i), str(i)) for i in range(50)}

    @benchmark
    def _run() -> None:
        assert md.items() > s


def test_items_view_more_or_equal(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark items() superset-or-equal comparison against a set."""
    md: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    s = {(str(i), str(i)) for i in range(100)}

    @benchmark
    def _run() -> None:
        assert md.items() >= s


def test_items_view_less(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark items() proper-subset comparison against a set."""
    md: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    s = {(str(i), str(i)) for i in range(150)}

    @benchmark
    def _run() -> None:
        assert md.items() < s


def test_items_view_less_or_equal(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark items() subset-or-equal comparison against a set."""
    md: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    s = {(str(i), str(i)) for i in range(100)}

    @benchmark
    def _run() -> None:
        assert md.items() <= s


def test_items_view_and(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark items() intersection between two views."""
    md1: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    md2: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(50, 150)})

    @benchmark
    def _run() -> None:
        assert len(md1.items() & md2.items()) == 50


def test_items_view_or(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark items() union between two views."""
    md1: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    md2: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(50, 150)})

    @benchmark
    def _run() -> None:
        assert len(md1.items() | md2.items()) == 150


def test_items_view_sub(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark items() difference between two views."""
    md1: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    md2: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(50, 150)})

    @benchmark
    def _run() -> None:
        assert len(md1.items() - md2.items()) == 50


def test_items_view_xor(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark items() symmetric difference between two views."""
    md1: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    md2: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(50, 150)})

    @benchmark
    def _run() -> None:
        assert len(md1.items() ^ md2.items()) == 100


def test_items_view_is_disjoint(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark items().isdisjoint() on non-overlapping views."""
    md1: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})
    md2: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100, 200)})

    @benchmark
    def _run() -> None:
        assert md1.items().isdisjoint(md2.items())


def test_items_view_repr(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark repr() of an items() view."""
    md: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})

    @benchmark
    def _run() -> None:
        repr(md.items())


def test_values_view_repr(
    benchmark: BenchmarkFixture, any_multidict_class: Type[MultiDict[str]]
) -> None:
    """Benchmark repr() of a values() view."""
    md: MultiDict[str] = any_multidict_class({str(i): str(i) for i in range(100)})

    @benchmark
    def _run() -> None:
        repr(md.values())