pax_global_header00006660000000000000000000000064141733224340014515gustar00rootroot0000000000000052 comment=310129b1cc90af679a1f81db131a9edf09beee24 collections-extended-2.0.2/000077500000000000000000000000001417332243400156325ustar00rootroot00000000000000collections-extended-2.0.2/.bumpversion.cfg000066400000000000000000000003421417332243400207410ustar00rootroot00000000000000[bumpversion] current_version = 2.0.2 commit = True tag = True [bumpversion:file:collections_extended/_version.py] [bumpversion:file:pyproject.toml] search = version = "{current_version}" replace = version = "{new_version}" collections-extended-2.0.2/.editorconfig000066400000000000000000000003271417332243400203110ustar00rootroot00000000000000# http://editorconfig.org root = true [*] indent_style = tab end_of_line = lf charset = utf-8 insert_final_newline = true trim_trailing_whitespace = false [*.{py,ini,yaml,yml,rst}] trim_trailing_whitespace = true collections-extended-2.0.2/.github/000077500000000000000000000000001417332243400171725ustar00rootroot00000000000000collections-extended-2.0.2/.github/workflows/000077500000000000000000000000001417332243400212275ustar00rootroot00000000000000collections-extended-2.0.2/.github/workflows/python-package.yml000066400000000000000000000057131417332243400246720ustar00rootroot00000000000000name: Python package on: [push, pull_request] jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Set up Python uses: actions/setup-python@v2 with: python-version: 3.9 - name: Install dependencies run: make deps - name: Build package run: poetry build docs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Set up Python uses: actions/setup-python@v2 with: python-version: 3.9 - name: Install dependencies run: make deps - name: Build Docs run: poetry run make docs - uses: actions/upload-artifact@v2 with: name: docs path: docs/_build/html docs-publish: runs-on: ubuntu-latest needs: docs if: ${{ github.ref == 'refs/heads/master' }} 
steps: - uses: actions/checkout@v2 with: ref: gh-pages - name: Download docs artifact uses: actions/download-artifact@v2 with: name: docs path: docs/_build/html - name: Replace docs run: | git rm -r . git reset HEAD .nojekyll CNAME .gitignore git checkout -- .nojekyll CNAME .gitignore cp -r docs/_build/html/* . git add . git config --local user.email "action@github.com" git config --local user.name "GitHub Action" git commit -m 'Triggered by ${{ github.sha }}' - name: Push changes uses: ad-m/github-push-action@v0.5.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages lint: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Set up Python uses: actions/setup-python@v2 with: python-version: 3.9 - name: Install dependencies run: make deps - name: Lint with flake8 run: make lint - name: Check for unfinished code run: make fixme-check tests: runs-on: ubuntu-latest strategy: matrix: python-version: ['3.6', '3.7', '3.8', '3.9', '3.10', pypy-3.6, pypy-3.7] steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: make deps - name: Test with pytest run: poetry run coverage run --source collections_extended -m pytest # coverage: # # runs-on: ubuntu-latest # # needs: tests # env: # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # # steps: # - uses: actions/checkout@v2 # - name: Set up Python # uses: actions/setup-python@v2 # with: # python-version: 3.9 # - name: Install dependencies # run: | # pip install --upgrade pip setuptools coveralls # pip install -r requirements.txt # # - name: Upload Coverage Results # run: coveralls # after_success: # - coveralls collections-extended-2.0.2/.gitignore000066400000000000000000000006031417332243400176210ustar00rootroot00000000000000*.py[cod] .pytest_cache # C extensions *.so # Packages *.egg *.egg-info dist build eggs .eggs parts bin var sdist develop-eggs .installed.cfg lib 
lib64 # Installer logs pip-log.txt # Unit test / coverage reports .coverage htmlcov .tox nosetests.xml .hypothesis .pytest_cache # Translations *.mo # Sphinx docs docs/_build MANIFEST env venv .env .venv dev-env .cache .hypothesis collections-extended-2.0.2/CODE_OF_CONDUCT.md000066400000000000000000000121431417332243400204320ustar00rootroot00000000000000# Contributor Covenant Code of Conduct ## Our Pledge We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. ## Our Standards Examples of behavior that contributes to a positive environment for our community include: * Demonstrating empathy and kindness toward other people * Being respectful of differing opinions, viewpoints, and experiences * Giving and gracefully accepting constructive feedback * Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience * Focusing on what is best not just for us as individuals, but for the overall community Examples of unacceptable behavior include: * The use of sexualized language or imagery, and sexual attention or advances of any kind * Trolling, insulting or derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or email address, without their explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Enforcement Responsibilities Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior 
and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful. Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate. ## Scope This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at conduct@lenzm.net. All complaints will be reviewed and investigated promptly and fairly. All community leaders are obligated to respect the privacy and security of the reporter of any incident. ## Enforcement Guidelines Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: ### 1. Correction **Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community. **Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested. ### 2. Warning **Community Impact**: A violation through a single incident or series of actions. **Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. 
This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban. ### 3. Temporary Ban **Community Impact**: A serious violation of community standards, including sustained inappropriate behavior. **Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban. ### 4. Permanent Ban **Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. **Consequence**: A permanent ban from any sort of public interaction within the community. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity). [homepage]: https://www.contributor-covenant.org For answers to common questions about this code of conduct, see the FAQ at https://www.contributor-covenant.org/faq. Translations are available at https://www.contributor-covenant.org/translations. collections-extended-2.0.2/CONTRIBUTING.rst000066400000000000000000000066461417332243400203070ustar00rootroot00000000000000============ Contributing ============ Contributions are welcome, and they are greatly appreciated! Every little bit helps, and credit will always be given. You can contribute in many ways: Types of Contributions ---------------------- Report Bugs ~~~~~~~~~~~ Report bugs using `GitHub Issues`_. 
If you are reporting a bug, please include: * Any details about your local setup that might be helpful in troubleshooting. * Detailed steps to reproduce the bug. Fix Bugs ~~~~~~~~ Look through the GitHub Issues for bugs. Anything tagged with "bug" is open to whoever wants to implement it. Write Documentation ~~~~~~~~~~~~~~~~~~~ This could always use more documentation, whether as part of the official docs, in docstrings, or even on the web in blog posts, articles, and such. Documentation is built automatically on every push to master using GitHub Actions. Submit Feedback ~~~~~~~~~~~~~~~ The best way to send feedback is to file an issue using `GitHub Issues`_. If you are proposing a feature: * Explain in detail how it would work. * Keep the scope as narrow as possible, to make it easier to implement. * Remember that this is a volunteer-driven project, and that contributions are welcome :) Get Started! ------------ After checking out the project, running ``make`` at any time will clean up and set up a fresh dev environment. Read the ``Makefile`` for more common tasks/recipes. Ready to contribute? Here's how to set up `collections-extended` for local development. #. Fork the ``collections-extended`` repo on GitHub. #. Clone your fork locally:: $ git clone git@github.com:your_name_here/collections-extended.git #. Make sure you are excluding your editor's files from the repo. We don't want to use the project's gitignore to exclude every editor's files, so set up your global gitignore. See: https://help.github.com/articles/ignoring-files/ #. Set up your local dev environment:: $ cd collections-extended $ make #. Create a branch for local development:: $ git checkout -b name-of-your-bugfix-or-feature #. Make your changes locally. #. You may run checks locally without having to create a PR:: $ make lint $ make tests $ make testall $ make coverage #. Commit your changes and push your branch to GitHub:: $ git add . 
$ git commit $ git push origin name-of-your-bugfix-or-feature #. Submit a pull request through the GitHub website. Pull Request Guidelines ----------------------- Before you submit a pull request, check that it meets these guidelines: 1. The pull request should include tests. 2. If the pull request adds functionality, the docs should be updated. 3. The pull request should work for all supported versions. Check https://github.com/mlenzen/collections-extended/actions and make sure that the tests pass for all supported Python versions. 4. Add the feature/bug to the appropriate section in HISTORY.rst Tips ---- To run a subset of tests:: $ py.test tests/test_example.py $ py.test tests/test_example.py::test_func Useful Reading ~~~~~~~~~~~~~~ - Python docs - `3.3.7. Data model — Emulating container types `_ - `collections.abc — Abstract Base Classes for Containers `_ - `collections module source `_ .. _`GitHub Issues`: https://github.com/mlenzen/collections-extended/issues collections-extended-2.0.2/CONTRIBUTORS000066400000000000000000000005701417332243400175140ustar00rootroot00000000000000Mike Lenzen https://github.com/mlenzen Caleb Levy https://github.com/caleblevy Marein Könings https://github.com/MareinK Jad Kik https://github.com/jadkik Kuba Marek https://github.com/bluecube Itamar Turner-Trauring https://github.com/itamarst collections-extended-2.0.2/HISTORY.rst000066400000000000000000000051651417332243400175340ustar00rootroot00000000000000.. py:currentmodule:: collections_extended Change Log ========== 2.0.2 - 2022-01-23 ------------------ * Fix bug when setting overlapping ranges (`GH #172`_) .. _`GH #172`: https://github.com/mlenzen/collections-extended/issues/172 2.0.1 - 2022-01-19 ------------------ * Added Python 3.10 to tests & classifiers * Added equality testing to MappedRange (`GH #171`_) .. 
_`GH #171`: https://github.com/mlenzen/collections-extended/issues/171 2.0.0 - 2021-08-23 ------------------ * Drop support for Python 2.7, 3.4 & 3.5 * bags no longer inherit from Set * can no longer compare as equal to Sets * Rename and expose bag and set base classes * `_basebag` -> :class:`Bag` * `_basesetlist` -> :class:`SetList` * tuples passed to the bijection constructor must have len == 2, not >= 2 1.0.3 - 2019-11-23 ------------------ * Drop support for Python 2.6 & 3.3 * When multiplying bags, the cartesian product creates a tuple instead of adding the elements. * Added :class:`IndexedDict` * Improve efficiency for large bag operations * Add :meth:`setlist.swap` * Add :meth:`bag.count`, :class:`CountsView` & :class:`UniqueElementsView` * Add :meth:`bag.issubset` and :meth:`issuperset` * Add support for Python 3.8 * Add :class:`Sentinel` * Make :class:`MappedRange` a class instead of a namedtuple * Add change log 1.0.2 - 2018-06-30 ------------------ 1.0.1 - 2018-04-14 ------------------ 1.0.0 - 2017-10-17 ------------------ 0.10.1 - 2017-10-20 ------------------- 0.10.0 - 2017-10-20 ------------------- 0.9.0 - 2017-01-28 ------------------ 0.8.2 - 2016-10-24 ------------------ 0.8.1 - 2016-10-24 ------------------ 0.8.0 - 2016-08-21 ------------------ 0.7.2 - 2016-08-07 ------------------ 0.7.1 - 2016-08-07 ------------------ 0.7.0 - 2016-01-13 ------------------ 0.6.0 - 2015-10-18 ------------------ 0.5.2 - 2015-07-09 ------------------ 0.5.1 - 2015-07-08 ------------------ 0.5.0 - 2015-07-08 ------------------ 0.4.0 - 2015-03-29 ------------------ 0.3.1 - 2015-01-31 ------------------ 0.3.0 - 2015-01-31 ------------------ 0.2.0 - 2015-01-20 ------------------ Changed name from data-structures to collections-extended 0.1.6 - 2015-01-20 ------------------ Deprecated data-structures 0.1.5 - 2015-01-20 ------------------ 0.1.4 - 2014-05-24 ------------------ 0.1.3 - 2014-05-24 ------------------ Moved to Git & GitHub 0.1.2 - 2009-10-03 ------------------ 
0.1.1 - 2009-10-03 ------------------ ??? --- Moved to Google Code 0.1.0 - 2009-10-01 ------------------ Initial release published to PyPi 0.0.0 - 2009-07-14 ------------------ * Repository created * Modules organized in one package. * Hosted on SourceForge collections-extended-2.0.2/LICENSE000066400000000000000000000240411417332243400166400ustar00rootroot00000000000000Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: You must give any other recipients of the Work or Derivative Works a copy of this License; and You must cause any modified files to carry prominent notices stating that You changed the files; and You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; 
within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. collections-extended-2.0.2/Makefile000066400000000000000000000026471417332243400173030ustar00rootroot00000000000000PACKAGE = collections_extended VENV = $(shell poetry env info --path) .PHONY: default default: clean deps tests .PHONY: deps deps: pip install poetry poetry install --remove-untracked .PHONY: tests tests: poetry run py.test .PHONY: testall testall: poetry run tox .PHONY: clean clean: rm --recursive --force build rm --recursive --force dist rm --recursive --force *.egg-info find . -name *.py[co] -delete find . -name *~ -delete find . -name __pycache__ -delete find . 
-name *,cover -delete .PHONY: deep-clean deep-clean: clean clean-docs rm --recursive --force $(VENV) rm --recursive --force .eggs rm --recursive --force .pytest_cache rm --recursive --force .tox # Linting .PHONY: lint lint: poetry run flake8 --statistics --count poetry check .PHONY: fixme-check fixme-check: ! git grep FIXME | grep "^Makefile" --invert-match .PHONY: coverage coverage: poetry run coverage run --source $(PACKAGE) --module pytest poetry run coverage report --show-missing poetry run coverage html # Publishing .PHONY: publish publish: fixme-check lint testall publish-force .PHONY: publish-force publish-force: poetry build poetry publish git push git push --tags # Docs DOCS_BUILD = docs/_build .PHONY: clean-docs clean-docs: rm --force --recursive $(DOCS_BUILD) # rm --force docs/$(PACKAGE).rst # rm --force docs/modules.rst .PHONY: docs docs: clean-docs poetry run sphinx-build -b dirhtml docs $(DOCS_BUILD)/html collections-extended-2.0.2/README.rst000066400000000000000000000070761417332243400173330ustar00rootroot00000000000000README ###### .. image:: https://coveralls.io/repos/github/mlenzen/collections-extended/badge.svg?branch=master :target: https://coveralls.io/github/mlenzen/collections-extended?branch=master :alt: Coverage .. image:: https://pepy.tech/badge/collections-extended/week :target: https://pepy.tech/project/collections-extended/ :alt: Downloads Documentation: http://collections-extended.lenzm.net/ GitHub: https://github.com/mlenzen/collections-extended PyPI: https://pypi.python.org/pypi/collections-extended Overview ======== ``collections_extended`` is a pure Python module with no dependencies providing: - a ``bag`` class, AKA **multiset**, - a ``setlist`` class, which is a **unique list** or **ordered set**, - a ``bijection`` class, ``RangeMap`` which is a mapping from ranges to values and - a ``IndexedDict`` class, which is an ordered mapping whose elements can be accessed using index, in addition to key. 
There are also frozen (hashable) varieties of bags and setlists. Compatible with and tested against Python 3.6, 3.7, 3.8, 3.9, 3.10 & PyPy3. Getting Started =============== .. code-block:: python >>> from collections_extended import bag, setlist, bijection, RangeMap, IndexedDict >>> from datetime import date >>> b = bag('abracadabra') >>> b.count('a') 5 >>> b.remove('a') >>> b.count('a') 4 >>> 'a' in b True >>> b.count('d') 1 >>> b.remove('d') >>> b.count('d') 0 >>> 'd' in b False >>> sl = setlist('abracadabra') >>> sl setlist(('a', 'b', 'r', 'c', 'd')) >>> sl[3] 'c' >>> sl[-1] 'd' >>> 'r' in sl # testing for inclusion is fast True >>> sl.index('d') # so is finding the index of an element 4 >>> sl.insert(1, 'd') # inserting an element already in raises a ValueError Traceback (most recent call last): ... raise ValueError ValueError >>> sl.index('d') 4 >>> bij = bijection({'a': 1, 'b': 2, 'c': 3}) >>> bij.inverse[2] 'b' >>> bij['a'] = 2 >>> bij == bijection({'a': 2, 'c': 3}) True >>> bij.inverse[1] = 'a' >>> bij == bijection({'a': 1, 'c': 3}) True >>> version = RangeMap() >>> version[date(2017, 10, 20): date(2017, 10, 27)] = '0.10.1' >>> version[date(2017, 10, 27): date(2018, 2, 14)] = '1.0.0' >>> version[date(2018, 2, 14):] = '1.0.1' >>> version[date(2017, 10, 24)] '0.10.1' >>> version[date(2018, 7, 1)] '1.0.1' >>> version[date(2018, 6, 30):] = '1.0.2' >>> version[date(2018, 7, 1)] '1.0.2' >>> idict = IndexedDict() >>> idict['a'] = "A" >>> idict['b'] = "B" >>> idict['c'] = "C" >>> idict.get(key='a') 'A' >>> idict.get(index=2) 'C' >>> idict.index('b') 1 Installation ============ ``pip install collections-extended`` Usage ===== ``from collections_extended import bag, frozenbag, setlist, frozensetlist, bijection`` Classes ======= There are seven new collections provided: Bags ---- bag This is a bag AKA multiset. frozenbag This is a frozen (hashable) version of a bag. Setlists -------- setlist An ordered set or a list of unique elements depending on how you look at it. 
frozensetlist This is a frozen (hashable) version of a setlist. Mappings -------- bijection A one-to-one mapping. RangeMap A mapping from ranges (of numbers/dates/etc) IndexedDict A mapping that keeps insertion order and allows access by index. Python 2 -------- The package no longer supports Python 2. The last version to support Python 2.7, 3.4 & 3.5 was 1.0. No new feature releases will be done for 1.x but any significant bugs that come up may be fixed. :Author: Michael Lenzen :Copyright: 2021 Michael Lenzen :License: Apache License, Version 2.0 :Project Homepage: https://github.com/mlenzen/collections-extended collections-extended-2.0.2/RELEASE_CHECKLIST.rst000066400000000000000000000015671417332243400210460ustar00rootroot00000000000000Release Checklist ----------------- #. ``bumpversion [patch|minor|major]`` #. ``make publish`` #. Test that it pip installs #. Make a test virtual environment #. ``pip install collections-extended`` #. Confirm the new version number was installed #. Try it out #. Check the PyPI listing page to make sure that the README displays properly. If not, copy and paste the RestructuredText into http://rst.ninjs.org/ to find out what broke the formatting. New Python Versions ------------------- To add support for a new version of python, aside from any new functionality required, add version number to: #. tox.ini envlist #. .github/workflows/python-package.yml #. pyproject.toml classifiers #. README.rst description #. docs/index.rst list of versions New Year -------- When a new year arrives, it needs to be updated in: * README.rst * docs/index.rst * docs/conf.py collections-extended-2.0.2/artwork/000077500000000000000000000000001417332243400173235ustar00rootroot00000000000000collections-extended-2.0.2/artwork/favicon.ico000066400000000000000000000021761417332243400214520ustar00rootroot00000000000000 h(   L L '' .r77QnnQ77r..s77QnnQ77s. 
'' 33collections-extended-2.0.2/artwork/logo.inkscape.svg000066400000000000000000000130661417332243400226060ustar00rootroot00000000000000 image/svg+xml collections-extended collections-extended-2.0.2/artwork/logo.svg000066400000000000000000000100001417332243400207730ustar00rootroot00000000000000 image/svg+xml collections-extended collections-extended-2.0.2/artwork/mark.inkscape.svg000066400000000000000000000116171417332243400226000ustar00rootroot00000000000000 image/svg+xml collections-extended-2.0.2/artwork/mark.svg000066400000000000000000000066341417332243400210070ustar00rootroot00000000000000 image/svg+xml collections-extended-2.0.2/artwork/square-mark.png000066400000000000000000001042361417332243400222670ustar00rootroot00000000000000PNG  IHDRsBIT|d pHYsL L f\tEXtSoftwarewww.inkscape.org< IDATx ٻ+0?X Q"hL4`+ )֯@IkgN RA QP̘5잝J{1>0X_T\cC`)hNt IDATu:9ߪvO9]]nTߏ Y &i9QD* e+|C-w(Ml,S%dsy\]jXi'M8Z]n6})Bf[gn+gqڭ&{XK/;ȴ]}Z ){w]wFBy0 2IADZ<أQǴ,J ^բV4*-GKZjm2a&!a@0$[_\u?w{~ >W>US5`-=:AwTUؐ;TZot!tZ-2s~~M0] t K !hsAu_ `&nVu{ \ش,{i|/za_ pZ?jyW5ձKs Mb{*^UϓqD"Yud<}ĪMi8L{K^u_4{?j|!"'D>b֦AC :Y|\`6Tk|!"[3 KsÍ#%WgAsn,yOX-O8˳C,w5X\4hra\`ݼ@YM^!`n~=ly^{u-o꣍ Y׼M](`V}lU$>b]sE,Mh:@C_*`f_}}\>U=(`Imtι:M\+`!n4M~u8sC՝9!^ `];+~Z3  V]k^_}CuBrL |aow 8 wo\W= =՟6ϋ,XT6/@꾇n@έ7Bfn| "Kr`64 ~ rymuԍ]@`vNЮȺ@mKx=xίqce'rVU=vt!ҧ9 Má'.CKFWo]ta+M;0LգGa7GUT_5v̾Ǿ,lV/ߜP[V'}YNޅ5գkG~ctWVϪ^ԋLaON%Ofp-G][]TzoT>tv[ݱrJu1)˞]տBf̀ :;ߤ]OV?h;`>t+WoݴYݫz@ӱ_tFŦPu"rǦF†wO?pTuF/dyy*0Wj| 7T?S}mS߼4G73MλOUw 4]UKӴeoey_X`^An<UϮ6X=M](M7:s&0Uh| ۛkUw V)MS՟i|-۟몇,w6X׼:%?[sLuFuQn*s nUsmunu`4E}]mٹ<#`)nY}úu㛎za{zCꄛDL<\4)y`a45l^W: ՞ù8lɞq4\`^Av63)郍oesMujR<*}ngǒV{uquȁspӍ!dwr~`fNk08z[UUձuUIG7W4X#<,ədwsAn0̞;  F.,3?r6yý ;T;z^Av?Vߙ#<E?ՍoeL>Y5`)_k|yk}5g'smuþhw>BM[= ,_4X쫞tpTuVuMU_f?ؼzJudMjf~g^? 
X5Xżrjvdܩ 7<,ͩM\{#6Xs:4vsX4M ?.`i^byC^Voj}p  C:W{4MY7o XTG,9Yb 9zi%`.<0QkG2rzQvX)l&l$4;>bPuD_.9UF_v`ӳ` VL~.`ib9Xˍ/9k\C7`?>B GborAӷ5eK6ެ6m>~]b^[uYn.uz7\U3`i>b_&֛Ϫ.1zcR]>BWOp{_FQV#O XW4XZ5脍xBӽ3]ZfI#d5 0ަ51 Xn|쯞ņ{f,60܋G=S#`47ZHu%zA{%`M4|ՁKʹZi`Tn|/!K3c`4{Y?Y%sHV?KʾꮛZi`7̇`ˎoP=YnR=Ērn/4^R~us 6G`ncodVKt꓍'2MK-7`Wi|?!OUϪn76MSh|#'7,~b)-3ܤ#/%?efj Y\YS1vMSW7qɳK6pnQri`)ks{@4M~񍚬W.nT~b hS 卿or`~5 ;5tf7f~zh=ܳ.0l7gy]``\]B/zɯ ` `^}}gM޴SJs΁7n*߶⩍)D8]-47P"_LC οŽ.o~iyi /ϛ3k03{aIKwr\M.쌣%Ce_uҦWVB)M9]hG7MzQu߱Fl؍W=8} 2`.oz styջ3-X74M"7? Xbyv/5s2לu>\U](W?xap-p8>X=B-}ӎdΎwT]1.dTwjVX[Vo:8 ck',#rqK %,g'gVf~$%ߵSt9&VMyXvü:rMuv-D6m|1Vn昿ʢ"UM&w F8"!w~igD慨Bmqq]B`nWiL`uC űk.6a_{̌s[t;p ˴ *<ސgfWG1C{V5u M8չM_lݿI[l5OX5cnyouV:񟣹,*0KOo|!Ϋ*`=id몣V յ3ʢ @?Gs5[YT`v}v`ӠG65ߐlW>3M`}w92;z[?Os㶲,ݻ}v֎R^Nk,zlgl9~ht{f_7 Ò] }m_.#7ɐ4ӫkDNUs:TohjNxK+ ՞7٩zBW`6MN]Vk-ߵ'\-Ҋsuc5Dv2oh:XAȁճiJK-vOT.Rl:VɟUo]8&V1aV_zJurfVMS^Twl9+z"㏬_9E6lĚ}^Ppt ^=zwufu9X|T?TbunMJ稦]L\^R}fpWnpsqUujBqb1"N.~\ v0:i)P(ՏeVك3\~#%kk_=ptdw.n[=zouv@5@L`yNj{YkVc4a|t3g 5ꧪ4 il9:M[扬sT Xj|1|ⱍ%Z G"smunuM[`g]d.yf:{[[NX#Kbk ,3w%M0Kȁ/w5mvr`6X=ctvqupycr`=;Xv0ݱi./2\ e=PE) IDATυz]gF \Q9Xv0uTտ39?ffձ_"`_] |ftڑ Vvp- 8zb+,+F}{磋?T4 l#&n^=集w\ LWW'WcUǍ.X˫߬S}jp-2.V M[i_%fUK.XVO[]jh5" ֜KҦ; X3]x'7 \ZS~l9lȁ͹mOW\ Zz"`Y]0`\]Wǀ l]~ZjB`>Y;`^X=kpD5m2\d>uVVoέ3X&ph6R`1cVߍ.f+A[̛aj,yS/#ǖ7.f捣 M;W=|l90OLKK M%>#nz`f|&ݴi Ǎ-e`&Nj0µхY]#kq.~ziPvg`=zu2\ _ctZ{dU9:5gua2) `EX[L`;G0dt0S>0L`7)&0L`nF09EL}3 dHu`t0S.֝ 3`0>2.֝ ph>#09phL`0&0գ  xF330]̜ fƻnt0sv0 xvgC x{F330]̜ fs8cF΀ gg3`9L`0&0G3w`0N]ɣ ugs84̀ g8[.֝ ph];&0I:";pL`;.fՑugn 0`p` 0yGta.0`sqLl 9?UWOFy`N]EX[{F3qR/n5`=][P]7G+W\}xp-0R'TOΪ0`Z]4To]v>Zk;+~Q|eƖv3{TggaW7 ܣcCF3kҦ/4X\0CW[ݧcV׍.f+{7}3cˁy3XwVzZsmu1/vzkT/&1URrsLUxxKƦS t&y/Q=zZL,M_cˁe2`[wAs__H|`&at]Xww<ؒ= gV1`yN] tc/F|zYoW VL`:irv17o:7fؘM <%͎]M^tء\/nMl[}.1}t3ꓣX!WWFZ7{r`e]W;9MgEuݍA搷ou!a][]H`qDd):g3XC~iW[`uzף v=.b&_mt0[F1w7`לVMu:`ske` }:3nަG욋O.b&0F0WV.5W8+6XrfK`0q>4hMj`\=gtlu"f≣ AfM?ܦ/Ѧ/mߵicˁeƻ:iۧV[ҏ%37.`&S=`tTwt3M@/uy`=ɡL`F~U[oV]+.]ńu? 
`=ܦo\ڴ)M%҅3Fi2ӷ`FSKF1l:VB`" V]^.~z~uZ`V^tȺI;nq-a)񟷹` '5C\Q0kȁ;XW=ǖ="w ]'.`FwJFAչٽfπ ,˫GT^:eV݅ 4aw33Vަ#@n1C?nX&L4ȦAxY7E;ꕣ #.vS3D 37.::i~`a.~gy!sdхǫVW.vqեɣ +[e{pXU^W;%^VR[dX oh:cˁ]qLәG.]ČܺU#.bdVQgpi6oVX-5tWꚱ{Pu" ˎ#r;MlUv]ӗ4X.9MhVdEsUtsliEa~[ZQ`ѴS^CdrmӮXY{FT?YtuNi7+GlVw]Č98{7T]Ȍ\yn$e]Un+Շ0GzH1"we[E;/G03VttM/;9.a\4TrJufK`e~ُ2Md;Us;#0GTooinέ,*0Kڴ>Cd/5VR vkg-?E4mEOY|::.`]M V1RY5配1D5l;`UVibX~`to T?3ؤ] dtTﭞZݳ:iwQ`]0K{}ak͸z`7{}х̧<,ɝw(/wimat!SV MxKꏪkꕍ߂MdyEvU׍/s0mfyV=՟7h^_=!78"ə[1Gma]a7}S=;s3S nS]byguv1ycnf>MGY/D'T`W66YXd{KW9ȑw朋~uz;}M{Vՙhzi`~=g՞.0lꥍ=fKSzO `S=%dyAݩb.0lٍ~pΨFL3O X2#tx@v߷dp6,|O Y\Ҵ[ߘz]'Vlna^}M10WӴ;ަln|&듿,ёO,!onUwbKX=OM7ꕍod=K3%n:vQMۣ7 x{ Yiw_8LU5ս脦cFK2!{)ŦVՕ'dsASؒT/6ku ,ϯ6XR~qs 7鍿gmnV^BV7Wmj=Y<-`T]^bIM4\yIWyS+ z9д`ܣz^u]@Y\FK,)lj ~yIyV!j́]3oe5fK3 d3yf͍#d5rm. antՍoeyNee_mfYkOlwF߿K˟of>BkveKmO4a@yD%fXofk7Xʑ\ٴ `nQU} ,3n4}sM7'%Xo`[Vk|!M:ꃍo(eyyhӰ>byƗ5iRszFGryS|b,1S`ʲK6ꬪ#Ί}_.9/}W{YV>ܴq2ΨSTXSsV-Yo'Tq9P=p  ua},#4~l7/ㅍ!Tw³2^.<0ܟ4=MkcOӠɫߐʼ%ou]{cշopY>o鹮ɍ!dyS.GZ;zETg$brzV.=M;bj{rƖnէC%9*c6tXGWhJ ]`o| wAa{r*(/E!UĈ:1StZ!Ie!Cg ꐤvd&56``DQ*( ",t߳pn߾=AWcz\xuel]dWoߣeUkX`]'??hlf`tǦ]U=M /Ԓ={U4=MO/NNovam~ Xg2-WTkߗe30KZUlӪ_i` EqEenc1rh3s,s~;|mn4}Z0M?(kEò'8۲oFV^,{v̶,ڼiFy|0gN.YkK^u7z~XX]ZmieXO۫ՋfY`.x*$˓9 uZ\NXonJR]&akoԯϲ8\xbuGmltѴė,M_8VOU?մ jDuPHA-MK,%Ս1](.n찪}KJɮlf%G\%`^\AMw{d4} Z߾Y(m}:H Aάc99BsZicq.A\wX'Vk^zIm[uQc6~V/T'vyߍ~}&- ֧7,U7~~W_^pE[1WWU7~}4z +ol醼Ha]G߃yW??nnڌj_]Q}렃rynu[g/Ag߬.?2.WWok?mz]0WAG׾~/,ph?P]S=^kʃSϬm#@V/;zλ㚎UkLϫ7g޾|?%`l}Y/T}U=\p5{j#QvV4=K EO: \=TA|, &_VoN|,}o5memdrsӗӷMiMO8gU6m. 
ǫ>fiM:s_UW >̗8vU^4@0?@ηzdʶ}|uBCm1>\h]{KMz̷WWUw >ŵgտ6]@7XMWU} LN>ٴasWӓgҦGs<s??H5~~}U=#hh7ì0_TAEniXì0GVT76BSc:`_}3cM!S^K=zMM7?z(,-MMrd>)m!$m|V,-{?C>ѴdaW X(VoǪ#-0W$3E M77؅u_b߮W '?Zi!i{(`Ѵd:`V7jѺzQo`>lnL!bo/T`]bѺ?OF˛^2BkQzQi`^l~BZWW*ίbQz_wKյվ_|k< ̝4~Ɛ=Më?ck X$6m4 yǪo4~ΐW,o1okXr1Wuvuu`/: ̱U6~ސ[,77~ޘjYc;MMoۨ~'kOs }:=`mcT{6޴dхuh#`V/>9Fڌn^Ψ9[6^ִd/*^XR'Uh<#mdoo `~w6rc?.յÍWv?%.nkzvk4M7x?%MMX WW5siW]ypqvuu`;oO_`cejMĹ7m4j}`l֣+2#g[SE6bӴYi;5}M zN%^a~l_~s}M7EcxeӫGFo$ݧ\gUwtls7+|&; ~y?dWUw6~cV7ウuiǧnNhRKvz#^VW5'~FV{k'0k}6|icI#尳lQΩ~ٿT}V `rcnkAZ HWk7"h_]Wzk1_hz䎁Z]RE4=`ޝ&Z.`miTӊ ="Vʖ5ɣ`^RQ6hS(rrwy[F,kqAݞtzuEod|uOuuufՉegAczSuJzWP7;hsW]_$7~6/UWUOl^T=cMO:;.Yn.tЪlfDm_l*91>`i^TW{8+ջ?{82U/..7p-[cX66YOT?\{8 O6 &fV #Pucj#V & 1: strings.append(format_mult.format(elem=elem, mult=mult)) else: strings.append(format_single.format(elem=elem)) return '{%s}' % ', '.join(strings) # New public methods (not overriding/implementing anything) def num_unique_elements(self): """Return the number of unique elements. This runs in O(1) time """ return len(self._dict) def unique_elements(self): """Return a view of unique elements in this bag.""" return UniqueElementsView(self) def count(self, value): """Return the number of value present in this bag. If value is not in the bag no Error is raised, instead 0 is returned. This runs in O(1) time Args: value: The element of self to get the count of Returns: int: The count of value in self """ return self._dict.get(value, 0) @deprecated( "Use `heapq.nlargest(n, self.counts(), key=itemgetter(1))` instead or " "`sorted(self.counts(), reverse=True, key=itemgetter(1))` for `n=None`", '1.0', ) def nlargest(self, n=None): """List the n most common elements and their counts. List is from the most common to the least. If n is None, the list all element counts. 
Run time should be O(m log m) where m is len(self) Args: n (int): The number of elements to return """ if n is None: return sorted(self.counts(), key=itemgetter(1), reverse=True) else: return heapq.nlargest(n, self.counts(), key=itemgetter(1)) def counts(self): """Return a view of the unique elements in self and their counts. .. versionadded:: 1.0.3 """ return CountsView(self) @classmethod def from_mapping(cls, mapping): """Create a bag from a dict of elem->count. Each key in the dict is added if the value is > 0. Raises: ValueError: If any count is < 0. """ out = cls() for elem, count in mapping.items(): out._set_count(elem, count) return out # implementing Sized methods def __len__(self): """Return the cardinality of the bag. This runs in O(1) """ return self._size # implementing Container methods def __contains__(self, value): """Return the multiplicity of the element. This runs in O(1) """ return self.count(value) # implementing Iterable methods def __iter__(self): """Iterate through all elements. Multiple copies will be returned if they exist. """ for value, count in self.counts(): for _ in range(count): yield value # Comparison methods def issubset(self, other): """Check that every element in self has a count <= in other. Args: other (Iterable) """ if not isinstance(other, Bag): return self.issubset(frozenbag(other)) for elem, count in self.counts(): if not count <= other.count(elem): return False return True def issuperset(self, other): """Check that every element in self has a count >= in other. 
Args: other (Iterable) """ if not isinstance(other, Bag): return self.issuperset(bag(other)) for elem, count in other.counts(): if not self.count(elem) >= count: return False return True def __le__(self, other): if not isinstance(other, Bag): return NotImplemented return len(self) <= len(other) and self.issubset(other) def __lt__(self, other): if not isinstance(other, Bag): return NotImplemented return len(self) < len(other) and self.issubset(other) def __gt__(self, other): if not isinstance(other, Bag): return NotImplemented return len(self) > len(other) and self.issuperset(other) def __ge__(self, other): if not isinstance(other, Bag): return NotImplemented return len(self) >= len(other) and self.issuperset(other) def __eq__(self, other): if not isinstance(other, Bag): return False return self._dict == other._dict def __ne__(self, other): return not (self == other) # Operations - &, |, +, -, ^, * and isdisjoint def _iadd(self, other): """Add all of the elements of other to self. if isinstance(it, Bag): This runs in O(it.num_unique_elements()) else: This runs in O(len(it)) """ if isinstance(other, Bag): for elem, count in other.counts(): self._increment_count(elem, count) else: for elem in other: self._increment_count(elem, 1) return self def _iand(self, other): """Set multiplicity of each element to the minimum of the two collections. if isinstance(other, Bag): This runs in O(other.num_unique_elements()) else: This runs in O(len(other)) """ # TODO do we have to create a bag from the other first? if not isinstance(other, Bag): other = self._from_iterable(other) for elem, old_count in set(self.counts()): other_count = other.count(elem) new_count = min(other_count, old_count) self._set_count(elem, new_count) return self def _ior(self, other): """Set multiplicity of each element to the maximum of the two collections. 
if isinstance(other, Bag): This runs in O(other.num_unique_elements()) else: This runs in O(len(other)) """ # TODO do we have to create a bag from the other first? if not isinstance(other, Bag): other = self._from_iterable(other) for elem, other_count in other.counts(): old_count = self.count(elem) new_count = max(other_count, old_count) self._set_count(elem, new_count) return self def _ixor(self, other): """Set self to the symmetric difference between the sets. if isinstance(other, Bag): This runs in O(other.num_unique_elements()) else: This runs in O(len(other)) """ if isinstance(other, Bag): for elem, other_count in other.counts(): count = abs(self.count(elem) - other_count) self._set_count(elem, count) else: # Let a = self.count(elem) and b = other.count(elem) # if a >= b then elem is removed from self b times leaving a - b # if a < b then elem is removed from self a times then added (b - a) # times leaving a - a + (b - a) = b - a for elem in other: try: self._increment_count(elem, -1) except ValueError: self._increment_count(elem, 1) return self def _isub(self, other): """Discard the elements of other from self. if isinstance(it, Bag): This runs in O(it.num_unique_elements()) else: This runs in O(len(it)) """ if isinstance(other, Bag): for elem, other_count in other.counts(): try: self._increment_count(elem, -other_count) except ValueError: self._set_count(elem, 0) else: for elem in other: try: self._increment_count(elem, -1) except ValueError: pass return self def __and__(self, other): """Intersection is the minimum of corresponding counts. This runs in O(l + n) where: * n is self.num_unique_elements() * `l = 1` if other is a bag else `l = len(other)` """ return self.copy()._iand(other) def isdisjoint(self, other): """Return if this bag is disjoint with the passed collection. This runs in O(len(other)) """ for value in other: if value in self: return False return True def __or__(self, other): """Union is the maximum of all elements. 
This runs in O(m + n) where: * `n = self.num_unique_elements()` * m = other.num_unique_elements() if other is a bag else m = len(other) """ return self.copy()._ior(other) def __add__(self, other): """Return a new bag also containing all the elements of other. self + other = self & other + self | other This runs in O(m + n) where: * n is self.num_unique_elements() * m is len(other) Args: other (Iterable): elements to add to self """ return self.copy()._iadd(other) def __sub__(self, other): """Difference between the sets. For normal sets this is all x s.t. x in self and x not in other. For bags this is count(x) = max(0, self.count(x)-other.count(x)) This runs in O(m + n) where: * n is self.num_unique_elements() * m is len(other) Args: other (Iterable): elements to remove """ return self.copy()._isub(other) def __mul__(self, other): """Cartesian product with other.""" return self.product(other) def product(self, other, operator=None): """Cartesian product of the two sets. Optionally, pass an operator to combine elements instead of creating a tuple. This should run in O(m*n+l) where: * `m` is the number of unique elements in `self` * `n` is the number of unique elements in `other` * `l` is 0 if `other` is a bag, else `l` is the `len(other)` Args: other (Iterable): The iterable to take the product with. operator (Callable): A function that accepts an element from self and other and returns a combined value to include in the return value. """ if not isinstance(other, Bag): other = self._from_iterable(other) values = defaultdict(int) for elem, count in self.counts(): for other_elem, other_count in other.counts(): if operator: new_elem = operator(elem, other_elem) else: new_elem = (elem, other_elem) new_count = count * other_count values[new_elem] += new_count return self.from_mapping(values) def __xor__(self, other): """Symmetric difference between the sets. other can be any iterable. 
This runs in O(m + n) where: m = len(self) n = len(other) """ return self.copy()._ixor(other) class bag(Bag): """bag is a mutable unhashable bag. .. automethod:: __init__ """ def pop(self): """Remove and return an element of self.""" # TODO can this be done more efficiently (no need to create an iterator)? it = iter(self) try: value = next(it) except StopIteration: raise KeyError('pop from an empty bag') self.remove(value) return value def add(self, elem): """Add elem to self.""" self._increment_count(elem) def discard(self, elem): """Remove elem from this bag, silent if it isn't present.""" try: self.remove(elem) except ValueError: pass def remove(self, elem): """Remove elem from this bag, raising a ValueError if it isn't present. Args: elem: object to remove from self Raises: ValueError: if the elem isn't present """ self._increment_count(elem, -1) def discard_all(self, other): """Discard all of the elems from other.""" self._isub(other) def remove_all(self, other): """Remove all of the elems from other. Raises a ValueError if the multiplicity of any elem in other is greater than in self. """ if not self.issuperset(other): raise ValueError('Passed collection is not a subset of this bag') self.discard_all(other) def clear(self): """Remove all elements from this bag.""" self._dict = dict() self._size = 0 # In-place operations __ior__ = Bag._ior __iand__ = Bag._iand __ixor__ = Bag._ixor __isub__ = Bag._isub __iadd__ = Bag._iadd class frozenbag(Bag, Hashable): """frozenbag is an immutable, hashable bag. .. 
automethod:: __init__ """ def __hash__(self): """Compute the hash value of a frozenbag.""" if not hasattr(self, '_hash_value'): self._hash_value = Set._hash(self) return self._hash_value collections-extended-2.0.2/collections_extended/bijection.py000066400000000000000000000043261417332243400243550ustar00rootroot00000000000000"""Class definition for bijection.""" from collections.abc import Mapping, MutableMapping __all__ = ('bijection', ) class bijection(MutableMapping): """A one-to-one onto mapping, a dict with unique values.""" def __init__(self, iterable=None, **kwarg): """Create a bijection from an iterable. Matches dict.__init__. """ self._data = {} self.__inverse = self.__new__(bijection) self.__inverse._data = {} self.__inverse.__inverse = self if iterable is not None: if isinstance(iterable, Mapping): for key, value in iterable.items(): self[key] = value else: for pair in iterable: key, value = pair self[key] = value for key, value in kwarg.items(): self[key] = value def __repr__(self): if len(self._data) == 0: return '{0}()'.format(self.__class__.__name__) else: repr_format = '{class_name}({values!r})' return repr_format.format( class_name=self.__class__.__name__, values=self._data, ) @property def inverse(self): """Return the inverse of this bijection.""" return self.__inverse # Required for MutableMapping def __len__(self): return len(self._data) # Required for MutableMapping def __getitem__(self, key): return self._data[key] # Required for MutableMapping def __setitem__(self, key, value): if key in self: del self.inverse._data[self[key]] if value in self.inverse: del self._data[self.inverse[value]] self._data[key] = value self.inverse._data[value] = key # Required for MutableMapping def __delitem__(self, key): value = self._data.pop(key) del self.inverse._data[value] # Required for MutableMapping def __iter__(self): return iter(self._data) def __contains__(self, key): return key in self._data def clear(self): """Remove everything from this bijection.""" 
self._data.clear() self.inverse._data.clear() def copy(self): """Return a copy of this bijection.""" return bijection(self) def items(self): """See Mapping.items.""" return self._data.items() def keys(self): """See Mapping.keys.""" return self._data.keys() def values(self): """See Mapping.values.""" return self.inverse.keys() def __eq__(self, other): return isinstance(other, bijection) and self._data == other._data collections-extended-2.0.2/collections_extended/indexed_dict.py000066400000000000000000000241121417332243400250250ustar00rootroot00000000000000"""IndexedDict class definition. .. versionadded:: 1.0.3 """ from collections.abc import MutableMapping from ._util import deprecation_warning from .sentinel import NOT_SET __all__ = ('IndexedDict', ) # TODO these should be ValueErrors KEY_AND_INDEX_ERROR = TypeError( "Specifying both `key` and `index` is not allowed") KEY_EQ_INDEX_ERROR = TypeError( "Exactly one of `key` and `index` must be specified") class IndexedDict(MutableMapping): """A Mapping that preserves insertion order and allows access by item index. The API is an extension of OrderedDict. """ def __init__(self, iterable=None, **kwargs): """Create an IndexedDict and initialize it like a dict.""" self._dict = {} # key -> (index, value) self._list = [] # index -> (key, value) self.update(iterable or [], **kwargs) def clear(self): """Remove all items.""" self._dict = {} self._list = [] def get(self, key=NOT_SET, index=NOT_SET, default=NOT_SET, d=NOT_SET): """Return value with given `key` or `index`. If no value is found, return `default` (`None` by default). .. deprecated :: 1.0.3 The `d` parameter has been renamed `default`. `d` will be removed in some future version. Args: key: The key of the value to get index: The index of the value to get default: The value to return if `key` is not found or `index` is out of bounds. If it is NOT_SET, None is returned. 
d: DEPRECATED: Old parameter name for `default` """ if d is not NOT_SET: if default is not NOT_SET: raise ValueError('Specified default and d') deprecation_warning( "IndexedDict.pop parameter 'd' has been renamed to 'default'" ) default = d if default is NOT_SET: default = None if index is NOT_SET and key is not NOT_SET: try: index, value = self._dict[key] except KeyError: return default else: return value elif index is not NOT_SET and key is NOT_SET: try: key, value = self._list[index] except IndexError: return default else: return value else: raise KEY_EQ_INDEX_ERROR def pop(self, key=NOT_SET, index=NOT_SET, default=NOT_SET, d=NOT_SET): """Remove and return value. Optionally, specify the `key` or `index` of the value to pop. If `key` is specified and is not found a `KeyError` is raised unless `default` is specified. Likewise, if `index` is specified that is out of bounds, an `IndexError` is raised unless `default` is specified. Both `index` and `key` cannot be specified. If neither is specified, then the last value is popped. This is generally O(N) unless removing last item, then O(1). .. deprecated :: 1.0.3 The `d` parameter has been renamed `default`. `d` will be removed in some future version. 
Args: key: The key of the value to pop index: The index of the value to pop default: The value to return if the key is not found or the index is out of bounds d: DEPRECATED: Old parameter name for `default` """ if d is not NOT_SET: if default is not NOT_SET: raise ValueError('Specified default and d') deprecation_warning( "IndexedDict.pop parameter 'd' has been renamed to 'default'" ) default = d has_default = default is not NOT_SET if index is NOT_SET and key is not NOT_SET: index, value = self._pop_key(key, has_default) elif key is NOT_SET: key, index, value = self._pop_index(index, has_default) else: raise KEY_AND_INDEX_ERROR if index is None: return default else: self._fix_indices_after_delete(index) return value def _pop_key(self, key, has_default): """Remove an element by key.""" try: index, value = self._dict.pop(key) except KeyError: if has_default: return None, None else: raise key2, value2 = self._list.pop(index) assert key is key2 assert value is value2 return index, value def _pop_index(self, index, has_default): """Remove an element by index, or last element.""" try: if index is NOT_SET: index = len(self._list) - 1 key, value = self._list.pop() else: key, value = self._list.pop(index) if index < 0: index += len(self._list) + 1 except IndexError: if has_default: return None, None, None else: raise index2, value2 = self._dict.pop(key) assert index == index2 assert value is value2 return key, index, value def fast_pop(self, key=NOT_SET, index=NOT_SET): """Pop a specific item quickly by swapping it to the end. Remove value with given key or index (last item by default) fast by swapping it to the last place first. Changes order of the remaining items (item that used to be last goes to the popped location). Returns tuple of (poped_value, new_moved_index, moved_key, moved_value). If key is not found raises KeyError or IndexError. Runs in O(1). 
""" if index is NOT_SET and key is not NOT_SET: index, popped_value = self._dict.pop(key) elif key is NOT_SET: if index is NOT_SET: index = len(self._list) - 1 key, popped_value2 = self._list[-1] else: key, popped_value2 = self._list[index] if index < 0: index += len(self._list) index2, popped_value = self._dict.pop(key) assert index == index2 else: raise KEY_AND_INDEX_ERROR if key == self._list[-1][0]: # The item we're removing happens to be the last in the list, # no swapping needed _, popped_value2 = self._list.pop() assert popped_value is popped_value2 return popped_value, len(self._list), key, popped_value else: # Swap the last item onto the deleted spot and # pop the last item from the list self._list[index] = self._list[-1] moved_key, moved_value = self._list.pop() self._dict[moved_key] = (index, moved_value) return popped_value, index, moved_key, moved_value def popitem(self, last=NOT_SET, *, key=NOT_SET, index=NOT_SET): """Remove and return a (key, value) tuple. By default, the last item is popped. Optionally, specify the `key` or `index` of the value to pop. The `last` parameter is included to match the OrderedDict API. If `last` is passed then the first or last item is returned based on its truthiness. At most one of `index`, `last` and `key` can be specified. This is generally O(N) unless removing last item, then O(1). 
Args: key: The key of the value to pop index: The index of the value to pop last: Whether or not to pip the last item Raises: KeyError: If the dictionary is empty or a key is specified that is not present IndexError: If `index` is specified and is out of bounds ValueError: If more than one of `last`, `index` and `key` are specified """ if not self: raise KeyError('IndexedDict is empty') if sum(x is not NOT_SET for x in (last, key, index)) > 1: raise ValueError( "Cannot specify more than one of key, index and last" ) if key is not NOT_SET: index, value = self._pop_key(key=key, has_default=False) else: if last is not NOT_SET: index = -1 if last else 0 if index is NOT_SET: index = -1 key, index, value = self._pop_index(index, has_default=False) self._fix_indices_after_delete(starting_index=index) return key, value def move_to_end(self, key=NOT_SET, index=NOT_SET, last=True): """Move an existing element to the end (or beginning if last==False). Runs in O(N). """ if index is NOT_SET and key is not NOT_SET: index, value = self._dict[key] elif index is not NOT_SET and key is NOT_SET: key, value = self._list[index] # Normalize index if index < 0: index += len(self._list) else: raise KEY_EQ_INDEX_ERROR if last: index_range = range(len(self._list) - 1, index - 1, -1) self._dict[key] = (len(self._list) - 1, value) else: index_range = range(index + 1) self._dict[key] = (0, value) previous = (key, value) for i in index_range: self._dict[previous[0]] = i, previous[1] previous, self._list[i] = self._list[i], previous def copy(self): """Return a shallow copy.""" ret = IndexedDict() ret._dict = self._dict.copy() ret._list = list(self._list) return ret def index(self, key): """Return index of a record with given key. Runs in O(1). """ return self._dict[key][0] def key(self, index): """Return key of a record at given index. Runs in O(1). 
""" return self._list[index][0] def __len__(self): """Return number of elements stored.""" return len(self._list) def __repr__(self): return "{class_name}({data})".format( class_name=self.__class__.__name__, data=repr(self._list), ) def __str__(self): # When Python 3.5 support is dropped, we can rely on dict order and this # can be simplified to: # return "{class_name}({data})".format( # class_name=self.__class__.__name__, # data=repr(dict(self)), # ) data = ', '.join( '{k!r}: {v!r}'.format(k=k, v=v) for k, v in self.items() ) return "{class_name}({{{data}}})".format( class_name=self.__class__.__name__, data=data, ) def __getitem__(self, key): """Return value corresponding to given key. Raises KeyError when the key is not present in the mapping. Runs in O(1). """ return self._dict[key][1] def __setitem__(self, key, value): """Set item with given key to given value. If the key is already present in the mapping its order is unchanged, if it is not then it's added to the last place. Runs in O(1). """ if key in self._dict: index, old_value1 = self._dict[key] self._list[index] = key, value else: index = len(self._list) self._list.append((key, value)) self._dict[key] = index, value def __delitem__(self, key): """Remove item with given key from the mapping. Runs in O(n), unless removing last item, then in O(1). """ index, value = self._dict.pop(key) key2, value2 = self._list.pop(index) assert key == key2 assert value is value2 self._fix_indices_after_delete(index) def __contains__(self, key): """Check if a key is present in the mapping. Runs in O(1). 
""" return key in self._dict def __iter__(self): """Return iterator over the keys of the mapping in order.""" return (item[0] for item in self._list) def _fix_indices_after_delete(self, starting_index=0): for i, (k, v) in enumerate(self._list[starting_index:], starting_index): self._dict[k] = (i, v) collections-extended-2.0.2/collections_extended/range_map.py000066400000000000000000000255621417332243400243450ustar00rootroot00000000000000"""RangeMap class definition.""" from abc import ABCMeta, abstractmethod from bisect import bisect_left, bisect_right from collections.abc import Collection, Mapping, Set from .sentinel import NOT_SET class MappedRange: """Represents a subrange of a RangeMap. This is a glorified namedtuple. """ __slots__ = ('start', 'stop', 'value') def __init__(self, start, stop, value): """Create a mapped range. Args: start: The start of the range, inclusive. stop: The end of the range, exclusive. value: The mapped value. """ self.start = start self.stop = stop self.value = value # Implement __iter__ so we can unpack this def __iter__(self): yield self.start yield self.stop yield self.value def __str__(self): return '[{start!r}, {stop!r}) -> {value!r}'.format( start=self.start, stop=self.stop, value=self.value, ) def __repr__(self): return '{class_name}({start!r}, {stop!r}, {value!r})'.format( class_name=self.__class__.__name__, start=self.start, stop=self.stop, value=self.value, ) def __eq__(self, other): if isinstance(other, MappedRange): return (self.start, self.stop, self.value) ==\ (other.start, other.stop, other.value) return False class RangeMapView(Collection): """Base class for views of RangeMaps.""" __metaclass__ = ABCMeta def __init__(self, mapping): """Create a RangeMapView from a RangeMap.""" self._mapping = mapping def __len__(self): return len(self._mapping) @abstractmethod def __iter__(self): raise NotImplementedError @abstractmethod def __contains__(self, item): raise NotImplementedError def __repr__(self): return 
'{0.__class__.__name__}({0._mapping!r})'.format(self) @property def mapping(self): """Return the underlying RangeMap.""" return self._mapping class RangeMapKeysView(RangeMapView, Set): """A view of the keys that mark the starts of subranges of a RangeMap. Since iterating over all the keys is impossible, the view only iterates over the keys that start each subrange. """ def __contains__(self, key): return key in self.mapping def __iter__(self): for mapped_range in self.mapping.ranges(): yield mapped_range.start class RangeMapItemsView(RangeMapView, Set): """A view of the items that mark the starts of subranges of a RangeMap. Since iterating over all the items is impossible, the view only iterates over the items that start each subrange. """ def __contains__(self, item): # TODO should item be a MappedRange instead of a 2-tuple key, value = item try: mapped_value = self.mapping[key] except KeyError: return False else: return mapped_value == value def __iter__(self): for mapped_range in self.mapping.ranges(): yield (mapped_range.start, mapped_range.value) class RangeMapValuesView(RangeMapView): """A view on the values that mark the start of subranges of a RangeMap. Since iterating over all the values is impossible, the view only iterates over the values that start each subrange. """ def __contains__(self, value): for mapped_range in self.mapping.ranges(): if mapped_range.value == value: return True return False def __iter__(self): for mapped_range in self.mapping.ranges(): yield mapped_range.value def _check_start_stop(start, stop): """Check that start and stop are valid - orderable and in the right order. 
Raises: ValueError: if stop <= start TypeError: if unorderable """ if start is not None and stop is not None and stop <= start: raise ValueError('stop must be > start') def _check_key_slice(key): if not isinstance(key, slice): raise TypeError('Can only set and delete slices') if key.step is not None: raise ValueError('Cannot set or delete slices with steps') class RangeMap(Mapping): """Map ranges of orderable elements to values.""" def __init__(self, iterable=None, default_value=NOT_SET): """Create a RangeMap. A mapping or other iterable can be passed to initialize the RangeMap. If mapping is passed, it is interpreted as a mapping from range start indices to values. If an iterable is passed, each element will define a range in the RangeMap and should be formatted (start, stop, value). default_value is a an optional keyword argument that will initialize the entire RangeMap to that value. Any missing ranges will be mapped to that value. However, if ranges are subsequently deleted they will be removed and *not* mapped to the default_value. Args: iterable: A Mapping or an Iterable to initialize from. default_value: If passed, the return value for all keys less than the least key in mapping or missing ranges in iterable. If no mapping or iterable, the return value for all keys. """ self._keys = [None] self._values = [default_value] if iterable: if isinstance(iterable, Mapping): self._init_from_mapping(iterable) else: self._init_from_iterable(iterable) @classmethod def from_mapping(cls, mapping): """Create a RangeMap from a mapping of interval starts to values.""" obj = cls() obj._init_from_mapping(mapping) return obj def _init_from_mapping(self, mapping): for key, value in sorted(mapping.items()): self.set(value, key) @classmethod def from_iterable(cls, iterable): """Create a RangeMap from an iterable of tuples defining each range. Each element of the iterable is a tuple (start, stop, value). 
""" obj = cls() obj._init_from_iterable(iterable) return obj def _init_from_iterable(self, iterable): for start, stop, value in iterable: self.set(value, start=start, stop=stop) def __str__(self): range_format = '({range.start}, {range.stop}): {range.value}' values = ', '.join([range_format.format(range=r) for r in self.ranges()]) return 'RangeMap(%s)' % values def __repr__(self): range_format = '({range.start!r}, {range.stop!r}, {range.value!r})' values = ', '.join([range_format.format(range=r) for r in self.ranges()]) return 'RangeMap([%s])' % values def ranges(self, start=None, stop=None): """Generate MappedRanges for all mapped ranges. Yields: MappedRange """ _check_start_stop(start, stop) if start is None: start_loc = 1 else: start_loc = bisect_right(self._keys, start, lo=1) if stop is None: stop_loc = len(self._keys) else: stop_loc = bisect_left(self._keys, stop, lo=1) start_val = self._values[start_loc - 1] candidate_keys = [start] + self._keys[start_loc:stop_loc] + [stop] candidate_values = [start_val] + self._values[start_loc:stop_loc] for i, value in enumerate(candidate_values): if value is not NOT_SET: start_key = candidate_keys[i] stop_key = candidate_keys[i + 1] yield MappedRange(start_key, stop_key, value) def __contains__(self, key): try: self._getitem(key) except KeyError: return False else: return True def __iter__(self): for key, value in zip(self._keys, self._values): if value is not NOT_SET: yield key def __bool__(self): # any(val is not NOT_SET for val in self._values) if len(self._keys) > 1: return True else: return self._values[0] != NOT_SET __nonzero__ = __bool__ def _getitem(self, key): """Get the value for a key (not a slice).""" if key is None: loc = 0 else: loc = bisect_right(self._keys, key, lo=1) - 1 value = self._values[loc] if value is NOT_SET: raise KeyError(key) else: return value def get(self, key, restval=None): """Get the value of the range containing key, otherwise return restval.""" try: return self._getitem(key) except 
KeyError: return restval def get_range(self, start=None, stop=None): """Return a RangeMap for the range start to stop. Returns: A RangeMap """ return self.from_iterable(self.ranges(start, stop)) def set(self, value, start=None, stop=None): """Set the range from start to stop to value.""" _check_start_stop(start, stop) # start_index, stop_index will denote the section we are replacing if start is None: start_index = 0 else: start_index = bisect_left(self._keys, start, lo=1) if self._values[start_index - 1] == value: # We're setting a range where the left range has the same # value, so create one big range start_index -= 1 start = self._keys[start_index] new_keys = [start] new_values = [value] if stop is None: stop_index = len(self._keys) else: stop_index = bisect_right(self._keys, stop, lo=1) stop_value = self._values[stop_index - 1] if value != stop_value: new_keys.append(stop) new_values.append(stop_value) self._keys[start_index:stop_index] = new_keys self._values[start_index:stop_index] = new_values def delete(self, start=None, stop=None): """Delete the range from start to stop from self. Raises: KeyError: If part of the passed range isn't mapped. """ _check_start_stop(start, stop) if start is None: start_loc = 0 else: start_loc = bisect_right(self._keys, start, lo=1) - 1 if stop is None: stop_loc = len(self._keys) else: stop_loc = bisect_left(self._keys, stop, lo=1) for value in self._values[start_loc:stop_loc]: if value is NOT_SET: raise KeyError((start, stop)) # this is inefficient, we've already found the sub ranges self.set(NOT_SET, start=start, stop=stop) def empty(self, start=None, stop=None): """Empty the range from start to stop. Like delete, but no Error is raised if the entire range isn't mapped. """ self.set(NOT_SET, start=start, stop=stop) def clear(self): """Remove all elements.""" self._keys = [None] self._values = [NOT_SET] @property def start(self): """Get the start key of the first range. None if RangeMap is empty or unbounded to the left. 
""" if self._values[0] is NOT_SET: try: return self._keys[1] except IndexError: # This is empty or everything is mapped to a single value return None else: # This is unbounded to the left return self._keys[0] @property def end(self): """Get the stop key of the last range. None if RangeMap is empty or unbounded to the right. """ if self._values[-1] is NOT_SET: return self._keys[-1] else: # This is unbounded to the right return None def __eq__(self, other): if isinstance(other, RangeMap): return ( self._keys == other._keys and self._values == other._values ) else: return False def __getitem__(self, key): try: _check_key_slice(key) except TypeError: return self._getitem(key) else: return self.get_range(key.start, key.stop) def __setitem__(self, key, value): _check_key_slice(key) self.set(value, key.start, key.stop) def __delitem__(self, key): _check_key_slice(key) self.delete(key.start, key.stop) def __len__(self): count = 0 for v in self._values: if v is not NOT_SET: count += 1 return count def keys(self): """Return a view of the keys.""" return RangeMapKeysView(self) def values(self): """Return a view of the values.""" return RangeMapValuesView(self) def items(self): """Return a view of the item pairs.""" return RangeMapItemsView(self) collections-extended-2.0.2/collections_extended/sentinel.py000066400000000000000000000015251417332243400242260ustar00rootroot00000000000000"""Sentinel class.""" __all__ = ('Sentinel', 'NOT_SET') class Sentinel: """A class to create sentinel objects. The benefits vs. object() are a good repr it is picklable. 
Inspired by https://pypi.org/project/sentinels/ """ _registry = {} def __getnewargs__(self): return self._name, def __new__(cls, _name): """Find the Sentinel object with name or create a new one.""" try: return cls._registry[_name] except KeyError: new = super(Sentinel, cls).__new__(cls) cls._registry[_name] = new return new def __init__(self, name): super(Sentinel, self).__init__() self._name = name def __repr__(self): return '<%s>' % self._name def __bool__(self): return False def __eq__(self, other): if other.__class__ == self.__class__: return self._name == other._name return False NOT_SET = Sentinel('not_set') collections-extended-2.0.2/collections_extended/setlists.py000066400000000000000000000352551417332243400242660ustar00rootroot00000000000000"""Setlist class definitions.""" import random as random_ from collections.abc import ( Hashable, MutableSequence, MutableSet, Sequence, Set, ) from . import _util __all__ = ('SetList', 'setlist', 'frozensetlist') class SetList(Sequence, Set): """A setlist is an ordered `Collection` of unique elements. `SetList` is the superclass of `setlist` and `frozensetlist`. It is immutable and unhashable. """ def __init__(self, iterable=None, raise_on_duplicate=False): """Create a setlist, initializing from iterable if present. Args: iterable (Iterable): Values to initialize the setlist with. raise_on_duplicate: Raise a ValueError if any duplicate values are present. 
""" self._list = list() self._dict = dict() if iterable: if raise_on_duplicate: self._extend(iterable) else: self._update(iterable) def __repr__(self): if len(self) == 0: return '{0}()'.format(self.__class__.__name__) else: repr_format = '{class_name}({values!r})' return repr_format.format( class_name=self.__class__.__name__, values=tuple(self), ) # Convenience methods def _fix_neg_index(self, index): if index < 0: index += len(self) if index < 0: raise IndexError('index is out of range') return index def _fix_end_index(self, index): if index is None: return len(self) else: return self._fix_neg_index(index) def _append(self, value): # Checking value in self will check that value is Hashable if value in self: raise ValueError('Value "%s" already present' % str(value)) else: self._dict[value] = len(self) self._list.append(value) def _extend(self, values): new_values = set() for value in values: if value in new_values: raise ValueError('New values contain duplicates') elif value in self: raise ValueError('New values contain elements already present in self') else: new_values.add(value) for value in values: self._dict[value] = len(self) self._list.append(value) def _add(self, item): if item not in self: self._dict[item] = len(self) self._list.append(item) def _update(self, values): for value in values: if value not in self: self._dict[value] = len(self) self._list.append(value) @classmethod def _from_iterable(cls, it, **kwargs): return cls(it, **kwargs) # Implement Container def __contains__(self, value): return value in self._dict # Iterable we get by inheriting from Sequence # Implement Sized def __len__(self): return len(self._list) # Implement Sequence def __getitem__(self, index): if isinstance(index, slice): return self._from_iterable(self._list[index]) return self._list[index] def count(self, value): """Return the number of occurrences of value in self. 
This runs in O(1) Args: value: The value to count Returns: int: 1 if the value is in the setlist, otherwise 0 """ if value in self: return 1 else: return 0 def index(self, value, start=0, end=None): """Return the index of value between start and end. By default, the entire setlist is searched. This runs in O(1) Args: value: The value to find the index of start (int): The index to start searching at (defaults to 0) end (int): The index to stop searching at (defaults to the end of the list) Returns: int: The index of the value Raises: ValueError: If the value is not in the list or outside of start - end IndexError: If start or end are out of range """ try: index = self._dict[value] except KeyError: raise ValueError else: start = self._fix_neg_index(start) end = self._fix_end_index(end) if start <= index < end: return index else: raise ValueError @classmethod def _check_type(cls, other, operand_name): if not isinstance(other, SetList): message = ( "unsupported operand type(s) for {operand_name}: " "'{self_type}' and '{other_type}'").format( operand_name=operand_name, self_type=cls, other_type=type(other), ) raise TypeError(message) def __add__(self, other): self._check_type(other, '+') out = self.copy() out._extend(other) return out # Implement Set def issubset(self, other): """Report whether another set contains this set.""" return self <= other def issuperset(self, other): """Report whether this set contains another set.""" return self >= other def union(self, other): """Return the union of sets as a new set. (i.e. all elements that are in either set.) """ out = self.copy() out._update(other) return out def intersection(self, other): """Return the intersection of two sets as a new set. (i.e. all elements that are in both sets.) """ other = set(other) return self._from_iterable(item for item in self if item in other) def difference(self, other): """Return the difference of two or more sets as a new set. (i.e. all elements that are in this set but not the others.) 
""" other = set(other) return self._from_iterable(item for item in self if item not in other) def symmetric_difference(self, other): """Return the symmetric difference (disjuntive union) of two sets. (i.e. all elements that are in one set but not both.) """ return self.union(other) - self.intersection(other) def __sub__(self, other): self._check_type(other, '-') return self.difference(other) def __and__(self, other): self._check_type(other, '&') return self.intersection(other) def __or__(self, other): self._check_type(other, '|') return self.union(other) def __xor__(self, other): self._check_type(other, '^') return self.symmetric_difference(other) # Comparison def __eq__(self, other): if not isinstance(other, SetList): return False if not len(self) == len(other): return False for self_elem, other_elem in zip(self, other): if self_elem != other_elem: return False return True def __ne__(self, other): return not (self == other) # New methods def sub_index(self, sub, start=0, end=None): """Return the index of a subsequence. This runs in O(len(sub)) Args: sub (Sequence): An Iterable to search for start (int): The index at which to start the search end (int): The index at which to end the search Returns: int: The index of the first element of sub Raises: ValueError: If sub isn't a subsequence TypeError: If sub isn't iterable IndexError: If start or end are out of range """ start_index = self.index(sub[0], start, end) end = self._fix_end_index(end) if start_index + len(sub) > end: raise ValueError for i in range(1, len(sub)): if sub[i] != self[start_index + i]: raise ValueError return start_index def copy(self): """Return a shallow copy of the setlist.""" return self.__class__(self) class setlist(SetList, MutableSequence, MutableSet): """A mutable (unhashable) setlist. .. 
automethod:: __init__ """ def __str__(self): return '{[%s}]' % ', '.join(repr(v) for v in self) # Helper methods def _delete_all(self, elems_to_delete, raise_errors): indices_to_delete = set() for elem in elems_to_delete: try: elem_index = self._dict[elem] except KeyError: if raise_errors: raise ValueError('Passed values contain elements not in self') else: if elem_index in indices_to_delete: if raise_errors: raise ValueError('Passed vales contain duplicates') indices_to_delete.add(elem_index) self._delete_values_by_index(indices_to_delete) def _delete_values_by_index(self, indices_to_delete): deleted_count = 0 for i, elem in enumerate(self._list): if i in indices_to_delete: deleted_count += 1 del self._dict[elem] else: new_index = i - deleted_count self._list[new_index] = elem self._dict[elem] = new_index # Now remove deleted_count items from the end of the list if deleted_count: self._list = self._list[:-deleted_count] # Set/Sequence agnostic def pop(self, index=-1): """Remove and return the item at index.""" value = self._list.pop(index) del self._dict[value] return value def clear(self): """Remove all elements from self.""" self._dict = dict() self._list = list() # Implement MutableSequence def __setitem__(self, index, value): if isinstance(index, slice): old_values = self[index] for v in value: if v in self and v not in old_values: raise ValueError self._list[index] = value self._dict = {} for i, v in enumerate(self._list): self._dict[v] = i else: index = self._fix_neg_index(index) old_value = self._list[index] if value in self: if value == old_value: return else: raise ValueError del self._dict[old_value] self._list[index] = value self._dict[value] = index def __delitem__(self, index): if isinstance(index, slice): indices_to_delete = set(self.index(e) for e in self._list[index]) self._delete_values_by_index(indices_to_delete) else: index = self._fix_neg_index(index) value = self._list[index] del self._dict[value] for elem in self._list[index + 1:]: 
self._dict[elem] -= 1 del self._list[index] def insert(self, index, value): """Insert value at index. Args: index (int): Index to insert value at value: Value to insert Raises: ValueError: If value already in self IndexError: If start or end are out of range """ if value in self: raise ValueError index = self._fix_neg_index(index) self._dict[value] = index for elem in self._list[index:]: self._dict[elem] += 1 self._list.insert(index, value) def append(self, value): """Append value to the end. Args: value: Value to append Raises: ValueError: If value alread in self TypeError: If value isn't hashable """ self._append(value) def extend(self, values): """Append all values to the end. If any of the values are present, ValueError will be raised and none of the values will be appended. Args: values (Iterable): Values to append Raises: ValueError: If any values are already present or there are duplicates in the passed values. TypeError: If any of the values aren't hashable. """ self._extend(values) def __iadd__(self, values): """Add all values to the end of self. Args: values (Iterable): Values to append Raises: ValueError: If any values are already present """ self._check_type(values, '+=') self.extend(values) return self def remove(self, value): """Remove value from self. Args: value: Element to remove from self Raises: ValueError: if element is already present """ try: index = self._dict[value] except KeyError: raise ValueError('Value "%s" is not present.') else: del self[index] def remove_all(self, elems_to_delete): """Remove all elements from elems_to_delete, raises ValueErrors. See Also: discard_all Args: elems_to_delete (Iterable): Elements to remove. Raises: ValueError: If the count of any element is greater in elems_to_delete than self. TypeError: If any of the values aren't hashable. 
""" self._delete_all(elems_to_delete, raise_errors=True) def reverse(self): """Reverse the setlist in-place.""" self._list.reverse() for index, item in enumerate(self._list): self._dict[item] = index # Implement MutableSet def add(self, item): """Add an item. Note: This does not raise a ValueError for an already present value like append does. This is to match the behavior of set.add Args: item: Item to add Raises: TypeError: If item isn't hashable. """ self._add(item) def update(self, values): """Add all values to the end. If any of the values are present, silently ignore them (as opposed to extend which raises an Error). See also: extend Args: values (Iterable): Values to add Raises: TypeError: If any of the values are unhashable. """ self._update(values) def discard_all(self, elems_to_delete): """Discard all the elements from elems_to_delete. This is much faster than removing them one by one. This runs in O(len(self) + len(elems_to_delete)) Args: elems_to_delete (Iterable): Elements to discard. Raises: TypeError: If any of the values aren't hashable. """ self._delete_all(elems_to_delete, raise_errors=False) def discard(self, value): """Discard an item. Note: This does not raise a ValueError for a missing value like remove does. 
This matches the behavior of set.discard """ try: self.remove(value) except ValueError: pass def difference_update(self, other): """Update self to include only the difference with other.""" other = set(other) indices_to_delete = set() for i, elem in enumerate(self): if elem in other: indices_to_delete.add(i) if indices_to_delete: self._delete_values_by_index(indices_to_delete) def intersection_update(self, other): """Update self to include only the intersection with other.""" other = set(other) indices_to_delete = set() for i, elem in enumerate(self): if elem not in other: indices_to_delete.add(i) if indices_to_delete: self._delete_values_by_index(indices_to_delete) def symmetric_difference_update(self, other): """Update self to include only the symmetric difference with other.""" other = setlist(other) indices_to_delete = set() for i, item in enumerate(self): if item in other: indices_to_delete.add(i) for item in other: self.add(item) self._delete_values_by_index(indices_to_delete) def __isub__(self, other): self._check_type(other, '-=') self.difference_update(other) return self def __iand__(self, other): self._check_type(other, '&=') self.intersection_update(other) return self def __ior__(self, other): self._check_type(other, '|=') self.update(other) return self def __ixor__(self, other): self._check_type(other, '^=') self.symmetric_difference_update(other) return self # New methods def shuffle(self, random=None): """Shuffle all of the elements in self in place. Args: random: A function returning a random float in [0.0, 1.0). If none is passed, the default from `random.shuffle` will be used. """ random_.shuffle(self._list, random=random) for i, elem in enumerate(self._list): self._dict[elem] = i def sort(self, *args, **kwargs): """Sort this setlist in place.""" self._list.sort(*args, **kwargs) for index, value in enumerate(self._list): self._dict[value] = index def swap(self, i, j): """Swap the values at indices i & j. .. 
versionadded:: 1.0.3 """ i = self._fix_neg_index(i) j = self._fix_neg_index(j) self._list[i], self._list[j] = self._list[j], self._list[i] self._dict[self._list[i]] = i self._dict[self._list[j]] = j class frozensetlist(SetList, Hashable): """An immutable (hashable) setlist. .. automethod:: __init__ """ def __hash__(self): if not hasattr(self, '_hash_value'): self._hash_value = _util.hash_iterable(self) return self._hash_value collections-extended-2.0.2/docs/000077500000000000000000000000001417332243400165625ustar00rootroot00000000000000collections-extended-2.0.2/docs/.nojekyll000066400000000000000000000000001417332243400204000ustar00rootroot00000000000000collections-extended-2.0.2/docs/bags.rst000066400000000000000000000077711417332243400202440ustar00rootroot00000000000000.. py:currentmodule:: collections_extended bags (Multisets) ================ `bag` is a multiset_ implementation for Python. Currently, bags have constant time inclusion testing but can only contain hashable elements due to the implementation. .. _multiset: http://en.wikipedia.org/wiki/Multiset There are three classes provided: :class:`Bag` An abstract base class for bags. :class:`bag` A mutable (unhashable) bag. :class:`frozenbag` An immutable (implements :class:`collections.abc.Hashable`) version of a bag. Both classes implement :class:`collections.abc.Sized`, :class:`collections.abc.Iterable` and :class:`collections.abc.Container`. Both classes implement :class:`collections.abc.Collection` starting in Python 3.6 and the polyfilled :class:`Collection` for Python < 3.6. Set Operations -------------- :class:`bag` and :class:`frozenbag` use python operators for multiset operations: * `__add__` (`a + b`): The sum of two multisets * `__sub__` (`a - b`): The difference between a and b * `__and__` (`a & b`): The intersection of a and b * `__or__` (`a | b`): The union of a and b * `__xor__` (`a ^ b`): The symmetric difference between a and b :class:`bag` has the equivalent in-place operators defined. 
Comparison Methods ------------------ Bags are comparable only to other bags. Ordering comparisons are done setwise. .. testsetup:: >>> from collections_extended import bag .. code-block:: python >>> bag('ac') <= bag('ab') False >>> bag('ac') >= bag('ab') False >>> bag('a') <= bag('a') < bag('aa') True >>> bag('aa') <= bag('a') False Compared to existing similar implementations -------------------------------------------- collections.Counter ^^^^^^^^^^^^^^^^^^^ Counters don't really behave like Collections - Sized, Iterable, Containers .. testsetup:: >>> from collections import Counter >>> from collections_extended import bag Adding and Removing """"""""""""""""""" .. code-block:: python >>> c = Counter() >>> c['a'] += 1 >>> c['a'] -= 1 >>> 'a' in c True >>> b = bag() >>> b.add('a') >>> 'a' in b True >>> b.remove('a') >>> 'a' in b False ``len`` """"""" .. code-block:: python >>> c = Counter() >>> c['a'] += 1 >>> len(c) 1 >>> c['a'] -= 1 >>> len(c) 1 >>> c['a'] += 2 >>> len(c) 1 >>> len(Counter('aaabbc')) 3 >>> b = bag() >>> b.add('a') >>> len(b) 1 >>> b.remove('a') >>> len(b) 0 >>> len(bag('aaabbc')) 6 Iterating """"""""" .. code-block:: python >>> for item in Counter('aaa'): print(item) a >>> for item in bag('aaa'): print(item) a a a Compared to Standard Types -------------------------- bag vs. list ^^^^^^^^^^^^ * Inclusion testing is O(1) * Adding and removing elements is O(1) * Cannot add mutable elements * Elements aren't ordered bag vs. set ^^^^^^^^^^^ * Can add multiple instances of equal elements New Methods ----------- These are `bag` methods that are not implementing an abstract method from a standard Python ABC. ``num_unique_elements`` Returns the number of unique elements in the bag. O(1) ``unique_elements()`` Returns a set of all the unique elements in the bag. O(1) ``nlargest(n=None)`` Returns the n most common elements and their counts from most common to least. If n is None then all elements are returned. 
O(n log n) ``copy()`` Returns a shallow copy of self. O(self.num_unique_elements()) ``isdisjoint(other: Iterable)`` Tests if self is disjoint with any other Iterable. O(len(other)) ``issubset(other: Iterable)`` Tests if self is a subset of another Iterable. ``issuperset(other: Iterable)`` Tests if self is a superset of another Iterable. ``from_mapping(map: Mapping)`` Classmethod to create a bag from a Mapping that maps elements to counts. The following are only for mutable bags (not frozenbags). - ``pop()`` - ``add(elem)`` - ``discard(elem)`` - ``remove(elem)`` - ``clear()`` API --- Bag ^^^ .. autoclass:: Bag bag ^^^ .. autoclass:: bag frozenbag ^^^^^^^^^ .. autoclass:: frozenbag Views ^^^^^ .. autoclass:: CountsView :no-undoc-members: .. autoclass:: UniqueElementsView :no-undoc-members: collections-extended-2.0.2/docs/bijection.rst000066400000000000000000000012231417332243400212600ustar00rootroot00000000000000bijection ========= Bijections are functions that map keys to unique values, ie. one-to-one, onto functions. See: https://en.wikipedia.org/wiki/Bijection `bijection` maintains the inverse mapping on `bijection.inverse` which is itself an instance of `bijection`. Examples -------- .. code-block:: python >>> from collections_extended import bijection >>> bij = bijection({'a': 1, 'b': 2, 'c': 3}) >>> bij.inverse[2] 'b' >>> bij['a'] = 2 >>> bij == bijection({'a': 2, 'c': 3}) True >>> bij.inverse[1] = 'a' >>> bij == bijection({'a': 1, 'c': 3}) True >>> bij.inverse.inverse is bij True API --- .. autoclass:: collections_extended.bijection collections-extended-2.0.2/docs/changelog.rst000066400000000000000000000000341417332243400212400ustar00rootroot00000000000000.. include:: ../HISTORY.rst collections-extended-2.0.2/docs/conf.py000077500000000000000000000233331417332243400200700ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # complexity documentation build configuration file, created by # sphinx-quickstart on Tue Jul 9 22:26:36 2013. 
# # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os # If extensions (or modules to document with autodoc) are in another # directory, add these directories to sys.path here. If the directory is # relative to the documentation root, use os.path.abspath to make it # absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # Get the project root dir, which is the parent dir of this cwd = os.getcwd() project_root = os.path.dirname(cwd) # Insert the project root dir as the first element in the PYTHONPATH. # This lets us ensure that the source package is imported, and that its # version is used. sys.path.insert(0, project_root) import collections_extended from collections_extended._version import __version__ import alabaster # -- General configuration --------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinxcontrib.napoleon', 'sphinx.ext.doctest', 'alabaster', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'collections_extended' copyright = u'2021, Michael Lenzen' # The version info for the project you're documenting, acts as replacement # for |version| and |release|, also used in various other places throughout # the built documents. 
# # The short X.Y version. version = __version__.rsplit(',', 1)[0] # The full version, including alpha/beta/rc tags. release = __version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to # some non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. show_authors = True # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built # documents. #keep_warnings = False # -- Options for HTML output ------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'alabaster' # Theme options are theme-specific and customize the look and feel of a # theme further. For a list of options available for each theme, see the # documentation. 
html_theme_options = { 'logo': 'logo.svg', 'github_user': 'mlenzen', 'github_repo': 'collections-extended', 'description': 'Extra Python Collections - bags (multisets), setlists (ordered sets) and RangeMap', 'github_banner': True, 'travis_button': True, 'coveralls_button': True, 'analytics_id': 'UA-4495487-4', 'pypi_name': 'collections-extended', } autodoc_default_options = { 'members': True, 'member-order': 'bysource', 'undoc-members': True, 'exclude-members': '__weakref__', # 'inherited-members': True, # 'special-members': True, 'show-inheritance': True, } # Add any paths that contain custom themes here, relative to this directory. html_theme_path = [alabaster.get_path()] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as # html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the # top of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon # of the docs. This file should be a Windows icon file (.ico) being # 16x16 or 32x32 pixels large. html_favicon = '../artwork/favicon.ico' # Add any paths that contain custom static files (such as style sheets) # here, relative to this directory. They are copied after the builtin # static files, so a file named "default.css" will overwrite the builtin # "default.css". html_static_path = ['../artwork'] # If not '', a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. html_last_updated_fmt = '%Y-%b-%d' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. 
html_sidebars = { '**': [ 'about.html', 'navigation.html', 'relations.html', 'searchbox.html', 'donate.html', ] } # Additional templates that should be rendered to pages, maps page names # to template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. # Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. # Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages # will contain a tag referring to it. The value of this option # must be the base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'python-collections-extendeddoc' # Napoleon settings napoleon_google_docstring = True napoleon_numpy_docstring = True napoleon_include_init_with_doc = True napoleon_include_private_with_doc = False napoleon_include_special_with_doc = True napoleon_use_admonition_for_examples = False napoleon_use_admonition_for_notes = False napoleon_use_admonition_for_references = False napoleon_use_ivar = False napoleon_use_param = True napoleon_use_rtype = True # -- Options for LaTeX output ------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. 
List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [ ('index', 'python-collections-extended.tex', u'collections extended Documentation', u'Michael Lenzen', 'manual'), ] # The name of an image file (relative to this directory) to place at # the top of the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings # are parts, not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output ------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'python-collections-extended', u'collections extended Documentation', [u'Michael Lenzen'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ---------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'python-collections-extended', u'collections extended Documentation', u'Michael Lenzen', 'python-collections-extended', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. 
#texinfo_no_detailmenu = False collections-extended-2.0.2/docs/contributing.rst000066400000000000000000000000411417332243400220160ustar00rootroot00000000000000.. include:: ../CONTRIBUTING.rst collections-extended-2.0.2/docs/factory.rst000066400000000000000000000024251417332243400207660ustar00rootroot00000000000000.. currentmodule:: collections_extended Collection Factory ================== `collections_extended` also provides a collection factory. Combining Python's standard collections with bags and setlists allows you to create a collection with any combination of ordered, unique and mutable. ================================================= ======= ======= ====== Collection Mutable Ordered Unique ================================================= ======= ======= ====== :class:`list` ✔ ✔ :class:`tuple` ✔ :class:`set` ✔ ✔ :class:`frozenset` ✔ :class:`bag` ✔ :class:`frozen_bag` :class:`setlist` ✔ ✔ ✔ :class:`frozensetlist` ✔ ✔ ================================================= ======= ======= ====== API --- .. autofunction:: collections_extended.collection Collection abstract base class from :mod:`collections.abc` for Python >= 3.6 and backported to < 3.6 collections-extended-2.0.2/docs/getting_started.rst000066400000000000000000000027561417332243400225150ustar00rootroot00000000000000Getting Started =============== Installation ------------ ``pip install collections-extended`` Usage ----- ``from collections_extended import bag, frozenbag, setlist, frozensetlist, bijection, RangeMap`` Examples -------- .. 
code-block:: python >>> from collections_extended import bag, setlist, bijection, RangeMap >>> from datetime import date >>> b = bag('abracadabra') >>> b.count('a') 5 >>> b.remove('a') >>> b.count('a') 4 >>> 'a' in b True >>> b.count('d') 1 >>> b.remove('d') >>> b.count('d') 0 >>> 'd' in b False >>> sl = setlist('abracadabra') >>> sl setlist(('a', 'b', 'r', 'c', 'd')) >>> sl[3] 'c' >>> sl[-1] 'd' >>> 'r' in sl # testing for inclusion is fast True >>> sl.index('d') # so is finding the index of an element 4 >>> sl.insert(1, 'd') # inserting an element already in raises a ValueError Traceback (most recent call last): ... raise ValueError ValueError >>> sl.index('d') 4 >>> bij = bijection({'a': 1, 'b': 2, 'c': 3}) >>> bij.inverse[2] 'b' >>> bij['a'] = 2 >>> bij == bijection({'a': 2, 'c': 3}) True >>> bij.inverse[1] = 'a' >>> bij == bijection({'a': 1, 'c': 3}) True >>> version = RangeMap() >>> version[date(2017, 10, 20): date(2017, 10, 27)] = '0.10.1' >>> version[date(2017, 10, 27): date(2018, 2, 14)] = '1.0.0' >>> version[date(2018, 2, 14):] = '1.0.1' >>> version[date(2017, 10, 24)] '0.10.1' >>> version[date(2018, 7, 1)] '1.0.1' >>> version[date(2018, 6, 30):] = '1.0.2' >>> version[date(2018, 7, 1)] '1.0.2' collections-extended-2.0.2/docs/index.rst000066400000000000000000000025471417332243400204330ustar00rootroot00000000000000``collections_extended`` documentation ====================================== ``collections_extended`` is a pure Python module with no dependencies providing - a ``bag`` class, AKA **multiset**, - a ``setlist`` class, which is a **unique list** or **ordered set**, - a ``bijection`` class, - a ``RangeMap`` which is a mapping from ranges to values, and - a ``IndexedDict`` class. There are also frozen (hashable) varieties of bags and setlists. It is `tested against`_ Python 3.6, 3.7, 3.8, 3.9, 3.10 & PyPy3. The current version no longer supports Python 2, install a 1.x version for a Python 2.7 compatible version. 
New features will not be developed but serious bugs may be fixed. Contents: .. toctree:: :maxdepth: 3 getting_started setlists bags range_map bijection indexed_dict sentinel factory contributing changelog Other Packages of Interest ========================== - http://stutzbachenterprises.com/blist/ - b+ trees - https://bitbucket.org/mozman/bintrees - Binary search trees - https://bidict.readthedocs.org/en/master/ - bijections - http://www.grantjenks.com/docs/sortedcollections/ - sortedcollections - http://www.grantjenks.com/docs/sortedcontainers/ - sortedcontainers :Author: Michael Lenzen :Copyright: 2021 Michael Lenzen :License: Apache License, Version 2.0 .. _`tested against`: https://travis-ci.org/mlenzen/collections-extended collections-extended-2.0.2/docs/indexed_dict.rst000066400000000000000000000023751417332243400217460ustar00rootroot00000000000000IndexedDicts ============ IndexedDict is an ordered mapping whose elements can be accessed using index, in addition to key. The interface is mostly a generalization of :class:`collections.OrderedDict`. Differences from OrderedDict ---------------------------- Methods ``get``, ``pop`` and ``move_to_end`` have a different signature from OrderedDict, allowing exactly one of ``index`` or ``key`` argument to be used. This causes the IndexedDict to not be a drop in replacement to OrderedDict. New Methods ^^^^^^^^^^^ ``fast_pop`` Remove an item with given key and value from the IndexedDict by first swapping the item to the last position and then removing it. Returns tuple of ``(popped_value, new_moved_index, moved_key, moved_value)``. Time complexity of this operation is O(1). ``index`` Return index of a record with given key. ``key`` Return key of a record at given index. Time Complexity --------------- IndexedDict generally combines time complexity of dict and list. Indexed lookups cost list's O(1), keyed lookups cost average case O(1) and worst case O(n) of dict. 
Deleting an element has a time complexity of O(1) if it is the last added one, or O(n) in general, in addition to the lookup cost. API --- .. autoclass:: collections_extended.IndexedDict collections-extended-2.0.2/docs/range_map.rst000066400000000000000000000035711417332243400212530ustar00rootroot00000000000000range_map ========= A RangeMap maps ranges to values. Keys must be hashable and comparable to all other keys (but not necessarily the same type). Each range `a:b` maps all values `a <= x < b` so it includes `a` but not `b`. Examples -------- .. code-block:: python >>> from collections_extended import RangeMap >>> from datetime import date >>> version = RangeMap() >>> version[date(2017, 10, 20): date(2017, 10, 27)] = '0.10.1' >>> version[date(2017, 10, 27): date(2018, 2, 14)] = '1.0.0' >>> version[date(2018, 2, 14):] = '1.0.1' >>> version[date(2017, 10, 24)] '0.10.1' >>> version[date(2018, 7, 1)] '1.0.1' >>> version[date(2018, 6, 30):] = '1.0.2' >>> version[date(2018, 7, 1)] '1.0.2' Creating RangeMaps ------------------ RangeMaps can be passed a mapping upon creation. Each key, value pair is interpreted as the start of a range and the mapped value. The end of the range is the next largest key in the mapping. RangeMaps can also be created from ``RangeMap.from_iterable(iterable)`` where the iterable's elements are tuples (start, stop, value). A start or stop key of None denotes an open range, ie. a start key of None is analgous to -infinity if the keys are all numbers. Quirks ------ Python 2 vs 3 ~~~~~~~~~~~~~ Slice notation is not implemented for get, set and delete in python 2 and raises a SyntaxError when used. This is because Python 2 assumes slices are integers and replaces open slices with 0 and maxint. Instead use ``RangeMap.set``, ``RangeMap.delete`` and ``RangeMap.get_range``. PyPy ~~~~ pypy (not pypy3) cannot accept non-ints in __getitem__ so RangeMap[1.5:3] doesn't work. 
Implementation -------------- RangeMaps are backed by lists of the keys, so it's only fast to add/remove the greatest values in the range (the end of the list). API --- .. autoclass:: collections_extended.RangeMap .. autoclass:: collections_extended.MappedRange :no-undoc-members: collections-extended-2.0.2/docs/sentinel.rst000066400000000000000000000010761417332243400211410ustar00rootroot00000000000000Sentinel ======== This is a small class that improves upon using a plain object as a sentinel for function arguments when None is a valid parameter and can't be used as the default. The improvements vs. a plain object are: * Better ``__str__`` and ``__repr__`` for better messages in logs and stacktraces. Instead of something like ``''`` you get ``''`` * Sentinels are picklable See: https://en.wikipedia.org/wiki/Sentinel_value Inspired by: https://pypi.org/project/sentinels/ .. autoclass:: collections_extended.sentinel collections-extended-2.0.2/docs/setlists.rst000066400000000000000000000101031417332243400211610ustar00rootroot00000000000000.. currentmodule:: collections_extended setlists ======== A :class:`setlist` is an ordered, indexed collection with unique elements. It it more than just an **ordered Set** in that the elements are accessible by index (ie. not just a linked set). However, :class:`setlist`'s are not comparable like sets or lists. Equality testing still works, but ``setlist(('a', 'c')) < setlist(('a', 'b'))`` does not because we'd have to choose to compare by order or by set comparison. There are two classes provided: :class:`collections_extended.setlist` This is a mutable setlist :class:`collections_extended.frozensetlist` This is a frozen (implements :class:`collections.abc.Hashable`) version of a setlist. Both classes implement :class:`collections.abc.Sequence`, :class:`collections.abc.Set` Examples -------- .. 
code-block:: python >>> from collections_extended import setlist >>> import string >>> sl = setlist(string.ascii_lowercase) >>> sl setlist(('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z')) >>> sl[3] 'd' >>> sl[-1] 'z' >>> 'r' in sl # testing for inclusion is fast True >>> sl.index('m') # so is finding the index of an element 12 >>> sl.insert(1, 'd') # inserting an element already in raises a ValueError Traceback (most recent call last): ... raise ValueError ValueError >>> sl.index('d') 3 Compared to existing similar implementations -------------------------------------------- Most implementations I've see are ordered sets where items are not accessible by index. Compared to Standard Types -------------------------- setlist vs. list ^^^^^^^^^^^^^^^^ * Inclusion testing is O(1) * Finding an element is O(1) * Adding an element that is already present raises a ValueError setlist vs. set ^^^^^^^^^^^^^^^ * Elements are ordered and accessible by index * Adding an element is as slow as adding to a list * Amortized O(n) for arbitrary insertions * O(1) for appending New Methods ----------- Swapping values doesn't work (see `Quirks`_) so some things don't work. To work around that a couple of methods were added: * :meth:`setlist.swap(i, j)` to swap elements * :meth:`setlist.shuffle(random=None)` instead of `random.shuffle(setlist)` Errors ------ Some methods will raise a :exc:`ValueError` when trying to add or remove elements when they respectively already or do not currently exist in the setlist. Each method has an analogous version that does/doesn't raise a ValueError. Methods implementing the `Set` methods do not raise :exc:`ValueError` while the one's implementing `Sequence` do. All will raise ``TypeError`` if you use unhashable values. The bulk operations are atomic, if any single value is unhashable or a duplicate, no changes will be made to the :class:`setlist`. 
======================== =============== ================= Raises :exc:`ValueError` No Yes Interface :class:`Set` :class:`Sequence` ======================== =============== ================= Add a single value ``add`` ``append`` Add multiple values ``update`` ``extend`` Remove a single value ``discard`` ``remove`` Remove multiple values ``discard_all`` ``remove_all`` ======================== =============== ================= The setlist constructor by defualt does not raise :exc:`ValueError` on duplicate values because we have to choose one or the other and this matches the behavior of Set. There is a flag ``raise_on_duplicate`` that can be passed to ``__init__`` to raise a :exc:`ValueError` if duplicate values are passed. Quirks ------ * Swapping elements, eg. ``sl[0], sl[1] = sl[1], sl[0]``, doesn't work because it is implemented by first setting one element then the other. But since the first element it tries to set is still in the setlist, nothing happens. This causes random.shuffle not to work on a setlist. API --- SetList ^^^^^^^ .. autoclass:: collections_extended.SetList setlist ^^^^^^^ .. autoclass:: collections_extended.setlist frozensetlist ^^^^^^^^^^^^^ .. autoclass:: collections_extended.frozensetlist collections-extended-2.0.2/poetry.lock000066400000000000000000001410331417332243400200300ustar00rootroot00000000000000[[package]] name = "alabaster" version = "0.7.17" description = "A configurable sidebar-enabled Sphinx theme" category = "dev" optional = false python-versions = "*" develop = false [package.source] type = "git" url = "https://github.com/mlenzen/alabaster.git" reference = "master" resolved_reference = "63c5f262ea46062fcfba445e848fb7c5657c671e" [[package]] name = "atomicwrites" version = "1.4.0" description = "Atomic file writes." 
category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "attrs" version = "21.4.0" description = "Classes Without Boilerplate" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] [[package]] name = "babel" version = "2.9.1" description = "Internationalization utilities" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.dependencies] pytz = ">=2015.7" [[package]] name = "bump2version" version = "1.0.1" description = "Version-bump your software with a single command!" category = "dev" optional = false python-versions = ">=3.5" [[package]] name = "bumpversion" version = "0.6.0" description = "Version-bump your software with a single command!" category = "dev" optional = false python-versions = "*" [package.dependencies] bump2version = "*" [[package]] name = "certifi" version = "2021.10.8" description = "Python package for providing Mozilla's CA Bundle." category = "dev" optional = false python-versions = "*" [[package]] name = "charset-normalizer" version = "2.0.10" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "dev" optional = false python-versions = ">=3.5.0" [package.extras] unicode_backport = ["unicodedata2"] [[package]] name = "colorama" version = "0.4.4" description = "Cross-platform colored terminal text." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "coverage" version = "5.5" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] toml = ["toml"] [[package]] name = "distlib" version = "0.3.4" description = "Distribution utilities" category = "dev" optional = false python-versions = "*" [[package]] name = "docutils" version = "0.16" description = "Docutils -- Python Documentation Utilities" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "filelock" version = "3.4.2" description = "A platform independent file lock." 
category = "dev" optional = false python-versions = ">=3.7" [package.extras] docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] [[package]] name = "flake8" version = "3.9.2" description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} mccabe = ">=0.6.0,<0.7.0" pycodestyle = ">=2.7.0,<2.8.0" pyflakes = ">=2.3.0,<2.4.0" [[package]] name = "flake8-docstrings" version = "1.6.0" description = "Extension for flake8 which uses pydocstyle to check docstrings" category = "dev" optional = false python-versions = "*" [package.dependencies] flake8 = ">=3" pydocstyle = ">=2.1" [[package]] name = "hypothesis" version = "6.36.0" description = "A library for property-based testing" category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] attrs = ">=19.2.0" sortedcontainers = ">=2.1.0,<3.0.0" [package.extras] all = ["black (>=19.10b0)", "click (>=7.0)", "django (>=2.2)", "dpcontracts (>=0.4)", "lark-parser (>=0.6.5)", "libcst (>=0.3.16)", "numpy (>=1.9.0)", "pandas (>=0.25)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "importlib-metadata (>=3.6)", "backports.zoneinfo (>=0.2.1)", "tzdata (>=2021.5)"] cli = ["click (>=7.0)", "black (>=19.10b0)", "rich (>=9.0.0)"] codemods = ["libcst (>=0.3.16)"] dateutil = ["python-dateutil (>=1.4)"] django = ["django (>=2.2)"] dpcontracts = ["dpcontracts (>=0.4)"] ghostwriter = ["black (>=19.10b0)"] lark = ["lark-parser (>=0.6.5)"] numpy = ["numpy (>=1.9.0)"] pandas = ["pandas (>=0.25)"] pytest = ["pytest (>=4.6)"] pytz = ["pytz (>=2014.1)"] redis = ["redis (>=3.0.0)"] zoneinfo = ["backports.zoneinfo (>=0.2.1)", 
"tzdata (>=2021.5)"] [[package]] name = "idna" version = "3.3" description = "Internationalized Domain Names in Applications (IDNA)" category = "dev" optional = false python-versions = ">=3.5" [[package]] name = "imagesize" version = "1.3.0" description = "Getting image size from png/jpeg/jpeg2000/gif file" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" version = "4.10.1" description = "Read metadata from Python packages" category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] perf = ["ipython"] testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] name = "iniconfig" version = "1.1.1" description = "iniconfig: brain-dead simple config-ini parsing" category = "dev" optional = false python-versions = "*" [[package]] name = "jinja2" version = "3.0.3" description = "A very fast and expressive template engine." category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] [[package]] name = "markupsafe" version = "2.0.1" description = "Safely add untrusted strings to HTML/XML markup." 
category = "dev" optional = false python-versions = ">=3.6" [[package]] name = "mccabe" version = "0.6.1" description = "McCabe checker, plugin for flake8" category = "dev" optional = false python-versions = "*" [[package]] name = "packaging" version = "21.3" description = "Core utilities for Python packages" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "platformdirs" version = "2.4.1" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" [package.extras] docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] [[package]] name = "pluggy" version = "1.0.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] name = "pockets" version = "0.9.1" description = "A collection of helpful Python tools!" 
category = "dev" optional = false python-versions = "*" [package.dependencies] six = ">=1.5.2" [[package]] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pycodestyle" version = "2.7.0" description = "Python style guide checker" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydocstyle" version = "6.1.1" description = "Python docstring style checker" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] snowballstemmer = "*" [package.extras] toml = ["toml"] [[package]] name = "pyflakes" version = "2.3.1" description = "passive checker of Python programs" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" version = "2.11.2" description = "Pygments is a syntax highlighting package written in Python." 
category = "dev" optional = false python-versions = ">=3.5" [[package]] name = "pyparsing" version = "3.0.6" description = "Python parsing module" category = "dev" optional = false python-versions = ">=3.6" [package.extras] diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" version = "6.2.5" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" py = ">=1.8.2" toml = "*" [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] [[package]] name = "pytz" version = "2021.3" description = "World timezone definitions, modern and historical" category = "dev" optional = false python-versions = "*" [[package]] name = "requests" version = "2.27.1" description = "Python HTTP for Humans." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
category = "dev" optional = false python-versions = "*" [[package]] name = "sortedcontainers" version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" category = "dev" optional = false python-versions = "*" [[package]] name = "sphinx" version = "3.5.4" description = "Python documentation generator" category = "dev" optional = false python-versions = ">=3.5" [package.dependencies] alabaster = ">=0.7,<0.8" babel = ">=1.3" colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} docutils = ">=0.12,<0.17" imagesize = "*" Jinja2 = ">=2.3" packaging = "*" Pygments = ">=2.0" requests = ">=2.5.0" snowballstemmer = ">=1.1" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = "*" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" sphinxcontrib-serializinghtml = "*" [package.extras] docs = ["sphinxcontrib-websupport"] lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.800)", "docutils-stubs"] test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] [[package]] name = "sphinxcontrib-applehelp" version = "1.0.2" description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" category = "dev" optional = false python-versions = ">=3.5" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
category = "dev" optional = false python-versions = ">=3.5" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" version = "2.0.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" category = "dev" optional = false python-versions = ">=3.6" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest", "html5lib"] [[package]] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" category = "dev" optional = false python-versions = ">=3.5" [package.extras] test = ["pytest", "flake8", "mypy"] [[package]] name = "sphinxcontrib-napoleon" version = "0.7" description = "Sphinx \"napoleon\" extension." category = "dev" optional = false python-versions = "*" [package.dependencies] pockets = ">=0.3" six = ">=1.5.2" [[package]] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." category = "dev" optional = false python-versions = ">=3.5" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
category = "dev" optional = false python-versions = ">=3.5" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "tox" version = "3.24.5" description = "tox is a generic virtualenv management and test command line tool" category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""} filelock = ">=3.0.0" importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} packaging = ">=14" pluggy = ">=0.12.0" py = ">=1.4.17" six = ">=1.14.0" toml = ">=0.9.4" virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7" [package.extras] docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "psutil (>=5.6.1)", "pathlib2 (>=2.3.3)"] [[package]] name = "typing-extensions" version = "4.0.1" description = "Backported and Experimental Type Hints for Python 3.6+" category = "dev" optional = false python-versions = ">=3.6" [[package]] name = "urllib3" version = "1.26.8" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] brotli = ["brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" version = "20.13.0" description = "Virtual Python Environment builder" category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] distlib = ">=0.3.1,<1" filelock = ">=3.2,<4" importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} platformdirs = ">=2,<3" six = ">=1.9.0,<2" [package.extras] docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] [[package]] name = "zipp" version = "3.7.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false python-versions = ">=3.7" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [extras] docs = [] testing = [] [metadata] lock-version = "1.1" python-versions = "^3.7" content-hash = "576176e364da0371f97964943f6f8f83c406eec1f5f3f127d04b9751416cf54d" [metadata.files] alabaster = [] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = 
"sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] babel = [ {file = "Babel-2.9.1-py2.py3-none-any.whl", hash = "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9"}, {file = "Babel-2.9.1.tar.gz", hash = "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"}, ] bump2version = [ {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"}, ] bumpversion = [ {file = "bumpversion-0.6.0-py2.py3-none-any.whl", hash = "sha256:4eb3267a38194d09f048a2179980bb4803701969bff2c85fa8f6d1ce050be15e"}, {file = "bumpversion-0.6.0.tar.gz", hash = "sha256:4ba55e4080d373f80177b4dabef146c07ce73c7d1377aabf9d3c3ae1f94584a6"}, ] certifi = [ {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, ] charset-normalizer = [ {file = "charset-normalizer-2.0.10.tar.gz", hash = "sha256:876d180e9d7432c5d1dfd4c5d26b72f099d503e8fcc0feb7532c9289be60fcbd"}, {file = "charset_normalizer-2.0.10-py3-none-any.whl", hash = "sha256:cb957888737fc0bbcd78e3df769addb41fd1ff8cf950dc9e7ad7793f1bf44455"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] coverage = [ {file = 
"coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", 
hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, {file 
= "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, ] distlib = [ {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, ] docutils = [ {file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"}, {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"}, ] filelock = [ {file = "filelock-3.4.2-py3-none-any.whl", hash = "sha256:cf0fc6a2f8d26bd900f19bf33915ca70ba4dd8c56903eeb14e1e7a2fd7590146"}, {file = "filelock-3.4.2.tar.gz", hash = "sha256:38b4f4c989f9d06d44524df1b24bd19e167d851f19b50bf3e3559952dddc5b80"}, ] flake8 = [ {file = 
"flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] flake8-docstrings = [ {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, ] hypothesis = [ {file = "hypothesis-6.36.0-py3-none-any.whl", hash = "sha256:fa9f845b06199ea87e68c6da04a609ff46e381b5d542351184790d54eaca144c"}, {file = "hypothesis-6.36.0.tar.gz", hash = "sha256:2b9c56faa067d660f0802679689f825bf142eec8261ab9e2e6ea916b1d8278a1"}, ] idna = [ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] imagesize = [ {file = "imagesize-1.3.0-py2.py3-none-any.whl", hash = "sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c"}, {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"}, ] importlib-metadata = [ {file = "importlib_metadata-4.10.1-py3-none-any.whl", hash = "sha256:899e2a40a8c4a1aec681feef45733de8a6c58f3f6a0dbed2eb6574b4387a77b6"}, {file = "importlib_metadata-4.10.1.tar.gz", hash = "sha256:951f0d8a5b7260e9db5e41d429285b5f451e928479f19d80818878527d36e95e"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] jinja2 = [ {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, {file = 
"Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, ] markupsafe = [ {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = 
"MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = 
"sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, {file = 
"MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, {file = 
"MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] platformdirs = [ {file = "platformdirs-2.4.1-py3-none-any.whl", hash = "sha256:1d7385c7db91728b83efd0ca99a5afb296cab9d0ed8313a45ed8ba17967ecfca"}, {file = "platformdirs-2.4.1.tar.gz", hash = "sha256:440633ddfebcc36264232365d7840a970e75e1018d15b4327d11f91909045fda"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = 
"sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] pockets = [ {file = "pockets-0.9.1-py2.py3-none-any.whl", hash = "sha256:68597934193c08a08eb2bf6a1d85593f627c22f9b065cc727a4f03f669d96d86"}, {file = "pockets-0.9.1.tar.gz", hash = "sha256:9320f1a3c6f7a9133fe3b571f283bcf3353cd70249025ae8d618e40e9f7e92b3"}, ] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] pycodestyle = [ {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] pydocstyle = [ {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, ] pyflakes = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] pygments = [ {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, ] pyparsing = [ {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, {file = "pyparsing-3.0.6.tar.gz", hash = "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"}, ] pytest = [ {file = "pytest-6.2.5-py3-none-any.whl", hash = 
"sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, ] pytz = [ {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, ] requests = [ {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] sortedcontainers = [ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, ] sphinx = [ {file = "Sphinx-3.5.4-py3-none-any.whl", hash = "sha256:2320d4e994a191f4b4be27da514e46b3d6b420f2ff895d064f52415d342461e8"}, {file = "Sphinx-3.5.4.tar.gz", hash = "sha256:19010b7b9fa0dc7756a6e105b2aacd3a80f798af3c25c273be64d7beeb482cb1"}, ] sphinxcontrib-applehelp = [ {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = 
"sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, ] sphinxcontrib-devhelp = [ {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, ] sphinxcontrib-htmlhelp = [ {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, ] sphinxcontrib-jsmath = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, ] sphinxcontrib-napoleon = [ {file = "sphinxcontrib-napoleon-0.7.tar.gz", hash = "sha256:407382beed396e9f2d7f3043fad6afda95719204a1e1a231ac865f40abcbfcf8"}, {file = "sphinxcontrib_napoleon-0.7-py2.py3-none-any.whl", hash = "sha256:711e41a3974bdf110a484aec4c1a556799eb0b3f3b897521a018ad7e2db13fef"}, ] sphinxcontrib-qthelp = [ {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, ] sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, ] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = 
"sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tox = [ {file = "tox-3.24.5-py2.py3-none-any.whl", hash = "sha256:be3362472a33094bce26727f5f771ca0facf6dafa217f65875314e9a6600c95c"}, {file = "tox-3.24.5.tar.gz", hash = "sha256:67e0e32c90e278251fea45b696d0fef3879089ccbe979b0c556d35d5a70e2993"}, ] typing-extensions = [ {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, ] urllib3 = [ {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, ] virtualenv = [ {file = "virtualenv-20.13.0-py2.py3-none-any.whl", hash = "sha256:339f16c4a86b44240ba7223d0f93a7887c3ca04b5f9c8129da7958447d079b09"}, {file = "virtualenv-20.13.0.tar.gz", hash = "sha256:d8458cf8d59d0ea495ad9b34c2599487f8a7772d796f9910858376d1600dd2dd"}, ] zipp = [ {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, ] collections-extended-2.0.2/pyproject.toml000066400000000000000000000032371417332243400205530ustar00rootroot00000000000000[tool.poetry] name = "collections-extended" version = "2.0.2" description = "Extra Python Collections - bags (multisets) and setlists (ordered sets)" authors = ["Michael Lenzen "] license = "Apache-2.0" readme = "README.rst" documentation = "https://collections-extended.lenzm.net" repository = "https://github.com/mlenzen/collections-extended" keywords = [ "collections", "bag", 
"multiset", "ordered set", "unique list", ] classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Software Development", "Topic :: Software Development :: Libraries", "Topic :: Software Development :: Libraries :: Python Modules", ] [tool.poetry.dependencies] python = "^3.7" [tool.poetry.dev-dependencies] pytest = "^6.2.5" bumpversion = "^0.6.0" tox = "^3.21.4" flake8 = "^3.8.4" flake8-docstrings = "^1.5.0" coverage = "^5.4" Sphinx = {version = "^3.4.3"} sphinxcontrib-napoleon = {version = "^0.7"} alabaster = {git = "https://github.com/mlenzen/alabaster.git"} hypothesis = "^6.36.0" [tool.poetry.extras] docs = ["Sphinx", "sphinxcontrib-napoleon", "alabaster"] testing = ["pytest", "tox"] [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" collections-extended-2.0.2/tests/000077500000000000000000000000001417332243400167745ustar00rootroot00000000000000collections-extended-2.0.2/tests/test_bags.py000066400000000000000000000230241417332243400213220ustar00rootroot00000000000000"""Test for bag classes.""" from operator import concat, mul import pytest from collections_extended.bags import Bag, bag, frozenbag def test_init(): """Test __init__.""" b = Bag('abracadabra') assert b.count('a') == 5 assert b.count('b') == 2 assert b.count('r') == 2 assert b.count('c') == 1 assert b.count('d') == 1 b2 = bag(b) assert b2 == b def test_repr(): """Test __repr__.""" ms = Bag() assert ms == eval(ms.__repr__()) ms = Bag('abracadabra') assert ms == eval(ms.__repr__()) 
assert repr(bag('a')) == "bag(('a',))" def test_str(): """Test __str__.""" def compare_bag_string(b): s = str(b) assert s.startswith('{') assert s.endswith('}') return set(s[1:-1].split(', ')) assert str(Bag()) == 'Bag()' assert "'a'^5" in str(Bag('abracadabra')) assert "'b'^2" in str(Bag('abracadabra')) assert "'c'" in str(Bag('abracadabra')) abra_elems = set(("'a'^5", "'b'^2", "'r'^2", "'c'", "'d'")) assert compare_bag_string(bag('abracadabra')) == abra_elems def test_count(): """Test count.""" ms = Bag('abracadabra') assert ms.count('a') == 5 assert ms.count('x') == 0 def test_nlargest(): """Test nlargest.""" abra = Bag('abracadabra') sort_key = lambda e: (-e[1], e[0]) abra_counts = [('a', 5), ('b', 2), ('r', 2), ('c', 1), ('d', 1)] assert sorted(abra.nlargest(), key=sort_key) == abra_counts assert sorted(abra.nlargest(3), key=sort_key) == abra_counts[:3] assert Bag('abcaba').nlargest(3) == [('a', 3), ('b', 2), ('c', 1)] def test_nlargest_deprecated(): """Test that nlargest raises a DeprecationWarning.""" b = bag() with pytest.deprecated_call(): b.nlargest() def test_from_map(): """Test from_mapping.""" assert Bag.from_mapping({'a': 1, 'b': 2}) == Bag('abb') assert Bag.from_mapping({'a': 1, 'b': 2, 'c': 0}) == Bag('abb') def test_copy(): """Test copy.""" b = Bag() assert b.copy() == b assert b.copy() is not b b = Bag('abc') assert b.copy() == b assert b.copy() is not b def test_len(): """Test __len__.""" assert len(Bag()) == 0 assert len(Bag('abc')) == 3 assert len(Bag('aaba')) == 4 def test_contains(): """Test __contains__.""" assert 'a' in Bag('bbac') assert 'a' not in Bag() assert 'a' not in Bag('missing letter') @pytest.mark.parametrize("bag_data, set_data", [ ('', ''), ('a', 'a'), ('ab', 'ab'), ]) def test_compare_eq_set(bag_data, set_data): """Test comparisons to Sets that should be not equal.""" assert Bag(bag_data) != set(set_data) @pytest.mark.parametrize("bag_data, set_data", [ ('ab', 'a'), ('a', 'ab'), ('aa', 'a'), ('aa', 'ab'), ('ac', 'ab'), ]) def 
test_compare_ne_set(bag_data, set_data): assert not Bag(bag_data) == set(set_data) def test_compare_unorderable(): assert not Bag('ac') <= Bag('ab') assert not Bag('ac') >= Bag('ab') def test_rich_comp_equal(): """Test rich comparisons for equal bags.""" assert Bag() <= Bag() assert not Bag() < Bag() assert Bag() >= Bag() assert not Bag() > Bag() b1 = Bag('aabc') b2 = Bag('aabc') assert not b2 > b1 assert b2 >= b1 assert not b2 < b1 assert b2 <= b1 def test_rich_comp_superset(): """Test rich comparisons for bags that are supersets of other bags.""" b1 = Bag('aabc') b2 = Bag('abc') assert b1 > b2 assert b1 >= b2 assert not b1 < b2 assert not b1 <= b2 def test_rich_comp_subset(): """Test rich comparisons for bags that are subsets of other bags.""" b1 = Bag('abc') b2 = Bag('aabc') assert not b1 > b2 assert not b1 >= b2 assert b1 < b2 assert b1 <= b2 def test_rich_comp_unorderable_eq_len(): """Test rich comparisons for bags of equal length but unorderable.""" b1 = Bag('abb') b2 = Bag('abc') assert not b1 < b2 assert not b1 <= b2 assert not b1 > b2 assert not b1 >= b2 assert not b1 == b2 assert b1 != b2 def test_rich_comp_unorderable_diff_len(): """Test rich comparisons for bags of unequal length and unorderable.""" b1 = Bag('abd') b2 = Bag('aabc') assert not b1 > b2 assert not b1 >= b2 assert not b1 < b2 assert not b1 <= b2 assert not b2 > b1 assert not b2 >= b1 assert not b2 < b1 assert not b2 <= b1 assert not b1 == b2 assert b1 != b2 def test_rich_comp_type_mismatch(): """Test rich comparisons for bags with type mismatches.""" with pytest.raises(TypeError): bag('abc') < 'abc' with pytest.raises(TypeError): bag('abc') <= 'abc' with pytest.raises(TypeError): bag('abc') > 'abc' with pytest.raises(TypeError): bag('abc') >= 'abc' with pytest.raises(TypeError): 'abc' < bag('abc') with pytest.raises(TypeError): 'abc' <= bag('abc') with pytest.raises(TypeError): 'abc' > bag('abc') with pytest.raises(TypeError): 'abc' >= bag('abc') assert not bag('abc') == 'abc' assert not 
'abc' == bag('abc') def test_comparison_chaining(): assert bag('a') < bag('aa') <= bag('aa') def test_and(): """Test __and__.""" assert bag('aabc') & bag('aacd') == bag('aac') assert bag() & bag('safgsd') == bag() assert bag('abcc') & bag() == bag() assert bag('abcc') & bag('aabd') == bag('ab') assert bag('aabc') & set('abdd') == bag('ab') def test_isdisjoint(): """Test isdisjoint.""" assert bag().isdisjoint(bag()) assert bag().isdisjoint(bag('abc')) assert not bag('ab').isdisjoint(bag('ac')) assert bag('ab').isdisjoint(bag('cd')) def test_or(): """Test __or__.""" assert bag('abcc') | bag() == bag('abcc') assert bag('abcc') | bag('aabd') == bag('aabccd') assert bag('aabc') | set('abdd') == bag('aabcd') def test_add_op(): """Test __iadd__.""" b1 = bag('abc') result = b1 + bag('ab') assert result == bag('aabbc') assert b1 == bag('abc') assert result is not b1 def test_add(): """Test __add__.""" b = bag('abc') b.add('a') assert b == bag('aabc') def test_clear(): """Test clear.""" b = bag('abc') b.clear() assert b == bag() def test_discard(): """Test discard.""" b = bag('aabc') b.discard('a') assert b == bag('abc') b.discard('a') assert b == bag('bc') b.discard('a') assert b == bag('bc') def test_sub(): """Test __sub__.""" assert bag('abc') - bag() == bag('abc') assert bag('abbc') - bag('bd') == bag('abc') def test_mul(): """Test __mul__.""" assert bag('aab') * set('a') == bag((('a', 'a'), ('a', 'a'), ('b', 'a'))) def test_mul_empty_set(): """Test __mul__ on an empty set.""" assert bag('aab') * set() == bag() def test_product(): """Test product""" assert bag('aab').product(set('a'), operator=concat) == bag(('aa', 'aa', 'ba')) def test_product_commutative(): """Test product for a commutative operator.""" assert bag((1, 2)).product([2, 1], operator=mul) == bag((2, 1, 4, 2)) def test_xor(): """Test __xor__.""" assert bag('abc') ^ bag() == bag('abc') assert bag('aabc') ^ bag('ab') == bag('ac') assert bag('aabcc') ^ bag('abcde') == bag('acde') def test_ior(): """Test 
__ior__.""" b = bag() b |= bag() assert b == bag() b = bag('aab') b |= bag() assert b == bag('aab') b = bag('aab') b |= bag('ac') assert b == bag('aabc') b = bag('aab') b |= set('ac') assert b == bag('aabc') def test_iand(): """Test __iand__.""" b = bag() b &= bag() assert b == bag() b = bag('aab') b &= bag() assert b == bag() b = bag('aab') b &= bag('ac') assert b == bag('a') b = bag('aab') b &= set('ac') assert b == bag('a') def test_ixor(): """Test __ixor__.""" b = bag('abbbccd') b ^= bag('bbcdg') assert b == bag('abcg') b = bag('bbcdg') b ^= bag('abbbccd') assert b == bag('acbg') b = bag('abbc') b ^= set('bg') assert b == bag('abcg') def test_isub(): """Test __isub__ and discard_all.""" b = bag('aabbc') b -= bag('bd') assert b == bag('aabc') b = bag('aabc') b -= set('bd') assert b == bag('aac') def test_remove_all(): b = bag('abc') with pytest.raises(ValueError): b.remove_all('cd') assert b == bag('abc') b.remove_all('bc') assert b == bag('a') def test_iadd(): """Test __iadd__.""" b = bag('abc') b += bag('cde') assert b == bag('abccde') b = bag('abc') b += 'cde' assert b == bag('abccde') def test_hash(): """Test __hash__ vs an empty bag.""" bag_with_empty_tuple = frozenbag([()]) assert not hash(frozenbag()) == hash(bag_with_empty_tuple) assert not hash(frozenbag()) == hash(frozenbag((0,))) assert not hash(frozenbag('a')) == hash(frozenbag(('aa'))) assert not hash(frozenbag('a')) == hash(frozenbag(('aaa'))) assert not hash(frozenbag('a')) == hash(frozenbag(('aaaa'))) assert not hash(frozenbag('a')) == hash(frozenbag(('aaaaa'))) assert hash(frozenbag('ba')) == hash(frozenbag(('ab'))) assert hash(frozenbag('badce')) == hash(frozenbag(('dbeac'))) def test_num_unique_elems(): """Test Bag.num_unique_elements.""" assert bag('abracadabra').num_unique_elements() == 5 def test_pop(): """Test bag.pop.""" b = bag('a') assert b.pop() == 'a' with pytest.raises(KeyError): b.pop() def test_hashability(): """Test __hash__ for bags. 
Since bag is mutable and frozenbag is hashable, the second should be usable for dictionary keys and the second should raise a key or value error when used as a key or placed in a set. """ a = bag([1, 2, 3]) # Mutable multiset. b = frozenbag([1, 1, 2, 3]) # prototypical frozen multiset. c = frozenbag([4, 4, 5, 5, b, b]) # make sure we can nest them d = frozenbag([4, frozenbag([1, 3, 2, 1]), 4, 5, b, 5]) # c and d are the same; make sure nothing weird happens to hashes. assert c == d # Make sure both constructions work. dic = { b: 3, d: 5, d: 7, } assert len(dic) == 2 # Make sure no duplicates in dictionary. # Make sure TypeErrors are raised when using mutable bags for keys. with pytest.raises(TypeError): dic[a] = 4 with pytest.raises(TypeError): frozenbag([a, 1]) with pytest.raises(TypeError): bag([a, 1]) # test commutativity of bag instantiation. assert bag([4, 4, 5, 5, c]) == bag([4, 5, d, 4, 5]) collections-extended-2.0.2/tests/test_bijection.py000066400000000000000000000030221417332243400223500ustar00rootroot00000000000000"""Test bijection class.""" import pytest from collections_extended.bijection import bijection def test_bijection(): """General tests for bijection.""" b = bijection() assert len(b) == 0 b['a'] = 1 assert len(b) == 1 assert b['a'] == 1 assert b.inverse[1] == 'a' assert 'a' in b assert 1 not in b assert 1 in b.inverse with pytest.raises(KeyError): del b['f'] assert b == bijection(a=1) assert b.inverse.inverse is b assert b == b.copy() del b['a'] assert b == bijection() assert bijection(a=1, b=2, c=3) == bijection({'a': 1, 'b': 2, 'c': 3}) b['a'] = 1 b.inverse[1] = 'b' assert 'b' in b assert b['b'] == 1 assert 'a' not in b def test_init_from_pairs(): assert bijection({'a': 1, 'b': 2}) == bijection((('a', 1), ('b', 2))) def test_init_from_triples_fails(): with pytest.raises(ValueError): bijection((('a', 1, 0), ('b', 2, 0), )) def test_repr(): """Test __repr__.""" b = bijection() assert b == eval(b.__repr__()) b = bijection({'a': 1, 'b': 2, 'c': 
3}) assert b == eval(b.__repr__()) assert repr(bijection({'a': 1})) == "bijection({'a': 1})" def test_setting_value(): """Test that setting an existing value removes that key.""" b = bijection() b['a'] = 1 b['b'] = 1 assert 'a' not in b assert 'b' in b assert 1 in b.values() def test_iter(): b = bijection({'a': 1, 'b': 2, 'c': 3}) assert set(b) == {'a', 'b', 'c'} def test_clear(): b = bijection({'a': 1, 'b': 2, 'c': 3}) assert b.keys() assert b.values() assert b b.clear() assert not b assert not b.keys() assert not b.values() collections-extended-2.0.2/tests/test_collection_abc.py000066400000000000000000000006451417332243400233520ustar00rootroot00000000000000import pytest from collections_extended import ( setlist, frozensetlist, bag, frozenbag, bijection, RangeMap, Collection, ) @pytest.mark.parametrize('klass', [ setlist, frozensetlist, bag, frozenbag, bijection, RangeMap, list, tuple, set, frozenset, dict, ]) def test_subclass(klass): """Test that all appropriate collections are subclasses of Collection.""" assert issubclass(klass, Collection) collections-extended-2.0.2/tests/test_collections_extended.py000066400000000000000000000012371417332243400246060ustar00rootroot00000000000000"""Test for collections_extended factory.""" from collections_extended import ( collection, bag, setlist, frozenbag, frozensetlist, ) def test_collection_factory(): """Test collection factory.""" assert type(collection()) == bag assert type(collection(ordered=True)) == list assert type(collection(unique=True)) == set assert type(collection(unique=True, ordered=True)) == setlist assert type(collection(mutable=False)) == frozenbag assert type(collection(mutable=False, ordered=True)) == tuple assert type(collection(mutable=False, unique=True)) == frozenset assert ( type(collection(mutable=False, unique=True, ordered=True)) == frozensetlist ) collections-extended-2.0.2/tests/test_indexed_dict.py000066400000000000000000000176541417332243400230450ustar00rootroot00000000000000import pytest 
from collections_extended.indexed_dict import IndexedDict def assert_internal_state(self): """Asserts that the inner state of the data structure is consistent. Returns True, so it can be used in an assert expression itself.""" assert len(self._dict) == len(self._list) for k, (i, v) in self._dict.items(): # noqa k2, v2 = self._list[i] assert k2 == k assert v2 is v def test_empty_construction(): d = IndexedDict() assert list(d) == [] assert_internal_state(d) def test_dict_construction(): d = IndexedDict({1: 2, 3: 4}) assert set(d) == {1, 3} # Not necessarily ordered for python < 3.6 assert_internal_state(d) def test_kwargs_construction(): d = IndexedDict(a=1, b=2, c=3) assert set(d) == set("abc") # Not necessarily ordered for python < 3.6 assert_internal_state(d) def test_tuples_construction(): d = IndexedDict([(1, 2), (3, 4)]) assert list(d) == [1, 3] # Must have correct order assert_internal_state(d) def test_clear(): d = IndexedDict(a=1, b=2, c=3) d.clear() assert len(d) == 0 assert list(d) == [] assert_internal_state(d) @pytest.fixture() def d(request): ret = IndexedDict([(chr(ord("a") + i), 10 + i) for i in range(5)]) request.addfinalizer(lambda: assert_internal_state(ret)) return ret @pytest.mark.parametrize("indexing", [{"key": "b"}, {"index": 1}, {"index": -4}]) def test_get_key_found(d, indexing): assert d.get(**indexing) == 11 @pytest.mark.parametrize("indexing", [{"key": "x"}, {"index": 100}, {"index": -6}]) def test_get_specifying_missing_default(d, indexing): assert d.get(default=5, **indexing) == 5 def test_get_deprecated_param(d): with pytest.deprecated_call(): assert d.get('x', d='XXX') == 'XXX' @pytest.mark.parametrize("indexing", [{"key": "x"}, {"index": 100}, {"index": -6}]) def test_get_missing_default(d, indexing): assert d.get(**indexing) is None def test_get_duplicate_default(d): with pytest.raises(ValueError): d.get(d=None, default=None) with pytest.raises(ValueError): d.get(d='XXX', default=None) with pytest.raises(ValueError): d.get(d=None, 
default='XXX') with pytest.raises(ValueError): d.get(d='XXX', default='XXX') def test_get_both_key_and_index(d): with pytest.raises(TypeError): d.get(key="a", index=4) def test_get_no_key_or_index(d): with pytest.raises(TypeError): d.get() @pytest.mark.parametrize("indexing", [{"key": "b"}, {"index": 1}, {"index": -4}]) def test_pop_found(d, indexing): assert d.pop(**indexing) == 11 assert list(d) == list("acde") def test_pop_last(d): assert d.pop() == 14 assert list(d) == list("abcd") @pytest.mark.parametrize("indexing", [{"key": "x"}, {"index": 100}, {"index": -6}]) def test_pop_missing_default(d, indexing): assert d.pop(d="XXX", **indexing) == "XXX" assert list(d) == list("abcde") def test_pop_duplicate_default(d): with pytest.raises(ValueError): d.pop(d='XXX', default='XXX') def test_pop_missing_key_no_default(d): with pytest.raises(KeyError): d.pop("X") assert list(d) == list("abcde") @pytest.mark.parametrize("index", [100, -6]) def test_pop_missing_index_no_default(d, index): with pytest.raises(IndexError): d.pop(index=index) assert list(d) == list("abcde") def test_deprecated_pop_default(d): with pytest.deprecated_call(): assert d.pop(999, d='XXX') == 'XXX' def test_pop_empty_default(): d = IndexedDict() assert d.pop(d="XXX") == "XXX" def test_pop_empty_no_default(): d = IndexedDict() with pytest.raises(IndexError): d.pop() def test_pop_both_key_and_index(d): with pytest.raises(TypeError): d.pop(key="a", index=4) @pytest.mark.parametrize("indexing", [{"key": "b"}, {"index": 1}, {"index": -4}]) def test_fast_pop_found(d, indexing): assert d.fast_pop(**indexing) == (11, 1, "e", 14) assert set(d) == set("acde") def test_fast_pop_last(d): assert d.fast_pop() == (14, 4, "e", 14) assert set(d) == set("abcd") def test_fast_pop_last_key(d): assert d.fast_pop("e") == (14, 4, "e", 14) assert set(d) == set("abcd") def test_fast_pop_missing_key(d): with pytest.raises(KeyError): d.fast_pop("X") assert list(d) == list("abcde") def test_fast_pop_missing_index(d): with 
pytest.raises(IndexError): d.fast_pop(index=100) assert list(d) == list("abcde") def test_fast_pop_empty(): d = IndexedDict() with pytest.raises(IndexError): d.fast_pop() def test_fast_pop_both_key_and_index(d): with pytest.raises(TypeError): d.fast_pop(key="a", index=4) def test_popitem(d): assert d.popitem() == ("e", 14) assert list(d) == list("abcd") def test_popitem_first(d): assert d.popitem(last=False) == ("a", 10) assert list(d) == list("bcde") def test_popitem_last(d): assert d.popitem(last=True) == ("e", 14) assert list(d) == list("abcd") def test_popitem_index(d): assert d.popitem(index=2) == ('c', 12) assert list(d) == list('abde') def test_popitem_key(d): assert d.popitem(key='d') == ('d', 13) assert list(d) == list('abce') def test_popitem_multiple_params(d): with pytest.raises(ValueError): d.popitem(last=True, index=-1) def test_popitem_empty(): d = IndexedDict() with pytest.raises(KeyError): d.popitem() def test_copy(d): l = list(d) d2 = d.copy() assert_internal_state(d2) d.fast_pop("e") assert_internal_state(d) assert_internal_state(d2) assert list(d) != l assert list(d2) == l d["X"] = "y" assert_internal_state(d) assert_internal_state(d2) assert list(d) != l assert list(d2) == l d2["Z"] = "w" assert_internal_state(d) assert_internal_state(d2) @pytest.mark.parametrize("indexing", [{"key": "b"}, {"index": 1}, {"index": -4}]) def test_move_to_end_key_found(d, indexing): d.move_to_end(**indexing) assert list(d) == list("acdeb") def test_move_to_end_noop(d): d.move_to_end("e") assert list(d) == list("abcde") @pytest.mark.parametrize("indexing", [{"key": "b"}, {"index": 1}, {"index": -4}]) def test_move_to_begin_key_found(d, indexing): d.move_to_end(last=False, **indexing) assert list(d) == list("bacde") def test_move_to_begin_noop(d): d.move_to_end("a", last=False) assert list(d) == list("abcde") def test_move_to_end_missing_key(d): with pytest.raises(KeyError): d.move_to_end(key="X") assert list(d) == list("abcde") @pytest.mark.parametrize("index", 
[100, -6]) def test_move_to_end_missing_index(d, index): with pytest.raises(IndexError): d.move_to_end(index=index) assert list(d) == list("abcde") def test_move_to_end_both_key_and_index(d): with pytest.raises(TypeError): d.move_to_end(key="a", index=4) def test_move_to_end_no_key_or_index(d): with pytest.raises(TypeError): d.move_to_end() def test_index(d): assert d.index("c") == 2 def test_index_missing(d): with pytest.raises(KeyError): d.index("X") def test_key(d): assert d.key(3) == "d" def test_key_negative(d): assert d.key(-2) == "d" def test_key_missing(d): with pytest.raises(IndexError): d.key(100) def test_len(d): assert len(d) == 5 def test_getitem(d): assert d["a"] == 10 assert d["c"] == 12 def test_getitem_missing(d): with pytest.raises(KeyError): d["X"] # noqa def test_setitem_overwrite(d): d["a"] = 110 assert list(d) == list("abcde") assert d["a"] == 110 def test_setitem_create(d): d["x"] = 500 assert list(d) == list("abcdex") assert d["x"] == 500 def test_delitem(d): del d["c"] assert list(d) == list("abde") def test_delitem_missing(d): with pytest.raises(KeyError): del d["x"] def test_contains(d): assert "d" in d assert "x" not in d def test_keys(d): assert list(d.keys()) == list("abcde") def test_values(d): assert list(d.values()) == [10, 11, 12, 13, 14] def test_none_key(d): d[None] = None assert d[None] is None assert list(d) == list("abcde") + [None] def test_repr(): d = IndexedDict() d[1] = "X" d["Y"] = 2 d[None] = None assert repr(d) == "IndexedDict([(1, 'X'), ('Y', 2), (None, None)])" def test_str(): d = IndexedDict() d[1] = "X" d["Y"] = 2 d[None] = None assert str(d) == "IndexedDict({1: 'X', 'Y': 2, None: None})" def test_items(d): assert list(d.items()) == [(chr(ord("a") + i), 10 + i) for i in range(5)] collections-extended-2.0.2/tests/test_range_map.py000066400000000000000000000341451417332243400223450ustar00rootroot00000000000000"""Tests for RangeMap class.""" import datetime from hypothesis import given, example, strategies import 
pytest from collections_extended.range_map import RangeMap, MappedRange def print_underlying(rm): print(rm._keys, rm._values) def test_simple_set(): """Test set.""" rm = RangeMap() rm.set('a', start=1) print_underlying(rm) assert rm[1] == 'a' assert rm[2] == 'a' with pytest.raises(KeyError): rm[0] rm.set('b', start=2) assert rm[1] == 'a' assert rm[2] == 'b' assert rm[3] == 'b' def test_closed(): """Test a closed RangeMap.""" rm = RangeMap() rm.set('a', start=1, stop=2) print_underlying(rm) assert rm[1] == 'a' assert rm[1.9] == 'a' with pytest.raises(KeyError): rm[2] with pytest.raises(KeyError): rm[0] def test_from_mapping(): """Test creating a RangeMap from a mapping.""" rm = RangeMap() rm.set('a', start=1) rm.set('b', start=2) assert rm == RangeMap({1: 'a', 2: 'b'}) def test_set_closed_interval_end(): """Test setting a closed range on the end.""" rm = RangeMap({1: 'a', 2: 'b'}) rm.set('c', start=3, stop=4) assert rm[1] == 'a' assert rm[2] == 'b' assert rm[3] == 'c' assert rm[4] == 'b' def test_set_existing_interval(): """Test setting an exact existing range.""" rm = RangeMap({1: 'a', 2: 'b'}) rm.set('c', start=1, stop=2) print_underlying(rm) assert rm[1] == 'c' assert rm[2] == 'b' assert rm[3] == 'b' assert rm == RangeMap({1: 'c', 2: 'b'}) with pytest.raises(KeyError): rm[0] def test_set_consecutive_before_eq(): """Test setting consecutive ranges to the same value.""" rm = RangeMap({1: 'a', 2: 'b', 3: 'c'}) print_underlying(rm) rm.set('b', 1, 2) print_underlying(rm) assert rm == RangeMap({1: 'b', 3: 'c'}) def test_set_consecutive_after_eq(): """Test setting consecutive ranges to the same value.""" rm = RangeMap({1: 'a', 2: 'b', 3: 'c'}) rm.set('a', 2, 3) assert rm == RangeMap({1: 'a', 3: 'c'}) def test_set_consecutive_between_eq(): """Test setting consecutive ranges to the same value.""" rm = RangeMap({1: 'a', 2: 'b', 3: 'c', 4: 'b'}) rm.set('b', 3, 4) assert rm == RangeMap({1: 'a', 2: 'b'}) def test_break_up_existing_open_end_interval(): """Test breaking up an 
existing open interval at the end.""" rm = RangeMap({1: 'a', 2: 'b'}) rm.set('d', start=2, stop=2.5) assert rm[1] == 'a' assert rm[2] == 'd' assert rm[2.5] == 'b' assert rm[3] == 'b' def test_break_up_existing_internal_interval(): """Test breaking up an existing interval.""" rm = RangeMap({1: 'a', 2: 'b'}) rm.set('d', start=1, stop=1.5) assert rm[1] == 'd' assert rm[1.5] == 'a' assert rm[2] == 'b' assert rm[3] == 'b' def test_overwrite_multiple_internal(): """Test overwriting multiple adjoining intervals.""" rm = RangeMap({1: 'a', 2: 'b', 3: 'c', 4: 'd', 5: 'e'}) rm.set('z', start=2, stop=5) assert rm[1] == 'a' assert rm[2] == 'z' assert rm[3] == 'z' assert rm[4] == 'z' assert rm[5] == 'e' def test_overwrite_all(): """Test overwriting the entire mapping.""" rm = RangeMap({1: 'a', 2: 'b'}) rm.set('z', start=0) with pytest.raises(KeyError): rm[-1] assert rm[0] == 'z' assert rm[1] == 'z' assert rm[2] == 'z' assert rm[3] == 'z' def test_default_value(): """Test setting just a default value.""" rm = RangeMap(default_value=None) print(rm) assert rm[1] is None assert rm[-2] is None rm.set('a', start=1) print(rm) assert rm[0] is None assert rm[1] == 'a' assert rm[2] == 'a' def test_whole_range(): """Test setting the whole range.""" rm = RangeMap() rm.set('a') assert rm[1] == 'a' assert rm[-1] == 'a' def test_set_beg(): """Test setting the beginning.""" rm = RangeMap() rm.set('a', stop=4) with pytest.raises(KeyError): rm[4] assert rm[3] == 'a' def test_alter_beg(): """Test altering the beginning.""" rm = RangeMap({1: 'a', 2: 'b', 3: 'c', 4: 'd', 5: 'e'}) rm.set('z', stop=3) assert rm[0] == 'z' assert rm[1] == 'z' assert rm[2] == 'z' assert rm[3] == 'c' assert rm[4] == 'd' assert rm[5] == 'e' rm.set('y', stop=3) assert rm == RangeMap({3: 'c', 4: 'd', 5: 'e'}, default_value='y') def test_merge_overlapping_pre(): rm = RangeMap() rm[1:31] = True rm[0:30] = True rm2 = RangeMap() rm2[0:31] = True print_underlying(rm) assert rm == rm2 def test_merge_overlapping_post(): rm = 
RangeMap() rm[0:30] = True rm[1:31] = True rm2 = RangeMap() rm2[0:31] = True print_underlying(rm) assert rm == rm2 def test_merge_overlapping_middle_pre(): rm = RangeMap() rm[0:31] = False rm[10:20] = True rm[5:15] = True rm2 = RangeMap() rm2[0:31] = False rm2[5:20] = True print_underlying(rm) assert rm == rm2 def test_merge_overlapping_middle_post(): rm = RangeMap() rm[0:31] = False rm[5:15] = True rm[10:20] = True rm2 = RangeMap() rm2[0:31] = False rm2[5:20] = True print_underlying(rm) assert rm == rm2 def test_dates(): """Test using dates.""" rm = RangeMap() rm.set('b', datetime.date(1936, 12, 11)) rm.set('a', datetime.date(1952, 2, 6)) assert rm[datetime.date(1945, 1, 1)] == 'b' assert rm[datetime.date(1965, 4, 6)] == 'a' with pytest.raises(KeyError): rm[datetime.date(1900, 1, 1)] def test_version_differences(): """Test python 2 and 3 differences.""" rm = RangeMap({1: 'a', 2: 'b', 3: 'c', 4: 'd', 5: 'e'}) rm[3:] = 'a' assert rm == RangeMap({1: 'a', 2: 'b', 3: 'a'}) del rm[1:2] assert rm == RangeMap({2: 'b', 3: 'a'}) def test_slice_errors(): """Test slicing errors.""" rm = RangeMap({1: 'a', 2: 'b', 3: 'c', 4: 'd', 5: 'e'}) with pytest.raises(ValueError): rm[2:5:2] with pytest.raises(TypeError): rm[3] = 'z' with pytest.raises(ValueError): rm[3:5:2] = 'z' def test_bool(): assert not bool(RangeMap()) assert bool(RangeMap(default_value='a')) assert bool(RangeMap({1: 1})) assert bool(RangeMap([(1, 2, 3)])) def test_delete(): """Test deleting.""" rm = RangeMap({1: 'a', 2: 'b', 3: 'c', 4: 'd', 5: 'e'}, default_value='z') rm.delete(stop=1) assert rm == RangeMap({1: 'a', 2: 'b', 3: 'c', 4: 'd', 5: 'e'}) rm.delete(start=2, stop=4) assert rm == RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'd'), (5, None, 'e'), )) rm.delete(start=5) assert rm == RangeMap.from_iterable(((1, 2, 'a'), (4, 5, 'd'))) rm = RangeMap({1: 'a', 2: 'b', 3: 'c'}) rm.delete(2, 3) assert rm == RangeMap([(1, 2, 'a'), (3, None, 'c')]) print(repr(rm)) with pytest.raises(KeyError): rm.delete(2, 3) with 
pytest.raises(KeyError): rm.delete(0, 2) with pytest.raises(KeyError): rm.delete(2.5, 3.5) def test_delitem_beginning(): """Test RangeMap.__delitem__ at the beginning.""" rm = RangeMap({1: 'a', 2: 'b', 3: 'c', 4: 'd', 5: 'e'}) rm.delete(1, 2) assert rm == RangeMap({2: 'b', 3: 'c', 4: 'd', 5: 'e'}) def test_delitem_consecutive(): """Test deleting consecutive ranges.""" rm = RangeMap({2: 'b', 3: 'c', 4: 'd', 5: 'e'}) rm.delete(3, 4) rm.delete(4, 5) assert rm == RangeMap.from_iterable(((2, 3, 'b'), (5, None, 'e'))) def test_str(): """Test __str__.""" assert str(RangeMap()) == 'RangeMap()' rm = RangeMap(default_value='a') print_underlying(rm) assert str(rm) == "RangeMap((None, None): a)" assert str(RangeMap({1: 'b'})) == "RangeMap((1, None): b)" assert ( str(RangeMap({1: 'b'}, default_value='a')) == "RangeMap((None, 1): a, (1, None): b)" ) def test_empty(): """Test RangeMap.empty.""" rm = RangeMap({1: 'a', 2: 'b', 3: 'c', 4: 'd'}) rm.empty(2, 3) rm.empty(2, 3) assert rm == RangeMap.from_iterable(( (1, 2, 'a'), (3, 4, 'c'), (4, None, 'd'), )) rm.empty(3.5, 4.5) assert rm == RangeMap.from_iterable(( (1, 2, 'a'), (3, 3.5, 'c'), (4.5, None, 'd'), )) def test_repr(): test_objects = [ RangeMap(), RangeMap(default_value='a'), RangeMap({1: 'a'}), RangeMap([(1, 2, 'a'), (2, 3, 'b')]), RangeMap([(1, 2, 'a'), (3, 4, 'b')]), RangeMap([ (datetime.date(2015, 1, 1), datetime.date(2015, 1, 2), 'a'), (datetime.date(2015, 1, 2), datetime.date(2015, 1, 3), 'b'), ]), ] for obj in test_objects: assert eval(repr(obj)) == obj def test_eq(): """Test __eq__.""" assert RangeMap() == RangeMap() assert RangeMap({1: 'a'}) == RangeMap({1: 'a'}) assert ( RangeMap({1: 'a', 2: 'b', 3: 'c', 4: 'd', 5: 'e'}) == RangeMap({1: 'a', 2: 'b', 3: 'c', 4: 'd', 5: 'e'}) ) assert RangeMap(default_value='z') == RangeMap(default_value='z') assert ( RangeMap({1: 'a', 2: 'b', 3: 'c', 4: 'd', 5: 'e'}, default_value='z') == RangeMap({1: 'a', 2: 'b', 3: 'c', 4: 'd', 5: 'e'}, default_value='z') ) assert RangeMap() != 
RangeMap(default_value='z') assert RangeMap({1: 'a'}, default_value='z') != RangeMap({1: 'a'}) assert RangeMap(default_value='z') != RangeMap(default_value='a') assert not RangeMap() == dict() def test_contains(): """Test __contains__.""" assert 1 not in RangeMap() assert 1 in RangeMap(default_value=1) assert 1 in RangeMap({1: 'a'}) assert 2 in RangeMap({1: 'a'}) assert 0 not in RangeMap({1: 'a'}) rm = RangeMap([(1, 2, 'a'), (3, 4, 'b')]) assert 0 not in rm assert 1 in rm assert 2 not in rm assert 3 in rm assert 4 not in rm def test_get_range(): """Test get_range.""" rm = RangeMap({1: 'a', 2: 'b', 3: 'c', 4: 'd', 5: 'e'}, default_value='z') print_underlying(rm) print_underlying(rm.get_range(1, 3)) print_underlying(RangeMap.from_iterable(((1, 2, 'a'), (2, 3, 'b')))) assert ( rm.get_range(1, 3) == RangeMap.from_iterable(((1, 2, 'a'), (2, 3, 'b'))) ) assert ( rm.get_range(1.5, 3) == RangeMap.from_iterable(((1.5, 2, 'a'), (2, 3, 'b'))) ) print_underlying(rm.get_range(start=3)) assert rm.get_range(start=3) == RangeMap({3: 'c', 4: 'd', 5: 'e'}) assert ( rm.get_range(stop=3) == RangeMap.from_iterable(((None, 1, 'z'), (1, 2, 'a'), (2, 3, 'b'))) ) assert rm[2:3] == rm.get_range(2, 3) def test_start_gt_stop(): rm = RangeMap({1: 'a', 2: 'b', 3: 'c', 4: 'd', 5: 'e'}, default_value='z') with pytest.raises(ValueError): rm.set('a', start=3, stop=2) with pytest.raises(ValueError): rm.get_range(start=3, stop=2) def test_init(): assert RangeMap(iterable=[]) == RangeMap() rm = RangeMap(((1, 2, 'a'), (2, None, 'b'))) assert RangeMap.from_mapping({1: 'a', 2: 'b'}) == rm with pytest.raises(TypeError): RangeMap(foo='bar') def test_len(): rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'd'), (5, None, 'e'), )) assert len(rm) == 3 assert len(RangeMap(default_value='a')) == 1 assert len(RangeMap()) == 0 assert len(RangeMap(default_value=None)) == 1 def test_keys(): rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'd'), (5, None, 'e'), )) assert rm.keys() == set((1, 4, 5)) rm = 
RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'd'), )) assert rm.keys() == set((1, 4)) assert RangeMap().keys() == set() assert RangeMap(default_value='a').keys() == set((None,)) assert RangeMap(default_value=None).keys() == set((None, )) def test_values(): rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'd'), (5, None, 'e'), )) assert list(rm.values()) == ['a', 'd', 'e'] rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'd'), )) assert list(rm.values()) == ['a', 'd'] assert list(RangeMap().values()) == [] assert list(RangeMap(default_value='a').values()) == ['a'] assert list(RangeMap(default_value=None).values()) == [None] def test_items(): rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'd'), (5, None, 'e'), )) assert rm.items() == set(((1, 'a'), (4, 'd'), (5, 'e'))) rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'd'), )) assert rm.items() == set(((1, 'a'), (4, 'd'))) assert RangeMap().items() == set() assert RangeMap(default_value='a').items() == set(((None, 'a'),)) assert RangeMap(default_value=None).items() == set(((None, None), )) def test_iter(): assert list(RangeMap()) == [] assert list(RangeMap(default_value='a')) == [None] rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'd'), (5, None, 'e'), )) assert list(rm) == [1, 4, 5] rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'd'), )) assert list(rm) == [1, 4] def test_key_view_contains(): rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'd'), (5, None, 'e'), )) assert 1 in rm.keys() assert 2 not in rm.keys() assert 1.5 in rm.keys() def test_items_view_contains(): rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'd'), (5, None, 'e'), )) assert (1, 'a') in rm.items() assert (2, 'a') not in rm.items() def test_values_view_contains(): rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'd'), (5, None, 'e'), )) assert 'a' in rm.values() assert 'b' not in rm.values() def test_get(): rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'd'), (5, None, 'e'), )) assert rm.get(1) == 'a' assert rm.get(1.5) == 'a' 
assert rm.get(2) is None def test_clear(): rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'd'), (5, None, 'e'), )) rm.clear() assert rm == RangeMap() def test_start(): assert RangeMap().start is None rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'b'), )) assert rm.start == 1 rm = RangeMap.from_iterable(( (None, 2, 'a'), (4, 5, 'b'), )) assert rm.start is None def test_end(): assert RangeMap().end is None rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, 5, 'b'), )) assert rm.end == 5 rm = RangeMap.from_iterable(( (1, 2, 'a'), (4, None, 'b'), )) assert rm.end is None class TestMappedRange: def test_str(self): mr = MappedRange(0, 1, 'a') assert str(mr) == "[0, 1) -> 'a'" def test_repr(self): mr = MappedRange(0, 1, 'a') assert repr(mr) == "MappedRange(0, 1, 'a')" def test_unpack(self): mr = MappedRange(0, 1, 'a') v1, v2, v3 = mr assert v1 == 0 assert v2 == 1 assert v3 == 'a' def test_equality(self): assert MappedRange(0, 1, 'a') == MappedRange(0, 1, 'a') assert not MappedRange(0, 1, 'a') is MappedRange(0, 1, 'a') assert MappedRange(0, 1, 'a') != MappedRange(None, 1, 'a') @given( offsets=strategies.lists( strategies.integers(min_value=0, max_value=99), min_size=1, max_size=20 ), ) @example(offsets=[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 40, 70]) def test_merge_ranges(offsets): """ The RangeMap merges ranges. 
""" range_map = RangeMap() for offset in offsets: length = min(30, 100 - offset) range_map.set(True, offset, offset + length) ranges = list(range_map.ranges()) if len(ranges) > 1: for first, second in zip(ranges[:-1], ranges[1:]): assert (first.stop, first.value) != (second.start, second.value), ranges collections-extended-2.0.2/tests/test_sentinel.py000066400000000000000000000011671417332243400222330ustar00rootroot00000000000000import pickle from collections_extended.sentinel import Sentinel, NOT_SET class TestSentinel: def test_equal(self): assert Sentinel('abc') == Sentinel('abc') def test_not_equal(self): assert not NOT_SET == None def test_is(self): assert Sentinel('a') is Sentinel('a') def test_str(self): assert str(Sentinel('abc')) == '' def test_repr(self): # assert repr(Sentinel('abc')) == "Sentinel('abc')" assert str(Sentinel('abc')) == '' def test_pickle(self): pickled = pickle.dumps(NOT_SET, protocol=2) unpickled = pickle.loads(pickled) assert unpickled == NOT_SET assert unpickled is NOT_SET collections-extended-2.0.2/tests/test_setlists.py000066400000000000000000000335271417332243400222710ustar00rootroot00000000000000"""Test for setlist classes.""" import pytest from collections_extended.setlists import setlist, frozensetlist def test_init(): """Test __init__.""" with pytest.raises(ValueError): setlist('aa', raise_on_duplicate=True) with pytest.raises(ValueError): setlist('aa', True) def test_count(): """Test count.""" sl = setlist('abcdea') assert sl.count('a') == 1 assert sl.count('f') == 0 assert sl.count('e') == 1 def test_index(): """Test index.""" sl = setlist('abcdef') assert sl.index('a') == 0 assert sl.index('f') == 5 with pytest.raises(ValueError): sl.index('g') with pytest.raises(ValueError): sl.index('a', start=1) with pytest.raises(ValueError): sl.index('f', end=5) with pytest.raises(ValueError): sl.index('f', end=-1) with pytest.raises(IndexError): sl.index('a', end=-10) def test_sub_index(): """Test sub_index.""" sl = setlist('abcdef') 
def test_setlist():
	"""General setlist tests."""
	sl = setlist('abcde')
	sl[0] = 5
	assert sl == setlist((5, 'b', 'c', 'd', 'e'))
	sl[-1] = 0
	assert sl == setlist((5, 'b', 'c', 'd', 0))
	with pytest.raises(ValueError):
		# 'c' already exists at index 2, so assigning it elsewhere must fail
		sl[1] = 'c'
	assert sl == setlist((5, 'b', 'c', 'd', 0))
	with pytest.raises(ValueError):
		sl.append('c')
	assert sl == setlist((5, 'b', 'c', 'd', 0))
	# BUGFIX: was `sl[2] == 'c'`, a no-op comparison statement.  The intent
	# is the assignment: setting an index to its current value is allowed
	# (see test_setitem) and must leave the setlist unchanged.
	sl[2] = 'c'
	assert sl == setlist((5, 'b', 'c', 'd', 0))
	del sl[0]
	assert sl == setlist(('b', 'c', 'd', 0))
	del sl[-1]
	assert sl == setlist(('b', 'c', 'd'))
	assert sl.pop() == 'd'
	assert sl.pop(0) == 'b'
	assert sl == setlist(('c',))
	sl.insert(0, 'a')
	assert sl == setlist(('a', 'c'))
	sl.insert(len(sl), 'e')
	assert sl == setlist(('a', 'c', 'e'))
	with pytest.raises(ValueError):
		sl.insert(1, 'e')
	sl.append('f')
	assert sl == setlist(('a', 'c', 'e', 'f'))


def test_operator_iadd():
	"""`+=` extends a setlist in place."""
	sl = setlist('abc')
	sl += setlist('de')
	assert sl == setlist('abcde')


def test_operator_add():
	"""`+` concatenates setlists; non-setlist operands raise TypeError."""
	assert setlist('abc') + setlist('def') == setlist('abcdef')
	with pytest.raises(TypeError):
		assert setlist('abc') + 'def' == setlist('abcdef')
	# Mixed frozen/mutable operands: result takes the left operand's type.
	assert frozensetlist(range(2)) + frozensetlist([2]) == frozensetlist(range(3))
	assert setlist(range(2)) + frozensetlist([2]) == setlist(range(3))
	assert frozensetlist(range(2)) + setlist([2]) == frozensetlist(range(3))
	assert setlist(range(2)) + setlist([2]) == setlist(range(3))
	with pytest.raises(TypeError):
		setlist() + set()
	with pytest.raises(TypeError):
		setlist() + list()


def test_remove_all_works():
	"""remove_all drops every listed element."""
	sl = setlist('abcdefgh')
	sl.remove_all('acdh')
	assert sl == setlist(('befg'))


def test_remove_all_raises_on_all_missing():
	"""remove_all raises when none of the elements are present."""
	sl = setlist(range(5))
	with pytest.raises(ValueError):
		sl.remove_all([5, 6])


def test_remove_all_raises_on_some_missing():
	"""remove_all raises and leaves the setlist unchanged when any element is missing."""
	sl = setlist(range(5))
	with pytest.raises(ValueError):
		sl.remove_all([4, 5])
	assert sl == setlist(range(5))


def test_remove_all_raises_on_duplicates():
	"""remove_all raises when the argument contains duplicates."""
	sl = setlist(range(5))
	with pytest.raises(ValueError):
		sl.remove_all([4, 4])


def test_discard_all_works():
	"""discard_all drops the listed elements that are present."""
	sl = setlist(range(5))
	sl.discard_all([3, 4])
	assert sl == setlist(range(3))


def test_discard_all_ignores_some_missing_end():
	sl = setlist(range(5))
	sl.discard_all([4, 5])
	assert sl == setlist(range(4))


def test_discard_all_ignores_some_missing_beg():
	sl = setlist(range(5))
	sl.discard_all([-1, 0])
	assert sl == setlist([1, 2, 3, 4])


def test_discard_all_ignores_all_missing_end():
	sl = setlist(range(5))
	sl.discard_all([5, 6])
	assert sl == setlist(range(5))


def test_discard_all_ignores_all_missing_beg():
	sl = setlist(range(5))
	sl.discard_all([-2, -1])
	assert sl == setlist(range(5))


def test_discard_all_handles_duplicates():
	sl = setlist(range(5))
	sl.discard_all([3, 3])
	assert sl == setlist([0, 1, 2, 4])
	sl.discard_all([4, 4])
	assert sl == setlist([0, 1, 2])


def test_len():
	"""Test __len__."""
	assert len(setlist()) == 0
	assert len(setlist('a')) == 1
	assert len(setlist('ab')) == 2
	assert len(setlist('abc')) == 3


def test_shuffle():
	"""Test shuffle."""
	# NOTE(review): probabilistically flaky — shuffle could return the
	# identity permutation, though the odds at n=100 are negligible.
	sl = setlist(range(100))
	sl.shuffle()
	assert sl != setlist(range(100))


def test_del():
	"""Test __delitem__."""
	sl = setlist('abcde')
	del sl[1]
	assert sl == setlist('acde')
	del sl[0]
	assert sl == setlist('cde')
	del sl[2]
	assert sl == setlist('cd')
	with pytest.raises(IndexError):
		del sl[2]
	with pytest.raises(IndexError):
		del sl[-3]


def test_getitem():
	"""Test __getitem__."""
	sl = setlist(range(10))
	assert sl[0] == 0
	assert sl[5] == 5
	assert sl[9] == 9
	with pytest.raises(IndexError):
		sl[10]
	assert sl[-1] == 9
	with pytest.raises(IndexError):
		sl[-11]
	# Slicing returns a new setlist, honoring step and direction.
	assert sl[1:3] == setlist([1, 2])
	assert sl[1:6:2] == setlist([1, 3, 5])
	assert sl[6:1:-2] == setlist([6, 4, 2])


def test_setitem():
	"""Test __setitem__."""
	sl = setlist('abc')
	sl[0] = 'd'
	assert sl == setlist('dbc')
	# Re-assigning the same value to the same index is a legal no-op.
	sl[0] = 'd'
	assert sl == setlist('dbc')
	sl[1] = 'e'
	assert sl == setlist('dec')
	sl[2] = 'f'
	assert sl == setlist('def')
	with pytest.raises(IndexError):
		sl[3] = 'g'
	sl[0], sl[1] = 'h', 'i'
	assert sl == setlist('hif')
	sl = setlist(range(10))
	sl[0] = 'a'
	assert sl == setlist(['a'] + list(range(1, 10)))
	sl[9] = 'b'
	assert sl == setlist(['a'] + list(range(1, 9)) + ['b'])
	sl[-1] = 'c'
	assert sl == setlist(['a'] + list(range(1, 9)) + ['c'])
	with pytest.raises(IndexError):
		sl[-11] = 'd'
	assert sl == setlist(['a'] + list(range(1, 9)) + ['c'])
	with pytest.raises(IndexError):
		sl[10] = 'd'
	assert sl == setlist(['a'] + list(range(1, 9)) + ['c'])
	with pytest.raises(TypeError):
		sl[0:2] = 1
	sl = setlist(range(10))
	with pytest.raises(ValueError):
		sl[0:2] = [8, 9]
	with pytest.raises(ValueError):
		sl[-1:0:-2] = ['a', 'b']
@pytest.mark.parametrize('slice_, replacement', [
	(slice(0, 2), ['a', 'b']),
	(slice(2, 4), ['a', 'b']),
	(slice(7, 9), ['a', 'b']),
	(slice(2, -2), ['a', 'b']),
	(slice(2, 5, 2), ['a', 'b']),
	(slice(-1, None, -1), list(range(10))),
])
def test_compare_set_slice_to_list(slice_, replacement):
	"""Slice assignment on a setlist matches the same operation on a list."""
	sl = setlist(range(10))
	sl[slice_] = replacement
	lst = list(range(10))
	lst[slice_] = replacement
	assert sl == setlist(lst)


def test_delitem():
	"""Test __delitem__."""
	sl = setlist(range(10))
	del sl[9]
	assert sl == setlist(range(9))
	del sl[-1]
	assert sl == setlist(range(8))
	del sl[0]
	assert sl == setlist(range(1, 8))
	with pytest.raises(IndexError):
		del sl[10]


@pytest.mark.parametrize('slice_', [
	slice(0, 2),
	slice(6, 9),
	slice(3, 7),
	slice(7, 3, -1),
	slice(0, 7, 2),
])
def test_compare_del_slice_to_list(slice_):
	"""Slice deletion on a setlist matches the same operation on a list."""
	sl = setlist(range(10))
	del sl[slice_]
	lst = list(range(10))
	del lst[slice_]
	assert sl == setlist(lst)


def test_append_works():
	"""Appending a new element extends the setlist."""
	sl = setlist(range(2))
	sl.append(2)
	assert sl == setlist(range(3))


def test_append_unhashable_raises_type_error():
	"""Unhashable elements are rejected."""
	sl = setlist()
	with pytest.raises(TypeError):
		sl.append(list())


def test_append_duplicate_raises_value_error():
	"""Appending an existing element raises ValueError."""
	sl = setlist('a')
	with pytest.raises(ValueError):
		sl.append('a')


def test_extend_works():
	"""Test simple extend works."""
	sl = setlist(range(1))
	sl.extend([1, 2])
	assert sl == setlist(range(3))
	assert sl.index(0) == 0
	assert sl.index(1) == 1
	assert sl.index(2) == 2


def test_extend_fails_with_existing_values():
	"""Test extend with existing values fails."""
	sl = setlist(range(3))
	with pytest.raises(ValueError):
		sl.extend([1, 2])
	assert sl == setlist(range(3))


def test_extend_fails_with_some_existing_values():
	"""Test extend with some existing values fails and doesn't change the setlist."""
	sl = setlist(range(3))
	with pytest.raises(ValueError):
		sl.extend([4, 2])
	assert sl == setlist(range(3))


def test_extend_fails_with_duplicate_values():
	"""Test extend with duplicate values fails and doesn't change the setlist."""
	sl = setlist(range(3))
	with pytest.raises(ValueError):
		sl.extend([3, 3])
	assert sl == setlist(range(3))


def test_extend_fails_with_unhashable_value():
	"""Extend with an unhashable value fails and leaves the setlist unchanged."""
	sl = setlist()
	with pytest.raises(TypeError):
		sl.extend(['a', list()])
	assert sl == setlist()


def test_update():
	"""update adds new elements."""
	sl = setlist(range(3))
	sl.update([3])
	assert sl == setlist(range(4))


def test_update_with_duplicates():
	"""update silently skips elements already present."""
	sl = setlist(range(3))
	sl.update([2, 3])
	assert sl == setlist(range(4))


def test_update_raises_type_error():
	"""update rejects unhashable elements."""
	sl = setlist()
	with pytest.raises(TypeError):
		sl.update([list()])


def test_hash():
	"""Test __hash__."""
	assert hash(frozensetlist('abc')) == hash(frozensetlist('abc'))
	assert hash(frozensetlist()) == hash(frozensetlist())


def test_hash_differs_with_order():
	"""Hash is order-sensitive, unlike frozenset."""
	assert hash(frozensetlist('abc')) != hash(frozensetlist('cab'))


def test_clear():
	"""Test clear."""
	sl = setlist(range(10))
	sl.clear()
	assert sl == setlist()


def test_discard():
	"""Test discard."""
	sl = setlist(range(10))
	sl.discard(9)
	assert sl == setlist(range(9))
	# Discarding a missing element is a silent no-op.
	sl.discard(100)
	assert sl == setlist(range(9))


def test_add():
	"""Test add."""
	sl = setlist(range(10))
	sl.add(10)
	assert sl == setlist(range(11))
	# Adding an existing element is a silent no-op.
	sl.add(10)
	assert sl == setlist(range(11))
def test_remove():
	"""Test remove."""
	sl = setlist(range(10))
	sl.remove(9)
	assert sl == setlist(range(9))
	with pytest.raises(ValueError):
		sl.remove(100)


def test_eq():
	"""Test __eq__."""
	# A setlist never equals a plain list, nor a setlist of different length.
	assert not setlist(range(10)) == list(range(10))
	assert not setlist(range(10)) == setlist(range(9))


def test_str():
	"""Test __str__."""
	assert str(setlist()) == '{[}]'
	assert str(setlist('abc')) == "{['a', 'b', 'c'}]"
	assert str(frozensetlist()) == 'frozensetlist()'
	assert str(frozensetlist('abc')) == "frozensetlist(('a', 'b', 'c'))"


def test_repr():
	"""Test __repr."""
	assert repr(setlist()) == 'setlist()'
	assert repr(setlist(range(4))) == 'setlist((0, 1, 2, 3))'
	assert repr(frozensetlist()) == 'frozensetlist()'
	assert repr(frozensetlist('abc')) == "frozensetlist(('a', 'b', 'c'))"


def test_copy():
	"""Test copy."""
	sl = setlist(range(10))
	copy = sl.copy()
	assert sl == copy
	assert sl is not copy
	# copy is shallow: contained objects are shared.
	sl = setlist(('1', (0, 1)))
	copy = sl.copy()
	assert sl == copy
	assert sl is not copy
	assert sl[1] is copy[1]


def test_is_subset():
	assert setlist('ab').issubset(setlist('abc'))
	assert setlist('abc').issubset(setlist('abc'))
	assert not setlist('abc').issubset(setlist('ab'))


def test_is_superset():
	assert not setlist('ab').issuperset(setlist('abc'))
	assert setlist('abc').issuperset(setlist('abc'))
	assert setlist('abc').issuperset(setlist('ab'))


def test_union():
	# The named method accepts any iterable; the operator requires a setlist.
	assert setlist('ab').union(setlist('bc')) == setlist('abc')
	assert setlist('ab').union('bc') == setlist('abc')
	assert setlist('ab') | setlist('bc') == setlist('abc')
	with pytest.raises(TypeError):
		assert setlist('ab') | 'bc' == setlist('abc')


def test_intersection():
	assert setlist('abd').intersection(setlist('bcd')) == setlist('bd')
	assert setlist('abd').intersection('bcd') == setlist('bd')
	assert setlist('abd') & setlist('bcd') == setlist('bd')
	with pytest.raises(TypeError):
		assert setlist('abd') & 'bcd' == setlist('bd')


def test_difference():
	assert setlist('abd').difference(setlist('bcd')) == setlist('a')
	assert setlist('abd').difference('bcd') == setlist('a')
	assert setlist('abd') - setlist('bcd') == setlist('a')
	with pytest.raises(TypeError):
		assert setlist('abd') - 'bcd' == setlist('a')


def test_symmetric_difference():
	assert setlist('abd').symmetric_difference(setlist('bcd')) == setlist('ac')
	assert setlist('abd').symmetric_difference('bcd') == setlist('ac')
	assert setlist('abd') ^ setlist('bcd') == setlist('ac')
	with pytest.raises(TypeError):
		assert setlist('abd') ^ 'bcd' == setlist('ac')


def test_intersection_update():
	sl = setlist('abd')
	sl.intersection_update(setlist('bcd'))
	assert sl == setlist('bd')
	sl = setlist('abd')
	sl.intersection_update('bcd')
	assert sl == setlist('bd')
	sl = setlist('abd')
	sl &= setlist('bcd')
	assert sl == setlist('bd')
	sl = setlist('abd')
	with pytest.raises(TypeError):
		sl &= 'bcd'


def test_difference_update():
	sl = setlist('abd')
	sl.difference_update(setlist('bcd'))
	assert sl == setlist('a')
	sl = setlist('abd')
	sl.difference_update('bcd')
	assert sl == setlist('a')
	sl = setlist('abd')
	sl -= setlist('bcd')
	assert sl == setlist('a')
	sl = setlist('abd')
	with pytest.raises(TypeError):
		sl -= 'bcd'


def test_symmetric_difference_update():
	sl = setlist('abd')
	sl.symmetric_difference_update(setlist('bcd'))
	assert sl == setlist('ac')
	sl = setlist('abd')
	sl.symmetric_difference_update('bcd')
	assert sl == setlist('ac')
	sl = setlist('abd')
	sl ^= setlist('bcd')
	assert sl == setlist('ac')
	sl = setlist('abd')
	with pytest.raises(TypeError):
		sl ^= 'bcd'


def test_union_update():
	sl = setlist('abd')
	sl |= setlist('bcd')
	assert sl == setlist('abdc')


def test_extend_update():
	sl = setlist('abd')
	sl += setlist('e')
	assert sl == setlist('abde')
	with pytest.raises(TypeError):
		sl += 'f'
	assert sl == setlist('abde')
	with pytest.raises(ValueError):
		sl += setlist('fe')
	assert sl == setlist('abde')


def test_sort():
	"""sort orders in place, optionally via a key function."""
	sl = setlist([4, 7, 1, 0])
	sl.sort()
	assert sl == setlist([0, 1, 4, 7])
	sl = setlist([])
	sl.sort()
	assert sl == setlist()
	sl = setlist(['a9', 'b7', 'c5'])
	sl.sort(key=lambda i: i[1])
	assert sl == setlist(['c5', 'b7', 'a9'])


def test_tuple_keys():
	# https://github.com/mlenzen/collections-extended/issues/83
	sl = setlist()
	sl.add((1, 2, 3))
	with pytest.raises(ValueError):
		sl.append((1, 2, 3))
	assert sl == setlist([(1, 2, 3)])


def assert_internal_structure(sl):
	"""Check that the internal dict index agrees with the internal list."""
	print(sl._list)
	print(sl._dict)
	for i, elem in enumerate(sl):
		assert sl._dict[elem] == i
	assert len(sl._dict) == len(sl._list)


def test_swap():
	sl = setlist('abcdef')
	sl.swap(1, 2)
	assert_internal_structure(sl)
	assert sl == setlist('acbdef')
	# Negative indices are supported.
	sl.swap(-1, 1)
	assert_internal_structure(sl)
	assert sl == setlist('afbdec')


def test_reverse():
	sl = setlist('abcdef')
	sl.reverse()
	assert sl == setlist('fedcba')


# ---- tar member: collections-extended-2.0.2/tox.ini ----
# [tox]
# envlist = py36, py37, py38, py39, py310, pypy3
# isolated_build = True
#
# [testenv]
# setenv =
# 	PYTHONPATH = {toxinidir}:{toxinidir}/
# deps =
# 	pytest
# commands = pytest
#
# [pytest]
# addopts = --doctest-glob='docs/*.rst'
# testpaths = tests docs README.rst
#
# [flake8]
# ignore = W191,W503,D105,D107,D203,D413
# max-complexity = 10
# hang-closing = True
# exclude = .git,env,.venv,docs,build,tests,.tox,.eggs