pax_global_header00006660000000000000000000000064143443517060014521gustar00rootroot0000000000000052 comment=007e8ec12662ffd896c6151239dc7ed1402dc710 pytest-asyncio-0.20.3/000077500000000000000000000000001434435170600145765ustar00rootroot00000000000000pytest-asyncio-0.20.3/.github/000077500000000000000000000000001434435170600161365ustar00rootroot00000000000000pytest-asyncio-0.20.3/.github/CODEOWNERS000066400000000000000000000000361434435170600175300ustar00rootroot00000000000000* @asvetlov @seifertm @Tinche pytest-asyncio-0.20.3/.github/actionlint-matcher.json000066400000000000000000000006631434435170600226230ustar00rootroot00000000000000{ "problemMatcher": [ { "owner": "actionlint", "pattern": [ { "code": 5, "column": 3, "file": 1, "line": 2, "message": 4, "regexp": "^(?:\\x1b\\[\\d+m)?(.+?)(?:\\x1b\\[\\d+m)*:(?:\\x1b\\[\\d+m)*(\\d+)(?:\\x1b\\[\\d+m)*:(?:\\x1b\\[\\d+m)*(\\d+)(?:\\x1b\\[\\d+m)*: (?:\\x1b\\[\\d+m)*(.+?)(?:\\x1b\\[\\d+m)* \\[(.+?)\\]$" } ] } ] } pytest-asyncio-0.20.3/.github/dependabot.yml000066400000000000000000000004361434435170600207710ustar00rootroot00000000000000--- version: 2 updates: - package-ecosystem: pip directory: /dependencies/default schedule: interval: weekly open-pull-requests-limit: 10 target-branch: master - package-ecosystem: github-actions directory: / schedule: interval: daily open-pull-requests-limit: 10 pytest-asyncio-0.20.3/.github/workflows/000077500000000000000000000000001434435170600201735ustar00rootroot00000000000000pytest-asyncio-0.20.3/.github/workflows/main.yml000066400000000000000000000067661434435170600216610ustar00rootroot00000000000000--- name: CI on: push: branches: [master] tags: [v*] pull_request: branches: [master] workflow_dispatch: jobs: lint: name: Run linters runs-on: ubuntu-latest outputs: version: ${{ steps.version.outputs.version }} prerelease: ${{ steps.version.outputs.prerelease }} steps: - uses: actions/checkout@v3 with: fetch-depth: 0 - uses: actions/setup-python@v4 with: python-version: '3.10' - name: Install GitHub matcher for ActionLint checker run: | echo "::add-matcher::.github/actionlint-matcher.json" - name: Install check-wheel-content, and twine run: python -m pip install build check-wheel-contents tox twine - name: Build package run: python -m build - name: Run tox for linter run: python -m tox -e lint - name: List result run: ls -l dist - name: Check wheel contents run: check-wheel-contents dist/*.whl - name: Check long_description run: python -m twine check dist/* - name: Get version info id: version run: tox -e version-info - name: Upload artifacts uses: actions/upload-artifact@v3 with: name: dist path: dist test: name: Python ${{ matrix.python-version }} runs-on: ubuntu-latest env: USING_COVERAGE: 3.7,3.8,3.9,3.10,3.11 strategy: matrix: python-version: ['3.7', '3.8', '3.9', '3.10', 3.11-dev] steps: - uses: actions/checkout@v3 with: fetch-depth: 0 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | set -xe python -VV python -m site python -m pip install --upgrade pip python -m pip install --upgrade coverage[toml] virtualenv tox tox-gh-actions - name: Run tox targets for ${{ matrix.python-version }} run: python -m tox - name: Prepare coverage artifact if: ${{ contains(env.USING_COVERAGE, matrix.python-version) }} uses: aio-libs/prepare-coverage@v21.9.1 check: name: Check if: always() needs: [lint, test] runs-on: ubuntu-latest steps: - name: Decide whether the needed jobs succeeded or failed uses: re-actors/alls-green@release/v1 with: jobs: ${{ 
toJSON(needs) }} - name: Upload coverage uses: aio-libs/upload-coverage@v21.9.4 deploy: name: Deploy environment: release # Run only on pushing a tag if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') needs: [lint, check] runs-on: ubuntu-latest steps: - name: Install pandoc run: | sudo apt-get install -y pandoc - name: Checkout uses: actions/checkout@v3 with: fetch-depth: 0 - name: Download distributions uses: actions/download-artifact@v3 with: name: dist path: dist - name: Collected dists run: | tree dist - name: Convert README.rst to Markdown run: | pandoc -s -o README.md README.rst - name: PyPI upload uses: pypa/gh-action-pypi-publish@v1.5.2 with: packages_dir: dist password: ${{ secrets.PYPI_API_TOKEN }} - name: GitHub Release uses: ncipollo/release-action@v1 with: name: pytest-asyncio ${{ needs.lint.outputs.version }} artifacts: dist/* bodyFile: README.md prerelease: ${{ needs.lint.outputs.prerelease }} token: ${{ secrets.GITHUB_TOKEN }} pytest-asyncio-0.20.3/.gitignore000066400000000000000000000014741434435170600165740ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] # C extensions *.so # Distribution / packaging .Python env/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ *.egg-info/ .installed.cfg *.egg .hypothesis/ # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .pytest_cache nosetests.xml coverage.xml *,cover # Translations *.mo *.pot # Django stuff: *.log # Sphinx documentation docs/_build/ # PyBuilder target/ .venv* .idea .vscode # pyenv .python-version # generated by setuptools_scm pytest_asyncio/_version.py pytest-asyncio-0.20.3/.pre-commit-config.yaml000066400000000000000000000025621434435170600210640ustar00rootroot00000000000000--- repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.3.0 hooks: - id: check-merge-conflict exclude: rst$ - repo: https://github.com/asottile/yesqa rev: v1.4.0 hooks: - id: yesqa - repo: https://github.com/Zac-HD/shed rev: 0.10.7 hooks: - id: shed args: - --refactor types_or: - python - markdown - rst - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt rev: 0.2.2 hooks: - id: yamlfmt args: [--mapping, '2', --sequence, '2', --offset, '0'] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.3.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - id: fix-encoding-pragma args: [--remove] - id: check-case-conflict - id: check-json - id: check-xml - id: check-yaml - id: debug-statements - repo: https://github.com/pycqa/flake8 rev: 5.0.4 hooks: - id: flake8 language_version: python3 - repo: https://github.com/pre-commit/pygrep-hooks rev: v1.9.0 hooks: - id: python-use-type-annotations - repo: https://github.com/rhysd/actionlint rev: v1.6.22 hooks: - id: actionlint-docker args: - -ignore - 'SC2155:' - -ignore - 'SC2086:' - -ignore - 'SC1004:' - repo: https://github.com/sirosen/check-jsonschema rev: 0.19.2 hooks: - id: check-github-actions ci: skip: - actionlint-docker - check-github-actions pytest-asyncio-0.20.3/CHANGELOG.rst000066400000000000000000000231041434435170600166170ustar00rootroot00000000000000========= Changelog ========= 0.20.3 (22-12-08) ================= - Prevent DeprecationWarning to bubble up on CPython 3.10.9 and 3.11.1. 
`#460 `_ 0.20.2 (22-11-11) ================= - Fixes an issue with async fixtures that are defined as methods on a test class not being rebound to the actual test instance. `#197 `_ - Replaced usage of deprecated ``@pytest.mark.tryfirst`` with ``@pytest.hookimpl(tryfirst=True)`` `#438 `_ 0.20.1 (22-10-21) ================= - Fixes an issue that warned about using an old version of pytest, even though the most recent version was installed. `#430 `_ 0.20.0 (22-10-21) ================= - BREAKING: Removed *legacy* mode. If you're upgrading from v0.19 and you haven't configured ``asyncio_mode = legacy``, you can upgrade without taking any additional action. If you're upgrading from an earlier version or you have explicitly enabled *legacy* mode, you need to switch to *auto* or *strict* mode before upgrading to this version. - Deprecate use of pytest v6. - Fixed an issue which prevented fixture setup from being cached. `#404 `_ 0.19.0 (22-07-13) ================= - BREAKING: The default ``asyncio_mode`` is now *strict*. `#293 `_ - Removes `setup.py` since all relevant configuration is present `setup.cfg`. Users requiring an editable installation of pytest-asyncio need to use pip v21.1 or newer. `#283 `_ - Declare support for Python 3.11. 0.18.3 (22-03-25) ================= - Adds `pytest-trio `_ to the test dependencies - Fixes a bug that caused pytest-asyncio to try to set up async pytest_trio fixtures in strict mode. `#298 `_ 0.18.2 (22-03-03) ================= - Fix asyncio auto mode not marking static methods. `#295 `_ - Fix a compatibility issue with Hypothesis 6.39.0. `#302 `_ 0.18.1 (22-02-10) ================= - Fixes a regression that prevented async fixtures from working in synchronous tests. `#286 `_ 0.18.0 (22-02-07) ================= - Raise a warning if @pytest.mark.asyncio is applied to non-async function. `#275 `_ - Support parametrized ``event_loop`` fixture. `#278 `_ 0.17.2 (22-01-17) ================= - Require ``typing-extensions`` on Python<3.8 only. `#269 `_ - Fix a regression in tests collection introduced by 0.17.1, the plugin works fine with non-python tests again. `#267 `_ 0.17.1 (22-01-16) ================= - Fixes a bug that prevents async Hypothesis tests from working without explicit ``asyncio`` marker when ``--asyncio-mode=auto`` is set. `#258 `_ - Fixed a bug that closes the default event loop if the loop doesn't exist `#257 `_ - Added type annotations. `#198 `_ - Show asyncio mode in pytest report headers. `#266 `_ - Relax ``asyncio_mode`` type definition; it allows to support pytest 6.1+. `#262 `_ 0.17.0 (22-01-13) ================= - `pytest-asyncio` no longer alters existing event loop policies. `#168 `_, `#188 `_ - Drop support for Python 3.6 - Fixed an issue when pytest-asyncio was used in combination with `flaky` or inherited asynchronous Hypothesis tests. `#178 `_ `#231 `_ - Added `flaky `_ to test dependencies - Added ``unused_udp_port`` and ``unused_udp_port_factory`` fixtures (similar to ``unused_tcp_port`` and ``unused_tcp_port_factory`` counterparts. `#99 `_ - Added the plugin modes: *strict*, *auto*, and *legacy*. See `documentation `_ for details. `#125 `_ - Correctly process ``KeyboardInterrupt`` during async fixture setup phase `#219 `_ 0.16.0 (2021-10-16) =================== - Add support for Python 3.10 0.15.1 (2021-04-22) =================== - Hotfix for errors while closing event loops while replacing them. `#209 `_ `#210 `_ 0.15.0 (2021-04-19) =================== - Add support for Python 3.9 - Abandon support for Python 3.5. 
If you still require support for Python 3.5, please use pytest-asyncio v0.14 or earlier. - Set ``unused_tcp_port_factory`` fixture scope to 'session'. `#163 `_ - Properly close event loops when replacing them. `#208 `_ 0.14.0 (2020-06-24) =================== - Fix `#162 `_, and ``event_loop`` fixture behavior now is coherent on all scopes. `#164 `_ 0.12.0 (2020-05-04) =================== - Run the event loop fixture as soon as possible. This helps with fixtures that have an implicit dependency on the event loop. `#156 `_ 0.11.0 (2020-04-20) =================== - Test on 3.8, drop 3.3 and 3.4. Stick to 0.10 for these versions. `#152 `_ - Use the new Pytest 5.4.0 Function API. We therefore depend on pytest >= 5.4.0. `#142 `_ - Better ``pytest.skip`` support. `#126 `_ 0.10.0 (2019-01-08) ==================== - ``pytest-asyncio`` integrates with `Hypothesis `_ to support ``@given`` on async test functions using ``asyncio``. `#102 `_ - Pytest 4.1 support. `#105 `_ 0.9.0 (2018-07-28) ================== - Python 3.7 support. - Remove ``event_loop_process_pool`` fixture and ``pytest.mark.asyncio_process_pool`` marker (see https://bugs.python.org/issue34075 for deprecation and removal details) 0.8.0 (2017-09-23) ================== - Improve integration with other packages (like aiohttp) with more careful event loop handling. `#64 `_ 0.7.0 (2017-09-08) ================== - Python versions pre-3.6 can use the async_generator library for async fixtures. `#62 ` 0.6.0 (2017-05-28) ================== - Support for Python versions pre-3.5 has been dropped. - ``pytestmark`` now works on both module and class level. - The ``forbid_global_loop`` parameter has been removed. - Support for async and async gen fixtures has been added. `#45 `_ - The deprecation warning regarding ``asyncio.async()`` has been fixed. `#51 `_ 0.5.0 (2016-09-07) ================== - Introduced a changelog. `#31 `_ - The ``event_loop`` fixture is again responsible for closing itself. This makes the fixture slightly harder to correctly override, but enables other fixtures to depend on it correctly. `#30 `_ - Deal with the event loop policy by wrapping a special pytest hook, ``pytest_fixture_setup``. This allows setting the policy before fixtures dependent on the ``event_loop`` fixture run, thus allowing them to take advantage of the ``forbid_global_loop`` parameter. As a consequence of this, we now depend on pytest 3.0. `#29 `_ 0.4.1 (2016-06-01) ================== - Fix a bug preventing the propagation of exceptions from the plugin. `#25 `_ 0.4.0 (2016-05-30) ================== - Make ``event_loop`` fixtures simpler to override by closing them in the plugin, instead of directly in the fixture. `#21 `_ - Introduce the ``forbid_global_loop`` parameter. `#21 `_ 0.3.0 (2015-12-19) ================== - Support for Python 3.5 ``async``/``await`` syntax. `#17 `_ 0.2.0 (2015-08-01) ================== - ``unused_tcp_port_factory`` fixture. `#10 `_ 0.1.1 (2015-04-23) ================== Initial release. pytest-asyncio-0.20.3/LICENSE000066400000000000000000000260741434435170600156140ustar00rootroot00000000000000Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
pytest-asyncio-0.20.3/MANIFEST.in000066400000000000000000000001461434435170600163350ustar00rootroot00000000000000include CHANGELOG.rst recursive-exclude .github * exclude .gitignore exclude .pre-commit-config.yaml pytest-asyncio-0.20.3/Makefile000066400000000000000000000017051434435170600162410ustar00rootroot00000000000000.PHONY: clean clean-build clean-pyc clean-test lint test clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts clean-build: ## remove build artifacts rm -fr build/ rm -fr dist/ rm -fr .eggs/ find . -name '*.egg-info' -exec rm -fr {} + find . -name '*.egg' -exec rm -f {} + clean-pyc: ## remove Python file artifacts find . -name '*.pyc' -exec rm -f {} + find . -name '*.pyo' -exec rm -f {} + find . -name '*~' -exec rm -f {} + find . -name '__pycache__' -exec rm -fr {} + clean-test: ## remove test and coverage artifacts rm -fr .tox/ rm -f .coverage rm -fr htmlcov/ lint: # CI env-var is set by GitHub actions ifdef CI python -m pre_commit run --all-files --show-diff-on-failure else python -m pre_commit run --all-files endif python -m mypy pytest_asyncio --show-error-codes test: coverage run -m pytest tests coverage xml coverage report install: pip install -U pre-commit pre-commit install pytest-asyncio-0.20.3/README.rst000066400000000000000000000041701434435170600162670ustar00rootroot00000000000000pytest-asyncio ============== .. image:: https://img.shields.io/pypi/v/pytest-asyncio.svg :target: https://pypi.python.org/pypi/pytest-asyncio .. image:: https://github.com/pytest-dev/pytest-asyncio/workflows/CI/badge.svg :target: https://github.com/pytest-dev/pytest-asyncio/actions?workflow=CI .. image:: https://codecov.io/gh/pytest-dev/pytest-asyncio/branch/master/graph/badge.svg :target: https://codecov.io/gh/pytest-dev/pytest-asyncio .. image:: https://img.shields.io/pypi/pyversions/pytest-asyncio.svg :target: https://github.com/pytest-dev/pytest-asyncio :alt: Supported Python versions .. image:: https://img.shields.io/badge/code%20style-black-000000.svg :target: https://github.com/ambv/black pytest-asyncio is a `pytest `_ plugin. It facilitates testing of code that uses the `asyncio `_ library. Specifically, pytest-asyncio provides support for coroutines as test functions. This allows users to *await* code inside their tests. For example, the following code is executed as a test item by pytest: .. code-block:: python @pytest.mark.asyncio async def test_some_asyncio_code(): res = await library.do_something() assert b"expected result" == res Note that test classes subclassing the standard `unittest `__ library are not supported. Users are advised to use `unittest.IsolatedAsyncioTestCase `__ or an async framework such as `asynctest `__. pytest-asyncio is available under the `Apache License 2.0 `_. Installation ------------ To install pytest-asyncio, simply: .. code-block:: bash $ pip install pytest-asyncio This is enough for pytest to pick up pytest-asyncio. Contributing ------------ Contributions are very welcome. Tests can be run with ``tox``, please ensure the coverage at least stays the same before you submit a pull request. 
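For example, a local run might look like this (a minimal sketch, assuming ``tox`` is
installed and invoked from the repository root, mirroring the project's CI workflow):

.. code-block:: bash

    $ pip install tox
    $ python -m tox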
pytest-asyncio-0.20.3/dependencies/000077500000000000000000000000001434435170600172245ustar00rootroot00000000000000pytest-asyncio-0.20.3/dependencies/default/000077500000000000000000000000001434435170600206505ustar00rootroot00000000000000pytest-asyncio-0.20.3/dependencies/default/constraints.txt000066400000000000000000000006311434435170600237600ustar00rootroot00000000000000async-generator==1.10 attrs==22.1.0 coverage==6.5.0 exceptiongroup==1.0.4 flaky==3.7.0 hypothesis==6.58.1 idna==3.4 importlib-metadata==5.1.0 iniconfig==1.1.1 mypy==0.991 mypy-extensions==0.4.3 outcome==1.2.0 packaging==21.3 pluggy==1.0.0 pyparsing==3.0.9 pytest==7.2.0 pytest-trio==0.8.0 sniffio==1.3.0 sortedcontainers==2.4.0 tomli==2.0.1 trio==0.22.0 typed-ast==1.5.4 typing_extensions==4.4.0 zipp==3.11.0 pytest-asyncio-0.20.3/dependencies/default/requirements.txt000066400000000000000000000002661434435170600241400ustar00rootroot00000000000000# Always adjust install_requires in setup.cfg and pytest-min-requirements.txt # when changing runtime dependencies pytest >= 6.1.0 typing-extensions >= 3.7.2; python_version < "3.8" pytest-asyncio-0.20.3/dependencies/pytest-min/000077500000000000000000000000001434435170600213355ustar00rootroot00000000000000pytest-asyncio-0.20.3/dependencies/pytest-min/constraints.txt000066400000000000000000000005441434435170600244500ustar00rootroot00000000000000async-generator==1.10 attrs==21.4.0 coverage==6.3.2 flaky==3.7.0 hypothesis==6.43.3 idna==3.3 iniconfig==1.1.1 mypy==0.942 mypy-extensions==0.4.3 outcome==1.1.0 packaging==21.3 pluggy==0.13.1 py==1.11.0 pyparsing==3.0.8 pytest==6.1.0 pytest-trio==0.7.0 sniffio==1.2.0 sortedcontainers==2.4.0 toml==0.10.2 tomli==2.0.1 trio==0.20.0 typing_extensions==4.2.0 pytest-asyncio-0.20.3/dependencies/pytest-min/requirements.txt000066400000000000000000000002631434435170600246220ustar00rootroot00000000000000# Always adjust install_requires in setup.cfg and requirements.txt # when changing minimum version dependencies pytest == 6.1.0 typing-extensions >= 3.7.2; python_version < "3.8" pytest-asyncio-0.20.3/docs/000077500000000000000000000000001434435170600155265ustar00rootroot00000000000000pytest-asyncio-0.20.3/docs/Makefile000066400000000000000000000011761434435170600171730ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SOURCEDIR = source BUILDDIR = build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) pytest-asyncio-0.20.3/docs/make.bat000066400000000000000000000014011434435170600171270ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=source set BUILDDIR=build %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. 
echo.If you don't have Sphinx installed, grab it from echo.https://www.sphinx-doc.org/ exit /b 1 ) if "%1" == "" goto help %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% :end popd pytest-asyncio-0.20.3/docs/source/000077500000000000000000000000001434435170600170265ustar00rootroot00000000000000pytest-asyncio-0.20.3/docs/source/concepts.rst000066400000000000000000000035051434435170600214010ustar00rootroot00000000000000======== Concepts ======== asyncio event loops =================== pytest-asyncio runs each test item in its own asyncio event loop. The loop can be accessed via the ``event_loop`` fixture, which is automatically requested by all async tests. .. code-block:: python async def test_provided_loop_is_running_loop(event_loop): assert event_loop is asyncio.get_running_loop() You can think of `event_loop` as an autouse fixture for async tests. Test discovery modes ==================== Pytest-asyncio provides two modes for test discovery, *strict* and *auto*. Strict mode ----------- In strict mode pytest-asyncio will only run tests that have the *asyncio* marker and will only evaluate async fixtures decorated with ``@pytest_asyncio.fixture``. Test functions and fixtures without these markers and decorators will not be handled by pytest-asyncio. This mode is intended for projects that want so support multiple asynchronous programming libraries as it allows pytest-asyncio to coexist with other async testing plugins in the same codebase. Pytest automatically enables installed plugins. As a result pytest plugins need to coexist peacefully in their default configuration. This is why strict mode is the default mode. Auto mode --------- In *auto* mode pytest-asyncio automatically adds the *asyncio* marker to all asynchronous test functions. It will also take ownership of all async fixtures, regardless of whether they are decorated with ``@pytest.fixture`` or ``@pytest_asyncio.fixture``. This mode is intended for projects that use *asyncio* as their only asynchronous programming library. Auto mode makes for the simplest test and fixture configuration and is the recommended default. If you intend to support multiple asynchronous programming libraries, e.g. *asyncio* and *trio*, strict mode will be the preferred option. pytest-asyncio-0.20.3/docs/source/conf.py000066400000000000000000000016771434435170600203400ustar00rootroot00000000000000# Configuration file for the Sphinx documentation builder. 
# # For the full list of built-in configuration values, see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html # -- Project information ----------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information project = "pytest-asyncio" copyright = "2022, pytest-asyncio contributors" author = "Tin Tvrtković" release = "v0.20.1" # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration extensions = [] templates_path = ["_templates"] exclude_patterns = [] # -- Options for HTML output ------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output html_theme = "sphinx_rtd_theme" html_static_path = ["_static"] pytest-asyncio-0.20.3/docs/source/index.rst000066400000000000000000000023321434435170600206670ustar00rootroot00000000000000========================== Welcome to pytest-asyncio! ========================== .. toctree:: :maxdepth: 1 :hidden: concepts reference support pytest-asyncio is a `pytest `_ plugin. It facilitates testing of code that uses the `asyncio `_ library. Specifically, pytest-asyncio provides support for coroutines as test functions. This allows users to *await* code inside their tests. For example, the following code is executed as a test item by pytest: .. code-block:: python @pytest.mark.asyncio async def test_some_asyncio_code(): res = await library.do_something() assert b"expected result" == res Note that test classes subclassing the standard `unittest `__ library are not supported. Users are advised to use `unittest.IsolatedAsyncioTestCase `__ or an async framework such as `asynctest `__. pytest-asyncio is available under the `Apache License 2.0 `_. pytest-asyncio-0.20.3/docs/source/reference.rst000066400000000000000000000115441434435170600215230ustar00rootroot00000000000000========= Reference ========= Configuration ============= The pytest-asyncio mode can be set by the ``asyncio_mode`` configuration option in the `configuration file `_: .. code-block:: ini # pytest.ini [pytest] asyncio_mode = auto The value can also be set via the ``--asyncio-mode`` command-line option: .. code-block:: bash $ pytest tests --asyncio-mode=strict If the asyncio mode is set in both the pytest configuration file and the command-line option, the command-line option takes precedence. If no asyncio mode is specified, the mode defaults to `strict`. Fixtures ======== ``event_loop`` -------------- Creates a new asyncio event loop based on the current event loop policy. The new loop is available as the return value of this fixture or via `asyncio.get_running_loop `__. The event loop is closed when the fixture scope ends. The fixture scope defaults to ``function`` scope. .. code-block:: python def test_http_client(event_loop): url = "http://httpbin.org/get" resp = event_loop.run_until_complete(http_client(url)) assert b"HTTP/1.1 200 OK" in resp Note that, when using the ``event_loop`` fixture, you need to interact with the event loop using methods like ``event_loop.run_until_complete``. If you want to *await* code inside your test function, you need to write a coroutine and use it as a test function. The `asyncio <#pytest-mark-asyncio>`__ marker is used to mark coroutines that should be treated as test functions. The ``event_loop`` fixture can be overridden in any of the standard pytest locations, e.g. 
directly in the test file, or in ``conftest.py``. This allows redefining the fixture scope, for example: .. code-block:: python @pytest.fixture(scope="session") def event_loop(): policy = asyncio.get_event_loop_policy() loop = policy.new_event_loop() yield loop loop.close() If you need to change the type of the event loop, prefer setting a custom event loop policy over redefining the ``event_loop`` fixture. If the ``pytest.mark.asyncio`` decorator is applied to a test function, the ``event_loop`` fixture will be requested automatically by the test function. ``unused_tcp_port`` ------------------- Finds and yields a single unused TCP port on the localhost interface. Useful for binding temporary test servers. ``unused_tcp_port_factory`` --------------------------- A callable which returns a different unused TCP port each invocation. Useful when several unused TCP ports are required in a test. .. code-block:: python def a_test(unused_tcp_port_factory): port1, port2 = unused_tcp_port_factory(), unused_tcp_port_factory() ... ``unused_udp_port`` and ``unused_udp_port_factory`` --------------------------------------------------- Works just like their TCP counterparts but returns unused UDP ports. Markers ======= ``pytest.mark.asyncio`` ----------------------- A coroutine or async generator with this marker will be treated as a test function by pytest. The marked function will be executed as an asyncio task in the event loop provided by the ``event_loop`` fixture. In order to make your test code a little more concise, the pytest |pytestmark|_ feature can be used to mark entire modules or classes with this marker. Only test coroutines will be affected (by default, coroutines prefixed by ``test_``), so, for example, fixtures are safe to define. .. code-block:: python import asyncio import pytest # All test coroutines will be treated as marked. pytestmark = pytest.mark.asyncio async def test_example(event_loop): """No marker!""" await asyncio.sleep(0, loop=event_loop) In *auto* mode, the ``pytest.mark.asyncio`` marker can be omitted, the marker is added automatically to *async* test functions. .. |pytestmark| replace:: ``pytestmark`` .. _pytestmark: http://doc.pytest.org/en/latest/example/markers.html#marking-whole-classes-or-modules Decorators ========== Asynchronous fixtures are defined just like ordinary pytest fixtures, except they should be decorated with ``@pytest_asyncio.fixture``. .. code-block:: python3 import pytest_asyncio @pytest_asyncio.fixture async def async_gen_fixture(): await asyncio.sleep(0.1) yield "a value" @pytest_asyncio.fixture(scope="module") async def async_fixture(): return await asyncio.sleep(0.1) All scopes are supported, but if you use a non-function scope you will need to redefine the ``event_loop`` fixture to have the same or broader scope. Async fixtures need the event loop, and so must have the same or narrower scope than the ``event_loop`` fixture. *auto* mode automatically converts async fixtures declared with the standard ``@pytest.fixture`` decorator to *asyncio-driven* versions. pytest-asyncio-0.20.3/docs/source/support.rst000066400000000000000000000023011434435170600212700ustar00rootroot00000000000000=============== Getting support =============== Enterprise support ================== `Tidelift `_ works with maintainers of numerous open source projects to ensure enterprise-grade support for your software supply chain. The Tidelift subscription includes security updates, verified license compliance, continuous software maintenance, and more. 
As a result, you get the guarantees provided by commercial software for the open source packages you use. Consider `signing up for the Tidelift subscription `__. Direct maintainer support ========================= If you require commercial support outside of the Tidelift subscription, reach out to `Michael Seifert, `__ one of the project's maintainers. Community support ================= The GitHub page of pytest-asyncio offers free community support on a best-effort basis. Please use the `issue tracker `__ to report bugs and the `discussions `__ to ask questions. pytest-asyncio-0.20.3/pyproject.toml000066400000000000000000000003121434435170600175060ustar00rootroot00000000000000[build-system] requires = [ "setuptools>=51.0", "wheel>=0.36", "setuptools_scm[toml]>=6.2" ] build-backend = "setuptools.build_meta" [tool.setuptools_scm] write_to = "pytest_asyncio/_version.py" pytest-asyncio-0.20.3/pytest_asyncio/000077500000000000000000000000001434435170600176535ustar00rootroot00000000000000pytest-asyncio-0.20.3/pytest_asyncio/__init__.py000066400000000000000000000002421434435170600217620ustar00rootroot00000000000000"""The main point for importing pytest-asyncio items.""" from ._version import version as __version__ # noqa from .plugin import fixture __all__ = ("fixture",) pytest-asyncio-0.20.3/pytest_asyncio/plugin.py000066400000000000000000000420211434435170600215220ustar00rootroot00000000000000"""pytest-asyncio implementation.""" import asyncio import contextlib import enum import functools import inspect import socket import sys import warnings from typing import ( Any, AsyncIterator, Awaitable, Callable, Dict, Iterable, Iterator, List, Optional, Set, TypeVar, Union, cast, overload, ) import pytest from pytest import Function, Item, Session if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal _R = TypeVar("_R") _ScopeName = Literal["session", "package", "module", "class", "function"] _T = TypeVar("_T") SimpleFixtureFunction = TypeVar( "SimpleFixtureFunction", bound=Callable[..., Awaitable[_R]] ) FactoryFixtureFunction = TypeVar( "FactoryFixtureFunction", bound=Callable[..., AsyncIterator[_R]] ) FixtureFunction = Union[SimpleFixtureFunction, FactoryFixtureFunction] FixtureFunctionMarker = Callable[[FixtureFunction], FixtureFunction] Config = Any # pytest < 7.0 PytestPluginManager = Any # pytest < 7.0 FixtureDef = Any # pytest < 7.0 Parser = Any # pytest < 7.0 SubRequest = Any # pytest < 7.0 class Mode(str, enum.Enum): AUTO = "auto" STRICT = "strict" ASYNCIO_MODE_HELP = """\ 'auto' - for automatically handling all async functions by the plugin 'strict' - for autoprocessing disabling (useful if different async frameworks \ should be tested together, e.g. \ both pytest-asyncio and pytest-trio are used in the same project) """ def pytest_addoption(parser: Parser, pluginmanager: PytestPluginManager) -> None: group = parser.getgroup("asyncio") group.addoption( "--asyncio-mode", dest="asyncio_mode", default=None, metavar="MODE", help=ASYNCIO_MODE_HELP, ) parser.addini( "asyncio_mode", help="default value for --asyncio-mode", default="strict", ) @overload def fixture( fixture_function: FixtureFunction, *, scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" = ..., params: Optional[Iterable[object]] = ..., autouse: bool = ..., ids: Union[ Iterable[Union[str, float, int, bool, None]], Callable[[Any], Optional[object]], None, ] = ..., name: Optional[str] = ..., ) -> FixtureFunction: ... 
@overload def fixture( fixture_function: None = ..., *, scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" = ..., params: Optional[Iterable[object]] = ..., autouse: bool = ..., ids: Union[ Iterable[Union[str, float, int, bool, None]], Callable[[Any], Optional[object]], None, ] = ..., name: Optional[str] = None, ) -> FixtureFunctionMarker: ... def fixture( fixture_function: Optional[FixtureFunction] = None, **kwargs: Any ) -> Union[FixtureFunction, FixtureFunctionMarker]: if fixture_function is not None: _make_asyncio_fixture_function(fixture_function) return pytest.fixture(fixture_function, **kwargs) else: @functools.wraps(fixture) def inner(fixture_function: FixtureFunction) -> FixtureFunction: return fixture(fixture_function, **kwargs) return inner def _is_asyncio_fixture_function(obj: Any) -> bool: obj = getattr(obj, "__func__", obj) # instance method maybe? return getattr(obj, "_force_asyncio_fixture", False) def _make_asyncio_fixture_function(obj: Any) -> None: if hasattr(obj, "__func__"): # instance method, check the function object obj = obj.__func__ obj._force_asyncio_fixture = True def _is_coroutine(obj: Any) -> bool: """Check to see if an object is really an asyncio coroutine.""" return asyncio.iscoroutinefunction(obj) def _is_coroutine_or_asyncgen(obj: Any) -> bool: return _is_coroutine(obj) or inspect.isasyncgenfunction(obj) def _get_asyncio_mode(config: Config) -> Mode: val = config.getoption("asyncio_mode") if val is None: val = config.getini("asyncio_mode") return Mode(val) def pytest_configure(config: Config) -> None: """Inject documentation.""" config.addinivalue_line( "markers", "asyncio: " "mark the test as a coroutine, it will be " "run using an asyncio event loop", ) if getattr(pytest, "version_tuple", (0, 0, 0)) < (7,): warnings.warn( "You're using an outdated version of pytest. Newer releases of " "pytest-asyncio will not be compatible with this pytest version. " "Please update pytest to version 7 or later.", DeprecationWarning, ) @pytest.hookimpl(tryfirst=True) def pytest_report_header(config: Config) -> List[str]: """Add asyncio config to pytest header.""" mode = _get_asyncio_mode(config) return [f"asyncio: mode={mode}"] def _preprocess_async_fixtures( config: Config, processed_fixturedefs: Set[FixtureDef], ) -> None: asyncio_mode = _get_asyncio_mode(config) fixturemanager = config.pluginmanager.get_plugin("funcmanage") for fixtures in fixturemanager._arg2fixturedefs.values(): for fixturedef in fixtures: func = fixturedef.func if fixturedef in processed_fixturedefs or not _is_coroutine_or_asyncgen( func ): continue if not _is_asyncio_fixture_function(func) and asyncio_mode == Mode.STRICT: # Ignore async fixtures without explicit asyncio mark in strict mode # This applies to pytest_trio fixtures, for example continue _make_asyncio_fixture_function(func) _inject_fixture_argnames(fixturedef) _synchronize_async_fixture(fixturedef) assert _is_asyncio_fixture_function(fixturedef.func) processed_fixturedefs.add(fixturedef) def _inject_fixture_argnames(fixturedef: FixtureDef) -> None: """ Ensures that `request` and `event_loop` are arguments of the specified fixture. """ to_add = [] for name in ("request", "event_loop"): if name not in fixturedef.argnames: to_add.append(name) if to_add: fixturedef.argnames += tuple(to_add) def _synchronize_async_fixture(fixturedef: FixtureDef) -> None: """ Wraps the fixture function of an async fixture in a synchronous function. 
""" if inspect.isasyncgenfunction(fixturedef.func): _wrap_asyncgen_fixture(fixturedef) elif inspect.iscoroutinefunction(fixturedef.func): _wrap_async_fixture(fixturedef) def _add_kwargs( func: Callable[..., Any], kwargs: Dict[str, Any], event_loop: asyncio.AbstractEventLoop, request: SubRequest, ) -> Dict[str, Any]: sig = inspect.signature(func) ret = kwargs.copy() if "request" in sig.parameters: ret["request"] = request if "event_loop" in sig.parameters: ret["event_loop"] = event_loop return ret def _perhaps_rebind_fixture_func( func: _T, instance: Optional[Any], unittest: bool ) -> _T: if instance is not None: # The fixture needs to be bound to the actual request.instance # so it is bound to the same object as the test method. unbound, cls = func, None try: unbound, cls = func.__func__, type(func.__self__) # type: ignore except AttributeError: pass # If unittest is true, the fixture is bound unconditionally. # otherwise, only if the fixture was bound before to an instance of # the same type. if unittest or (cls is not None and isinstance(instance, cls)): func = unbound.__get__(instance) # type: ignore return func def _wrap_asyncgen_fixture(fixturedef: FixtureDef) -> None: fixture = fixturedef.func @functools.wraps(fixture) def _asyncgen_fixture_wrapper( event_loop: asyncio.AbstractEventLoop, request: SubRequest, **kwargs: Any ): func = _perhaps_rebind_fixture_func( fixture, request.instance, fixturedef.unittest ) gen_obj = func(**_add_kwargs(func, kwargs, event_loop, request)) async def setup(): res = await gen_obj.__anext__() return res def finalizer() -> None: """Yield again, to finalize.""" async def async_finalizer() -> None: try: await gen_obj.__anext__() except StopAsyncIteration: pass else: msg = "Async generator fixture didn't stop." msg += "Yield only once." raise ValueError(msg) event_loop.run_until_complete(async_finalizer()) result = event_loop.run_until_complete(setup()) request.addfinalizer(finalizer) return result fixturedef.func = _asyncgen_fixture_wrapper def _wrap_async_fixture(fixturedef: FixtureDef) -> None: fixture = fixturedef.func @functools.wraps(fixture) def _async_fixture_wrapper( event_loop: asyncio.AbstractEventLoop, request: SubRequest, **kwargs: Any ): func = _perhaps_rebind_fixture_func( fixture, request.instance, fixturedef.unittest ) async def setup(): res = await func(**_add_kwargs(func, kwargs, event_loop, request)) return res return event_loop.run_until_complete(setup()) fixturedef.func = _async_fixture_wrapper _HOLDER: Set[FixtureDef] = set() @pytest.hookimpl(tryfirst=True) def pytest_pycollect_makeitem( collector: Union[pytest.Module, pytest.Class], name: str, obj: object ) -> Union[ pytest.Item, pytest.Collector, List[Union[pytest.Item, pytest.Collector]], None ]: """A pytest hook to collect asyncio coroutines.""" if not collector.funcnamefilter(name): return None _preprocess_async_fixtures(collector.config, _HOLDER) return None def pytest_collection_modifyitems( session: Session, config: Config, items: List[Item] ) -> None: """ Marks collected async test items as `asyncio` tests. The mark is only applied in `AUTO` mode. It is applied to: - coroutines - staticmethods wrapping coroutines - Hypothesis tests wrapping coroutines """ if _get_asyncio_mode(config) != Mode.AUTO: return function_items = (item for item in items if isinstance(item, Function)) for function_item in function_items: function = function_item.obj if isinstance(function, staticmethod): # staticmethods need to be unwrapped. 
function = function.__func__ if ( _is_coroutine(function) or _is_hypothesis_test(function) and _hypothesis_test_wraps_coroutine(function) ): function_item.add_marker("asyncio") def _hypothesis_test_wraps_coroutine(function: Any) -> bool: return _is_coroutine(function.hypothesis.inner_test) @pytest.hookimpl(trylast=True) def pytest_fixture_post_finalizer(fixturedef: FixtureDef, request: SubRequest) -> None: """Called after fixture teardown""" if fixturedef.argname == "event_loop": policy = asyncio.get_event_loop_policy() try: loop = policy.get_event_loop() except RuntimeError: loop = None if loop is not None: # Clean up existing loop to avoid ResourceWarnings loop.close() new_loop = policy.new_event_loop() # Replace existing event loop # Ensure subsequent calls to get_event_loop() succeed policy.set_event_loop(new_loop) @pytest.hookimpl(hookwrapper=True) def pytest_fixture_setup( fixturedef: FixtureDef, request: SubRequest ) -> Optional[object]: """Adjust the event loop policy when an event loop is produced.""" if fixturedef.argname == "event_loop": outcome = yield loop = outcome.get_result() policy = asyncio.get_event_loop_policy() try: with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) old_loop = policy.get_event_loop() if old_loop is not loop: old_loop.close() except RuntimeError: # Swallow this, since it's probably bad event loop hygiene. pass policy.set_event_loop(loop) return yield @pytest.hookimpl(tryfirst=True, hookwrapper=True) def pytest_pyfunc_call(pyfuncitem: pytest.Function) -> Optional[object]: """ Pytest hook called before a test case is run. Wraps marked tests in a synchronous function where the wrapped test coroutine is executed in an event loop. """ marker = pyfuncitem.get_closest_marker("asyncio") if marker is not None: funcargs: Dict[str, object] = pyfuncitem.funcargs # type: ignore[name-defined] loop = cast(asyncio.AbstractEventLoop, funcargs["event_loop"]) if _is_hypothesis_test(pyfuncitem.obj): pyfuncitem.obj.hypothesis.inner_test = wrap_in_sync( pyfuncitem, pyfuncitem.obj.hypothesis.inner_test, _loop=loop, ) else: pyfuncitem.obj = wrap_in_sync( pyfuncitem, pyfuncitem.obj, _loop=loop, ) yield def _is_hypothesis_test(function: Any) -> bool: return getattr(function, "is_hypothesis_test", False) def wrap_in_sync( pyfuncitem: pytest.Function, func: Callable[..., Awaitable[Any]], _loop: asyncio.AbstractEventLoop, ): """Return a sync wrapper around an async function executing it in the current event loop.""" # if the function is already wrapped, we rewrap using the original one # not using __wrapped__ because the original function may already be # a wrapped one raw_func = getattr(func, "_raw_test_func", None) if raw_func is not None: func = raw_func @functools.wraps(func) def inner(*args, **kwargs): coro = func(*args, **kwargs) if not inspect.isawaitable(coro): pyfuncitem.warn( pytest.PytestWarning( f"The test {pyfuncitem} is marked with '@pytest.mark.asyncio' " "but it is not an async function. " "Please remove asyncio marker. " "If the test is not marked explicitly, " "check for global markers applied via 'pytestmark'." ) ) return task = asyncio.ensure_future(coro, loop=_loop) try: _loop.run_until_complete(task) except BaseException: # run_until_complete doesn't get the result from exceptions # that are not subclasses of `Exception`. Consume all # exceptions to prevent asyncio's warning from logging. 
if task.done() and not task.cancelled(): task.exception() raise inner._raw_test_func = func # type: ignore[attr-defined] return inner def pytest_runtest_setup(item: pytest.Item) -> None: marker = item.get_closest_marker("asyncio") if marker is None: return fixturenames = item.fixturenames # type: ignore[attr-defined] # inject an event loop fixture for all async tests if "event_loop" in fixturenames: fixturenames.remove("event_loop") fixturenames.insert(0, "event_loop") obj = getattr(item, "obj", None) if not getattr(obj, "hypothesis", False) and getattr( obj, "is_hypothesis_test", False ): pytest.fail( "test function `%r` is using Hypothesis, but pytest-asyncio " "only works with Hypothesis 3.64.0 or later." % item ) @pytest.fixture def event_loop(request: "pytest.FixtureRequest") -> Iterator[asyncio.AbstractEventLoop]: """Create an instance of the default event loop for each test case.""" loop = asyncio.get_event_loop_policy().new_event_loop() yield loop loop.close() def _unused_port(socket_type: int) -> int: """Find an unused localhost port from 1024-65535 and return it.""" with contextlib.closing(socket.socket(type=socket_type)) as sock: sock.bind(("127.0.0.1", 0)) return sock.getsockname()[1] @pytest.fixture def unused_tcp_port() -> int: return _unused_port(socket.SOCK_STREAM) @pytest.fixture def unused_udp_port() -> int: return _unused_port(socket.SOCK_DGRAM) @pytest.fixture(scope="session") def unused_tcp_port_factory() -> Callable[[], int]: """A factory function, producing different unused TCP ports.""" produced = set() def factory(): """Return an unused port.""" port = _unused_port(socket.SOCK_STREAM) while port in produced: port = _unused_port(socket.SOCK_STREAM) produced.add(port) return port return factory @pytest.fixture(scope="session") def unused_udp_port_factory() -> Callable[[], int]: """A factory function, producing different unused UDP ports.""" produced = set() def factory(): """Return an unused port.""" port = _unused_port(socket.SOCK_DGRAM) while port in produced: port = _unused_port(socket.SOCK_DGRAM) produced.add(port) return port return factory pytest-asyncio-0.20.3/pytest_asyncio/py.typed000066400000000000000000000000001434435170600213400ustar00rootroot00000000000000pytest-asyncio-0.20.3/setup.cfg000066400000000000000000000031761434435170600164260ustar00rootroot00000000000000[metadata] name = pytest-asyncio version = attr: pytest_asyncio.__version__ url = https://github.com/pytest-dev/pytest-asyncio project_urls = GitHub = https://github.com/pytest-dev/pytest-asyncio description = Pytest support for asyncio long_description = file: README.rst long_description_content_type = text/x-rst author = Tin Tvrtković author_email = tinchester@gmail.com license = Apache 2.0 license_files = LICENSE classifiers = Development Status :: 4 - Beta Intended Audience :: Developers License :: OSI Approved :: Apache Software License Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Topic :: Software Development :: Testing Framework :: AsyncIO Framework :: Pytest Typing :: Typed [options] python_requires = >=3.7 packages = find: include_package_data = True # Always adjust requirements.txt and pytest-min-requirements.txt when changing runtime dependencies install_requires = pytest >= 6.1.0 typing-extensions >= 3.7.2; python_version < "3.8" [options.extras_require] testing = coverage >= 6.2 hypothesis >= 5.7.1 flaky >= 3.5.0 mypy >= 0.931 
pytest-trio >= 0.7.0 docs = sphinx >= 5.3 sphinx-rtd-theme >= 1.0 [options.entry_points] pytest11 = asyncio = pytest_asyncio.plugin [coverage:run] source = pytest_asyncio branch = true [coverage:report] show_missing = true [tool:pytest] addopts = -rsx --tb=short testpaths = tests asyncio_mode = auto junit_family=xunit2 filterwarnings = error [flake8] max-line-length = 88 pytest-asyncio-0.20.3/tests/000077500000000000000000000000001434435170600157405ustar00rootroot00000000000000pytest-asyncio-0.20.3/tests/async_fixtures/000077500000000000000000000000001434435170600210065ustar00rootroot00000000000000pytest-asyncio-0.20.3/tests/async_fixtures/__init__.py000066400000000000000000000000001434435170600231050ustar00rootroot00000000000000pytest-asyncio-0.20.3/tests/async_fixtures/test_async_fixtures.py000066400000000000000000000014121434435170600254630ustar00rootroot00000000000000import asyncio import unittest.mock import pytest START = object() END = object() RETVAL = object() @pytest.fixture def mock(): return unittest.mock.Mock(return_value=RETVAL) @pytest.fixture async def async_fixture(mock): return await asyncio.sleep(0.1, result=mock(START)) @pytest.mark.asyncio async def test_async_fixture(async_fixture, mock): assert mock.call_count == 1 assert mock.call_args_list[-1] == unittest.mock.call(START) assert async_fixture is RETVAL class TestAsyncFixtureMethod: is_same_instance = False @pytest.fixture(autouse=True) async def async_fixture_method(self): self.is_same_instance = True @pytest.mark.asyncio async def test_async_fixture_method(self): assert self.is_same_instance pytest-asyncio-0.20.3/tests/async_fixtures/test_async_fixtures_scope.py000066400000000000000000000007521434435170600266620ustar00rootroot00000000000000""" We support module-scoped async fixtures, but only if the event loop is module-scoped too. 
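The module-scoped event_loop override below ensures the async fixture and the tests in this module run on the same loop.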
""" import asyncio import pytest @pytest.fixture(scope="module") def event_loop(): """A module-scoped event loop.""" return asyncio.new_event_loop() @pytest.fixture(scope="module") async def async_fixture(): await asyncio.sleep(0.1) return 1 @pytest.mark.asyncio async def test_async_fixture_scope(async_fixture): assert async_fixture == 1 await asyncio.sleep(0.1) pytest-asyncio-0.20.3/tests/async_fixtures/test_async_fixtures_with_finalizer.py000066400000000000000000000037061434435170600305710ustar00rootroot00000000000000import asyncio import functools import pytest @pytest.mark.asyncio async def test_module_with_event_loop_finalizer(port_with_event_loop_finalizer): await asyncio.sleep(0.01) assert port_with_event_loop_finalizer @pytest.mark.asyncio async def test_module_with_get_event_loop_finalizer(port_with_get_event_loop_finalizer): await asyncio.sleep(0.01) assert port_with_get_event_loop_finalizer @pytest.fixture(scope="module") def event_loop(): """Change event_loop fixture to module level.""" policy = asyncio.get_event_loop_policy() loop = policy.new_event_loop() yield loop loop.close() @pytest.fixture(scope="module") async def port_with_event_loop_finalizer(request, event_loop): def port_finalizer(finalizer): async def port_afinalizer(): # await task using loop provided by event_loop fixture # RuntimeError is raised if task is created on a different loop await finalizer event_loop.run_until_complete(port_afinalizer()) worker = asyncio.ensure_future(asyncio.sleep(0.2)) request.addfinalizer(functools.partial(port_finalizer, worker)) return True @pytest.fixture(scope="module") async def port_with_get_event_loop_finalizer(request, event_loop): def port_finalizer(finalizer): async def port_afinalizer(): # await task using current loop retrieved from the event loop policy # RuntimeError is raised if task is created on a different loop. 
# This can happen when pytest_fixture_setup # does not set up the loop correctly, # for example when policy.set_event_loop() is called with a wrong argument await finalizer current_loop = asyncio.get_event_loop_policy().get_event_loop() current_loop.run_until_complete(port_afinalizer()) worker = asyncio.ensure_future(asyncio.sleep(0.2)) request.addfinalizer(functools.partial(port_finalizer, worker)) return True pytest-asyncio-0.20.3/tests/async_fixtures/test_async_gen_fixtures.py000066400000000000000000000020701434435170600263150ustar00rootroot00000000000000import unittest.mock import pytest START = object() END = object() RETVAL = object() @pytest.fixture(scope="module") def mock(): return unittest.mock.Mock(return_value=RETVAL) @pytest.fixture async def async_gen_fixture(mock): try: yield mock(START) except Exception as e: mock(e) else: mock(END) @pytest.mark.asyncio async def test_async_gen_fixture(async_gen_fixture, mock): assert mock.called assert mock.call_args_list[-1] == unittest.mock.call(START) assert async_gen_fixture is RETVAL @pytest.mark.asyncio async def test_async_gen_fixture_finalized(mock): try: assert mock.called assert mock.call_args_list[-1] == unittest.mock.call(END) finally: mock.reset_mock() class TestAsyncGenFixtureMethod: is_same_instance = False @pytest.fixture(autouse=True) async def async_gen_fixture_method(self): self.is_same_instance = True yield None @pytest.mark.asyncio async def test_async_gen_fixture_method(self): assert self.is_same_instance pytest-asyncio-0.20.3/tests/async_fixtures/test_nested.py000066400000000000000000000010331434435170600236760ustar00rootroot00000000000000import asyncio import pytest @pytest.fixture() async def async_inner_fixture(): await asyncio.sleep(0.01) print("inner start") yield True print("inner stop") @pytest.fixture() async def async_fixture_outer(async_inner_fixture, event_loop): await asyncio.sleep(0.01) print("outer start") assert async_inner_fixture is True yield True print("outer stop") @pytest.mark.asyncio async def test_async_fixture(async_fixture_outer): assert async_fixture_outer is True print("test_async_fixture") pytest-asyncio-0.20.3/tests/async_fixtures/test_parametrized_loop.py000066400000000000000000000010751434435170600261420ustar00rootroot00000000000000import asyncio import pytest TESTS_COUNT = 0 def teardown_module(): # parametrized 2 * 2 times: 2 for 'event_loop' and 2 for 'fix' assert TESTS_COUNT == 4 @pytest.fixture(scope="module", params=[1, 2]) def event_loop(request): request.param loop = asyncio.new_event_loop() yield loop loop.close() @pytest.fixture(params=["a", "b"]) async def fix(request): await asyncio.sleep(0) return request.param @pytest.mark.asyncio async def test_parametrized_loop(fix): await asyncio.sleep(0) global TESTS_COUNT TESTS_COUNT += 1 pytest-asyncio-0.20.3/tests/conftest.py000066400000000000000000000013411434435170600201360ustar00rootroot00000000000000import asyncio import pytest pytest_plugins = "pytester" @pytest.fixture def dependent_fixture(event_loop): """A fixture dependent on the event_loop fixture, doing some cleanup.""" counter = 0 async def just_a_sleep(): """Just sleep a little while.""" nonlocal event_loop await asyncio.sleep(0.1) nonlocal counter counter += 1 event_loop.run_until_complete(just_a_sleep()) yield event_loop.run_until_complete(just_a_sleep()) assert counter == 2 @pytest.fixture(scope="session", name="factory_involving_factories") def factory_involving_factories_fixture(unused_tcp_port_factory): def factory(): return unused_tcp_port_factory() return 
factory pytest-asyncio-0.20.3/tests/hypothesis/000077500000000000000000000000001434435170600201375ustar00rootroot00000000000000pytest-asyncio-0.20.3/tests/hypothesis/test_base.py000066400000000000000000000041621434435170600224650ustar00rootroot00000000000000"""Tests for the Hypothesis integration, which wraps async functions in a sync shim for Hypothesis. """ import asyncio from textwrap import dedent import pytest from hypothesis import given, strategies as st @pytest.fixture(scope="module") def event_loop(): loop = asyncio.get_event_loop_policy().new_event_loop() yield loop loop.close() @given(st.integers()) @pytest.mark.asyncio async def test_mark_inner(n): assert isinstance(n, int) @pytest.mark.asyncio @given(st.integers()) async def test_mark_outer(n): assert isinstance(n, int) @pytest.mark.parametrize("y", [1, 2]) @given(x=st.none()) @pytest.mark.asyncio async def test_mark_and_parametrize(x, y): assert x is None assert y in (1, 2) @given(st.integers()) @pytest.mark.asyncio async def test_can_use_fixture_provided_event_loop(event_loop, n): semaphore = asyncio.Semaphore(value=0) event_loop.call_soon(semaphore.release) await semaphore.acquire() def test_async_auto_marked(testdir): testdir.makepyfile( dedent( """\ import asyncio import pytest from hypothesis import given import hypothesis.strategies as st pytest_plugins = 'pytest_asyncio' @given(n=st.integers()) async def test_hypothesis(n: int): assert isinstance(n, int) """ ) ) result = testdir.runpytest("--asyncio-mode=auto") result.assert_outcomes(passed=1) def test_sync_not_auto_marked(testdir): """Assert that synchronous Hypothesis functions are not marked with asyncio""" testdir.makepyfile( dedent( """\ import asyncio import pytest from hypothesis import given import hypothesis.strategies as st pytest_plugins = 'pytest_asyncio' @given(n=st.integers()) def test_hypothesis(request, n: int): markers = [marker.name for marker in request.node.own_markers] assert "asyncio" not in markers assert isinstance(n, int) """ ) ) result = testdir.runpytest("--asyncio-mode=auto") result.assert_outcomes(passed=1) pytest-asyncio-0.20.3/tests/hypothesis/test_inherited_test.py000066400000000000000000000007351434435170600245670ustar00rootroot00000000000000import hypothesis.strategies as st import pytest from hypothesis import given class BaseClass: @pytest.mark.asyncio @given(value=st.integers()) async def test_hypothesis(self, value: int) -> None: pass class TestOne(BaseClass): """During the first execution the Hypothesis test is wrapped in a synchronous function.""" class TestTwo(BaseClass): """Execute the test a second time to ensure that the test receives a fresh event loop.""" pytest-asyncio-0.20.3/tests/loop_fixture_scope/000077500000000000000000000000001434435170600216505ustar00rootroot00000000000000pytest-asyncio-0.20.3/tests/loop_fixture_scope/conftest.py000066400000000000000000000005251434435170600240510ustar00rootroot00000000000000import asyncio import pytest class CustomSelectorLoop(asyncio.SelectorEventLoop): """A subclass with no overrides, just to test for presence.""" loop = CustomSelectorLoop() @pytest.fixture(scope="module") def event_loop(): """Create an instance of the default event loop for each test case.""" yield loop loop.close() pytest-asyncio-0.20.3/tests/loop_fixture_scope/test_loop_fixture_scope.py000066400000000000000000000006601434435170600271730ustar00rootroot00000000000000"""Unit tests for overriding the event loop with a larger scoped one.""" import asyncio import pytest @pytest.mark.asyncio async def 
test_for_custom_loop(): """This test should be executed using the custom loop.""" await asyncio.sleep(0.01) assert type(asyncio.get_event_loop()).__name__ == "CustomSelectorLoop" @pytest.mark.asyncio async def test_dependent_fixture(dependent_fixture): await asyncio.sleep(0.1) pytest-asyncio-0.20.3/tests/markers/000077500000000000000000000000001434435170600174045ustar00rootroot00000000000000pytest-asyncio-0.20.3/tests/markers/test_class_marker.py000066400000000000000000000010041434435170600234560ustar00rootroot00000000000000"""Test if pytestmark works when defined on a class.""" import asyncio import pytest class TestPyTestMark: pytestmark = pytest.mark.asyncio async def test_is_asyncio(self, event_loop, sample_fixture): assert asyncio.get_event_loop() counter = 1 async def inc(): nonlocal counter counter += 1 await asyncio.sleep(0) await asyncio.ensure_future(inc()) assert counter == 2 @pytest.fixture def sample_fixture(): return None pytest-asyncio-0.20.3/tests/markers/test_module_marker.py000066400000000000000000000014221434435170600236420ustar00rootroot00000000000000"""Test if pytestmark works when defined in a module.""" import asyncio import pytest pytestmark = pytest.mark.asyncio class TestPyTestMark: async def test_is_asyncio(self, event_loop, sample_fixture): assert asyncio.get_event_loop() counter = 1 async def inc(): nonlocal counter counter += 1 await asyncio.sleep(0) await asyncio.ensure_future(inc()) assert counter == 2 async def test_is_asyncio(event_loop, sample_fixture): assert asyncio.get_event_loop() counter = 1 async def inc(): nonlocal counter counter += 1 await asyncio.sleep(0) await asyncio.ensure_future(inc()) assert counter == 2 @pytest.fixture def sample_fixture(): return None pytest-asyncio-0.20.3/tests/modes/000077500000000000000000000000001434435170600170475ustar00rootroot00000000000000pytest-asyncio-0.20.3/tests/modes/test_auto_mode.py000066400000000000000000000056351434435170600224450ustar00rootroot00000000000000from textwrap import dedent def test_auto_mode_cmdline(testdir): testdir.makepyfile( dedent( """\ import asyncio import pytest pytest_plugins = 'pytest_asyncio' async def test_a(): await asyncio.sleep(0) """ ) ) result = testdir.runpytest("--asyncio-mode=auto") result.assert_outcomes(passed=1) def test_auto_mode_cfg(testdir): testdir.makepyfile( dedent( """\ import asyncio import pytest pytest_plugins = 'pytest_asyncio' async def test_a(): await asyncio.sleep(0) """ ) ) testdir.makefile(".ini", pytest="[pytest]\nasyncio_mode = auto\n") result = testdir.runpytest() result.assert_outcomes(passed=1) def test_auto_mode_async_fixture(testdir): testdir.makepyfile( dedent( """\ import asyncio import pytest pytest_plugins = 'pytest_asyncio' @pytest.fixture async def fixture_a(): await asyncio.sleep(0) return 1 async def test_a(fixture_a): await asyncio.sleep(0) assert fixture_a == 1 """ ) ) result = testdir.runpytest("--asyncio-mode=auto") result.assert_outcomes(passed=1) def test_auto_mode_method_fixture(testdir): testdir.makepyfile( dedent( """\ import asyncio import pytest pytest_plugins = 'pytest_asyncio' class TestA: @pytest.fixture async def fixture_a(self): await asyncio.sleep(0) return 1 async def test_a(self, fixture_a): await asyncio.sleep(0) assert fixture_a == 1 """ ) ) result = testdir.runpytest("--asyncio-mode=auto") result.assert_outcomes(passed=1) def test_auto_mode_static_method(testdir): testdir.makepyfile( dedent( """\ import asyncio pytest_plugins = 'pytest_asyncio' class TestA: @staticmethod async def test_a(): await asyncio.sleep(0) """ 
) ) result = testdir.runpytest("--asyncio-mode=auto") result.assert_outcomes(passed=1) def test_auto_mode_static_method_fixture(testdir): testdir.makepyfile( dedent( """\ import asyncio import pytest pytest_plugins = 'pytest_asyncio' class TestA: @staticmethod @pytest.fixture async def fixture_a(): await asyncio.sleep(0) return 1 @staticmethod async def test_a(fixture_a): await asyncio.sleep(0) assert fixture_a == 1 """ ) ) result = testdir.runpytest("--asyncio-mode=auto") result.assert_outcomes(passed=1) pytest-asyncio-0.20.3/tests/modes/test_strict_mode.py000066400000000000000000000030001434435170600227650ustar00rootroot00000000000000from textwrap import dedent def test_strict_mode_cmdline(testdir): testdir.makepyfile( dedent( """\ import asyncio import pytest pytest_plugins = 'pytest_asyncio' @pytest.mark.asyncio async def test_a(): await asyncio.sleep(0) """ ) ) result = testdir.runpytest("--asyncio-mode=strict") result.assert_outcomes(passed=1) def test_strict_mode_cfg(testdir): testdir.makepyfile( dedent( """\ import asyncio import pytest pytest_plugins = 'pytest_asyncio' @pytest.mark.asyncio async def test_a(): await asyncio.sleep(0) """ ) ) testdir.makefile(".ini", pytest="[pytest]\nasyncio_mode = strict\n") result = testdir.runpytest() result.assert_outcomes(passed=1) def test_strict_mode_method_fixture(testdir): testdir.makepyfile( dedent( """\ import asyncio import pytest import pytest_asyncio pytest_plugins = 'pytest_asyncio' class TestA: @pytest_asyncio.fixture async def fixture_a(self): await asyncio.sleep(0) return 1 @pytest.mark.asyncio async def test_a(self, fixture_a): await asyncio.sleep(0) assert fixture_a == 1 """ ) ) result = testdir.runpytest("--asyncio-mode=auto") result.assert_outcomes(passed=1) pytest-asyncio-0.20.3/tests/multiloop/000077500000000000000000000000001434435170600177645ustar00rootroot00000000000000pytest-asyncio-0.20.3/tests/multiloop/conftest.py000066400000000000000000000005071434435170600221650ustar00rootroot00000000000000import asyncio import pytest class CustomSelectorLoop(asyncio.SelectorEventLoop): """A subclass with no overrides, just to test for presence.""" @pytest.fixture def event_loop(): """Create an instance of the default event loop for each test case.""" loop = CustomSelectorLoop() yield loop loop.close() pytest-asyncio-0.20.3/tests/multiloop/test_alternative_loops.py000066400000000000000000000006271434435170600251340ustar00rootroot00000000000000"""Unit tests for overriding the event loop.""" import asyncio import pytest @pytest.mark.asyncio async def test_for_custom_loop(): """This test should be executed using the custom loop.""" await asyncio.sleep(0.01) assert type(asyncio.get_event_loop()).__name__ == "CustomSelectorLoop" @pytest.mark.asyncio async def test_dependent_fixture(dependent_fixture): await asyncio.sleep(0.1) pytest-asyncio-0.20.3/tests/respect_event_loop_policy/000077500000000000000000000000001434435170600232165ustar00rootroot00000000000000pytest-asyncio-0.20.3/tests/respect_event_loop_policy/conftest.py000066400000000000000000000006441434435170600254210ustar00rootroot00000000000000"""Defines and sets a custom event loop policy""" import asyncio from asyncio import DefaultEventLoopPolicy, SelectorEventLoop class TestEventLoop(SelectorEventLoop): pass class TestEventLoopPolicy(DefaultEventLoopPolicy): def new_event_loop(self): return TestEventLoop() # This statement represents a code which sets a custom event loop policy asyncio.set_event_loop_policy(TestEventLoopPolicy()) 
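A minimal alternative sketch (illustrative only, assuming the TestEventLoopPolicy class defined above) scopes the policy override to the test session with an autouse fixture and restores the previous policy afterwards:

import asyncio

import pytest


@pytest.fixture(scope="session", autouse=True)
def _install_custom_policy():
    """Install TestEventLoopPolicy for the whole session, then restore the previous policy."""
    previous_policy = asyncio.get_event_loop_policy()
    asyncio.set_event_loop_policy(TestEventLoopPolicy())
    yield
    asyncio.set_event_loop_policy(previous_policy)

Because the plugin's event_loop fixture asks the active policy for new loops, tests run on TestEventLoop instances with either approach.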
pytest-asyncio-0.20.3/tests/respect_event_loop_policy/test_respects_event_loop_policy.py000066400000000000000000000011061434435170600322660ustar00rootroot00000000000000"""Tests that any externally provided event loop policy remains unaltered.""" import asyncio import pytest @pytest.mark.asyncio async def test_uses_loop_provided_by_custom_policy(): """Asserts that test cases use the event loop provided by the custom event loop policy""" assert type(asyncio.get_event_loop()).__name__ == "TestEventLoop" @pytest.mark.asyncio async def test_custom_policy_is_not_overwritten(): """Asserts that any custom event loop policy stays the same across test cases""" assert type(asyncio.get_event_loop()).__name__ == "TestEventLoop" pytest-asyncio-0.20.3/tests/test_asyncio_fixture.py000066400000000000000000000026301434435170600225650ustar00rootroot00000000000000import asyncio from textwrap import dedent import pytest import pytest_asyncio @pytest_asyncio.fixture async def fixture_bare(): await asyncio.sleep(0) return 1 @pytest.mark.asyncio async def test_bare_fixture(fixture_bare): await asyncio.sleep(0) assert fixture_bare == 1 @pytest_asyncio.fixture(name="new_fixture_name") async def fixture_with_name(request): await asyncio.sleep(0) return request.fixturename @pytest.mark.asyncio async def test_fixture_with_name(new_fixture_name): await asyncio.sleep(0) assert new_fixture_name == "new_fixture_name" @pytest_asyncio.fixture(params=[2, 4]) async def fixture_with_params(request): await asyncio.sleep(0) return request.param @pytest.mark.asyncio async def test_fixture_with_params(fixture_with_params): await asyncio.sleep(0) assert fixture_with_params % 2 == 0 @pytest.mark.parametrize("mode", ("auto", "strict")) def test_sync_function_uses_async_fixture(testdir, mode): testdir.makepyfile( dedent( """\ import pytest_asyncio pytest_plugins = 'pytest_asyncio' @pytest_asyncio.fixture async def always_true(): return True def test_sync_function_uses_async_fixture(always_true): assert always_true is True """ ) ) result = testdir.runpytest(f"--asyncio-mode={mode}") result.assert_outcomes(passed=1) pytest-asyncio-0.20.3/tests/test_dependent_fixtures.py000066400000000000000000000004551434435170600232540ustar00rootroot00000000000000import asyncio import pytest @pytest.mark.asyncio async def test_dependent_fixture(dependent_fixture): """Test a dependent fixture.""" await asyncio.sleep(0.1) @pytest.mark.asyncio async def test_factory_involving_factories(factory_involving_factories): factory_involving_factories() pytest-asyncio-0.20.3/tests/test_event_loop_scope.py000066400000000000000000000017331434435170600227200ustar00rootroot00000000000000"""Test the event loop fixture provides a separate loop for each test. These tests need to be run together. """ import asyncio import pytest loop: asyncio.AbstractEventLoop def test_1(): global loop # The main thread should have a default event loop. 
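# (If no loop has been set yet, the default policy creates one for the main thread on first request.)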
loop = asyncio.get_event_loop_policy().get_event_loop() @pytest.mark.asyncio async def test_2(): global loop running_loop = asyncio.get_event_loop_policy().get_event_loop() # Make sure this test case received a different loop assert running_loop is not loop loop = running_loop # Store the loop reference for later def test_3(): global loop current_loop = asyncio.get_event_loop_policy().get_event_loop() # Now the event loop from test_2 should have been cleaned up assert loop is not current_loop def test_4(event_loop): # If a test sets the loop to None -- pytest_fixture_post_finalizer() # still should work asyncio.get_event_loop_policy().set_event_loop(None) pytest-asyncio-0.20.3/tests/test_flaky_integration.py000066400000000000000000000025641434435170600230670ustar00rootroot00000000000000"""Tests for the Flaky integration, which retries failed tests. """ from textwrap import dedent def test_auto_mode_cmdline(testdir): testdir.makepyfile( dedent( """\ import asyncio import flaky import pytest _threshold = -1 @flaky.flaky(3, 2) @pytest.mark.asyncio async def test_asyncio_flaky_thing_that_fails_then_succeeds(): global _threshold await asyncio.sleep(0.1) _threshold += 1 assert _threshold != 1 """ ) ) # runpytest_subprocess() is required so that the output is not polluted # with flaky restart information result = testdir.runpytest_subprocess("--asyncio-mode=strict") result.assert_outcomes(passed=1) result.stdout.fnmatch_lines( [ "===Flaky Test Report===", "test_asyncio_flaky_thing_that_fails_then_succeeds passed 1 " "out of the required 2 times. Running test again until it passes 2 times.", "test_asyncio_flaky_thing_that_fails_then_succeeds failed " "(1 runs remaining out of 3).", " <class 'AssertionError'>", " assert 1 != 1", "test_asyncio_flaky_thing_that_fails_then_succeeds passed 2 " "out of the required 2 times. 
Success!", "===End Flaky Test Report===", ] ) pytest-asyncio-0.20.3/tests/test_pytest_min_version_warning.py000066400000000000000000000012721434435170600250400ustar00rootroot00000000000000from textwrap import dedent import pytest @pytest.mark.skipif( pytest.__version__ < "7.0.0", reason="The warning shouldn't be present when run with recent pytest versions", ) @pytest.mark.parametrize("mode", ("auto", "strict")) def test_pytest_min_version_warning_is_not_triggered_for_pytest_7(testdir, mode): testdir.makepyfile( dedent( """\ import pytest pytest_plugins = 'pytest_asyncio' @pytest.mark.asyncio async def test_triggers_pytest_warning(): pass """ ) ) result = testdir.runpytest(f"--asyncio-mode={mode}") result.assert_outcomes(passed=1, warnings=0) pytest-asyncio-0.20.3/tests/test_simple.py000066400000000000000000000162641434435170600206530ustar00rootroot00000000000000"""Quick'n'dirty unit tests for provided fixtures and markers.""" import asyncio from textwrap import dedent import pytest import pytest_asyncio.plugin async def async_coro(): await asyncio.sleep(0) return "ok" def test_event_loop_fixture(event_loop): """Test the injection of the event_loop fixture.""" assert event_loop ret = event_loop.run_until_complete(async_coro()) assert ret == "ok" @pytest.mark.asyncio async def test_asyncio_marker(): """Test the asyncio pytest marker.""" await asyncio.sleep(0) @pytest.mark.xfail(reason="need a failure", strict=True) @pytest.mark.asyncio async def test_asyncio_marker_fail(): raise AssertionError @pytest.mark.asyncio async def test_asyncio_marker_with_default_param(a_param=None): """Test the asyncio pytest marker.""" await asyncio.sleep(0) @pytest.mark.asyncio async def test_unused_port_fixture(unused_tcp_port, event_loop): """Test the unused TCP port fixture.""" async def closer(_, writer): writer.close() server1 = await asyncio.start_server(closer, host="localhost", port=unused_tcp_port) with pytest.raises(IOError): await asyncio.start_server(closer, host="localhost", port=unused_tcp_port) server1.close() await server1.wait_closed() @pytest.mark.asyncio async def test_unused_udp_port_fixture(unused_udp_port, event_loop): """Test the unused TCP port fixture.""" class Closer: def connection_made(self, transport): pass def connection_lost(self, *arg, **kwd): pass transport1, _ = await event_loop.create_datagram_endpoint( Closer, local_addr=("127.0.0.1", unused_udp_port), reuse_port=False, ) with pytest.raises(IOError): await event_loop.create_datagram_endpoint( Closer, local_addr=("127.0.0.1", unused_udp_port), reuse_port=False, ) transport1.abort() @pytest.mark.asyncio async def test_unused_port_factory_fixture(unused_tcp_port_factory, event_loop): """Test the unused TCP port factory fixture.""" async def closer(_, writer): writer.close() port1, port2, port3 = ( unused_tcp_port_factory(), unused_tcp_port_factory(), unused_tcp_port_factory(), ) server1 = await asyncio.start_server(closer, host="localhost", port=port1) server2 = await asyncio.start_server(closer, host="localhost", port=port2) server3 = await asyncio.start_server(closer, host="localhost", port=port3) for port in port1, port2, port3: with pytest.raises(IOError): await asyncio.start_server(closer, host="localhost", port=port) server1.close() await server1.wait_closed() server2.close() await server2.wait_closed() server3.close() await server3.wait_closed() @pytest.mark.asyncio async def test_unused_udp_port_factory_fixture(unused_udp_port_factory, event_loop): """Test the unused UDP port factory fixture.""" class Closer: def 
connection_made(self, transport): pass def connection_lost(self, *arg, **kwd): pass port1, port2, port3 = ( unused_udp_port_factory(), unused_udp_port_factory(), unused_udp_port_factory(), ) transport1, _ = await event_loop.create_datagram_endpoint( Closer, local_addr=("127.0.0.1", port1), reuse_port=False, ) transport2, _ = await event_loop.create_datagram_endpoint( Closer, local_addr=("127.0.0.1", port2), reuse_port=False, ) transport3, _ = await event_loop.create_datagram_endpoint( Closer, local_addr=("127.0.0.1", port3), reuse_port=False, ) for port in port1, port2, port3: with pytest.raises(IOError): await event_loop.create_datagram_endpoint( Closer, local_addr=("127.0.0.1", port), reuse_port=False, ) transport1.abort() transport2.abort() transport3.abort() def test_unused_port_factory_duplicate(unused_tcp_port_factory, monkeypatch): """Test correct avoidance of duplicate ports.""" counter = 0 def mock_unused_tcp_port(_ignored): """Force some duplicate ports.""" nonlocal counter counter += 1 if counter < 5: return 10000 else: return 10000 + counter monkeypatch.setattr(pytest_asyncio.plugin, "_unused_port", mock_unused_tcp_port) assert unused_tcp_port_factory() == 10000 assert unused_tcp_port_factory() > 10000 def test_unused_udp_port_factory_duplicate(unused_udp_port_factory, monkeypatch): """Test correct avoidance of duplicate UDP ports.""" counter = 0 def mock_unused_udp_port(_ignored): """Force some duplicate ports.""" nonlocal counter counter += 1 if counter < 5: return 10000 else: return 10000 + counter monkeypatch.setattr(pytest_asyncio.plugin, "_unused_port", mock_unused_udp_port) assert unused_udp_port_factory() == 10000 assert unused_udp_port_factory() > 10000 class TestMarkerInClassBasedTests: """Test that asyncio marked functions work for methods of test classes.""" @pytest.mark.asyncio async def test_asyncio_marker_with_explicit_loop_fixture(self, event_loop): """Test the "asyncio" marker works on a method in a class-based test with explicit loop fixture.""" ret = await async_coro() assert ret == "ok" @pytest.mark.asyncio async def test_asyncio_marker_with_implicit_loop_fixture(self): """Test the "asyncio" marker works on a method in a class-based test with implicit loop fixture.""" ret = await async_coro() assert ret == "ok" class TestEventLoopStartedBeforeFixtures: @pytest.fixture async def loop(self): return asyncio.get_event_loop() @staticmethod def foo(): return 1 @pytest.mark.asyncio async def test_no_event_loop(self, loop): assert await loop.run_in_executor(None, self.foo) == 1 @pytest.mark.asyncio async def test_event_loop_after_fixture(self, loop, event_loop): assert await loop.run_in_executor(None, self.foo) == 1 @pytest.mark.asyncio async def test_event_loop_before_fixture(self, event_loop, loop): assert await loop.run_in_executor(None, self.foo) == 1 @pytest.mark.asyncio async def test_no_warning_on_skip(): pytest.skip("Test a skip error inside asyncio") def test_async_close_loop(event_loop): event_loop.close() def test_warn_asyncio_marker_for_regular_func(testdir): testdir.makepyfile( dedent( """\ import pytest pytest_plugins = 'pytest_asyncio' @pytest.mark.asyncio def test_a(): pass """ ) ) testdir.makefile( ".ini", pytest=dedent( """\ [pytest] asyncio_mode = strict filterwarnings = default """ ), ) result = testdir.runpytest() result.assert_outcomes(passed=1) result.stdout.fnmatch_lines( ["*is marked with '@pytest.mark.asyncio' but it is not an async function.*"] ) 
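A brief usage sketch of the unused_tcp_port_factory fixture exercised above (illustrative only; the test name and body are hypothetical):

import asyncio

import pytest


@pytest.mark.asyncio
async def test_two_servers_on_distinct_ports(unused_tcp_port_factory):
    """Each factory call returns a port that has not been handed out before."""
    port_a, port_b = unused_tcp_port_factory(), unused_tcp_port_factory()
    assert port_a != port_b

    async def handler(_, writer):
        writer.close()

    servers = [
        await asyncio.start_server(handler, host="localhost", port=port)
        for port in (port_a, port_b)
    ]
    for server in servers:
        server.close()
        await server.wait_closed()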
pytest-asyncio-0.20.3/tests/test_subprocess.py000066400000000000000000000023131434435170600215400ustar00rootroot00000000000000"""Tests for using subprocesses in tests.""" import asyncio.subprocess import sys import pytest if sys.platform == "win32": # The default asyncio event loop implementation on Windows does not # support subprocesses. Subprocesses are available for Windows if a # ProactorEventLoop is used. @pytest.yield_fixture() def event_loop(): loop = asyncio.ProactorEventLoop() yield loop loop.close() @pytest.mark.skipif( sys.version_info < (3, 8), reason=""" When run with Python 3.7 asyncio.subprocess.create_subprocess_exec seems to be affected by an issue that prevents correct cleanup. Tests using pytest-trio will report that signal handling is already performed by another library and fail. [1] This is possibly a bug in CPython 3.7, so we ignore this test for that Python version. [1] https://github.com/python-trio/pytest-trio/issues/126 """, ) @pytest.mark.asyncio async def test_subprocess(event_loop): """Starting a subprocess should be possible.""" proc = await asyncio.subprocess.create_subprocess_exec( sys.executable, "--version", stdout=asyncio.subprocess.PIPE ) await proc.communicate() pytest-asyncio-0.20.3/tests/trio/000077500000000000000000000000001434435170600167155ustar00rootroot00000000000000pytest-asyncio-0.20.3/tests/trio/test_fixtures.py000066400000000000000000000011061434435170600221750ustar00rootroot00000000000000from textwrap import dedent def test_strict_mode_ignores_trio_fixtures(testdir): testdir.makepyfile( dedent( """\ import pytest import pytest_asyncio import pytest_trio pytest_plugins = ["pytest_asyncio", "pytest_trio"] @pytest_trio.trio_fixture async def any_fixture(): return True @pytest.mark.trio async def test_anything(any_fixture): pass """ ) ) result = testdir.runpytest("--asyncio-mode=strict") result.assert_outcomes(passed=1) pytest-asyncio-0.20.3/tools/000077500000000000000000000000001434435170600157365ustar00rootroot00000000000000pytest-asyncio-0.20.3/tools/get-version.py000066400000000000000000000006571434435170600205620ustar00rootroot00000000000000import json import sys from importlib import metadata from packaging.version import parse as parse_version def main(): version_string = metadata.version("pytest-asyncio") version = parse_version(version_string) print(f"::set-output name=version::{version}") prerelease = json.dumps(version.is_prerelease) print(f"::set-output name=prerelease::{prerelease}") if __name__ == "__main__": sys.exit(main()) pytest-asyncio-0.20.3/tox.ini000066400000000000000000000020211434435170600161040ustar00rootroot00000000000000[tox] minversion = 3.14.0 envlist = py37, py38, py39, py310, py311, lint, version-info, pytest-min isolated_build = true passenv = CI [testenv] extras = testing deps = --requirement dependencies/default/requirements.txt --constraint dependencies/default/constraints.txt commands = make test allowlist_externals = make [testenv:pytest-min] extras = testing deps = --requirement dependencies/pytest-min/requirements.txt --constraint dependencies/pytest-min/constraints.txt commands = make test allowlist_externals = make [testenv:lint] basepython = python3.10 extras = testing deps = pre-commit == 2.16.0 commands = make lint allowlist_externals = make [testenv:coverage-report] deps = coverage skip_install = true commands = coverage combine coverage report [testenv:version-info] deps = packaging == 21.3 commands = python ./tools/get-version.py [gh-actions] python = 3.7: py37, pytest-min 3.8: py38 3.9: py39 
3.10: py310 3.11-dev: py311 pypy3: pypy3