pax_global_header00006660000000000000000000000064147616467110014527gustar00rootroot0000000000000052 comment=a3274f2eeac78be4213230088c1f120da50ef633 python-aio-pika-9.5.5/000077500000000000000000000000001476164671100145605ustar00rootroot00000000000000python-aio-pika-9.5.5/.coafile000066400000000000000000000001731476164671100161640ustar00rootroot00000000000000[Default] bears = PEP8Bear, PyUnusedCodeBear, FilenameBear, InvalidLinkBear files = aio_pika/**/*.py max_line_length = 120 python-aio-pika-9.5.5/.coveragerc000066400000000000000000000001731476164671100167020ustar00rootroot00000000000000[run] omit = aio_pika/compat.py branch = True [report] exclude_lines = pragma: no cover raise NotImplementedError python-aio-pika-9.5.5/.deepsource.toml000066400000000000000000000001341476164671100176670ustar00rootroot00000000000000version = 1 [[analyzers]] name = "python" enabled = true runtime_version = "3.x.x" python-aio-pika-9.5.5/.drone.yml000066400000000000000000000072231476164671100164740ustar00rootroot00000000000000--- kind: pipeline name: default steps: - name: prepare toxenv image: snakepacker/python:all group: tests pull: always commands: - tox --notest volumes: - name: cache path: /drone/src/.tox - name: linter image: snakepacker/python:all commands: - tox environment: TOXENV: lint volumes: - name: cache path: /drone/src/.tox - name: mypy image: snakepacker/python:all group: tests pull: always commands: - tox environment: TOXENV: mypy volumes: - name: cache path: /drone/src/.tox - name: checkdoc image: snakepacker/python:all group: tests pull: always commands: - tox environment: TOXENV: checkdoc volumes: - name: cache path: /drone/src/.tox - name: python 3.8 image: snakepacker/python:all commands: - tox environment: AMQP_URL: amqp://guest:guest@rabbitmq TOXENV: py38 COVERALLS_REPO_TOKEN: from_secret: COVERALLS_TOKEN volumes: - name: cache path: /drone/src/.tox - name: python 3.8 uvloop image: snakepacker/python:all commands: - tox environment: AMQP_URL: 
amqp://guest:guest@rabbitmq TOXENV: py38-uvloop COVERALLS_REPO_TOKEN: from_secret: COVERALLS_TOKEN volumes: - name: cache path: /drone/src/.tox - name: python 3.7 image: snakepacker/python:all commands: - tox environment: AMQP_URL: amqp://guest:guest@rabbitmq TOXENV: py37 COVERALLS_REPO_TOKEN: from_secret: COVERALLS_TOKEN volumes: - name: cache path: /drone/src/.tox - name: python 3.7 uvloop image: snakepacker/python:all commands: - tox environment: AMQP_URL: amqp://guest:guest@rabbitmq TOXENV: py37-uvloop COVERALLS_REPO_TOKEN: from_secret: COVERALLS_TOKEN volumes: - name: cache path: /drone/src/.tox - name: python 3.6 image: snakepacker/python:all commands: - tox environment: AMQP_URL: amqp://guest:guest@rabbitmq TOXENV: py36 COVERALLS_REPO_TOKEN: from_secret: COVERALLS_TOKEN volumes: - name: cache path: /drone/src/.tox - name: python 3.6 uvloop image: snakepacker/python:all commands: - tox environment: AMQP_URL: amqp://guest:guest@rabbitmq TOXENV: py36-uvloop COVERALLS_REPO_TOKEN: from_secret: COVERALLS_TOKEN volumes: - name: cache path: /drone/src/.tox - name: python 3.5 image: snakepacker/python:all commands: - tox environment: AMQP_URL: amqp://guest:guest@rabbitmq TOXENV: py35 COVERALLS_REPO_TOKEN: from_secret: COVERALLS_TOKEN volumes: - name: cache path: /drone/src/.tox - name: python 3.5 uvloop image: snakepacker/python:all commands: - tox environment: AMQP_URL: amqp://guest:guest@rabbitmq TOXENV: py35-uvloop COVERALLS_REPO_TOKEN: from_secret: COVERALLS_TOKEN volumes: - name: cache path: /drone/src/.tox - name: notify image: drillster/drone-email settings: host: from_secret: SMTP_HOST username: from_secret: SMTP_USERNAME password: from_secret: SMTP_PASSWORD from: from_secret: SMTP_USERNAME when: status: - changed - failure volumes: - name: cache temp: {} services: - name: rabbitmq image: rabbitmq:3-alpine --- kind: signature hmac: 32a7f019710b16f795a6531ef6fab89d2ab24f50aaee729c3a7379a0dda472b0 ... 
python-aio-pika-9.5.5/.editorconfig000066400000000000000000000004171476164671100172370ustar00rootroot00000000000000root = true [*] end_of_line = lf insert_final_newline = true charset = utf-8 trim_trailing_whitespace = true [*.{py,yml}] indent_style = space max_line_length = 79 [*.py] indent_size = 4 [*.rst] indent_size = 3 [Makefile] indent_style = tab [*.yml] indent_size = 2 python-aio-pika-9.5.5/.github/000077500000000000000000000000001476164671100161205ustar00rootroot00000000000000python-aio-pika-9.5.5/.github/workflows/000077500000000000000000000000001476164671100201555ustar00rootroot00000000000000python-aio-pika-9.5.5/.github/workflows/docs.yml000066400000000000000000000040211476164671100216250ustar00rootroot00000000000000name: Deploy Documentation on: push: branches: - master jobs: build-and-deploy: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Setup Python 3.12 uses: actions/setup-python@v2 with: python-version: "3.12" - name: Cache virtualenv id: venv-cache uses: actions/cache@v3 with: path: .venv key: venv-${{ runner.os }}-${{ github.job }}-${{ github.ref }}-3.12 restore-keys: | venv-${{ runner.os }}-${{ github.job }}-${{ github.ref }}- venv-${{ runner.os }}-${{ github.job }}- venv-${{ runner.os }}- - name: Install Poetry run: python -m pip install poetry - name: Cache Poetry and pip uses: actions/cache@v3 with: path: | ~/.cache/pypoetry ~/.cache/pip key: poetry-pip-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }} restore-keys: | poetry-pip-${{ runner.os }}- - name: Install Dependencies with Poetry run: poetry install --no-interaction --no-ansi - name: Build Documentation run: | cd docs poetry -C .. 
run make html - name: Install AWS CLI run: | sudo apt update sudo apt install -y awscli - name: Configure AWS CLI for Cloudflare R2 run: | aws configure set aws_access_key_id ${{ secrets.CF_R2_ACCESS_KEY_ID }} aws configure set aws_secret_access_key ${{ secrets.CF_R2_SECRET_ACCESS_KEY }} aws configure set default.region us-east-1 # R2 uses us-east-1 by default aws configure set default.output json - name: Sync to Cloudflare R2 env: CF_R2_ENDPOINT: ${{ secrets.CF_R2_ENDPOINT }} CF_R2_BUCKET_NAME: ${{ secrets.CF_R2_BUCKET_NAME }} run: | aws s3 sync docs/build/html s3://$CF_R2_BUCKET_NAME \ --delete \ --acl public-read \ --endpoint-url $CF_R2_ENDPOINT python-aio-pika-9.5.5/.github/workflows/tests.yml000066400000000000000000000047631476164671100220540ustar00rootroot00000000000000name: tests on: push: branches: [ master ] pull_request: branches: [ master ] jobs: pylama: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Setup python3.10 uses: actions/setup-python@v2 with: python-version: "3.10" - name: Cache virtualenv id: venv-cache uses: actions/cache@v3 with: path: .venv key: venv-${{ runner.os }}-${{ github.job }}-${{ github.ref }} - run: python -m pip install poetry - run: poetry install - run: poetry run pylama env: FORCE_COLOR: 1 mypy: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Setup python3.10 uses: actions/setup-python@v2 with: python-version: "3.10" - name: Cache virtualenv id: venv-cache uses: actions/cache@v3 with: path: .venv key: venv-${{ runner.os }}-${{ github.job }}-${{ github.ref }} - run: python -m pip install poetry - run: poetry install - run: poetry run mypy env: FORCE_COLOR: 1 tests: runs-on: ubuntu-latest strategy: fail-fast: false matrix: python: - '3.9' - '3.10' - '3.11' - '3.12' steps: - uses: actions/checkout@v2 - name: Setup python${{ matrix.python }} uses: actions/setup-python@v2 with: python-version: "${{ matrix.python }}" - name: Cache virtualenv id: venv-cache uses: actions/cache@v3 with: path: .venv 
key: venv-${{ runner.os }}-${{ github.job }}-${{ github.ref }}-${{ matrix.python }} - run: python -m pip install poetry - run: poetry install --with=uvloop - name: pytest run: >- poetry run pytest \ -vv \ --cov=aio_pika \ --cov-report=term-missing \ --doctest-modules \ --aiomisc-test-timeout=120 \ tests env: FORCE_COLOR: 1 - run: poetry run coveralls env: COVERALLS_PARALLEL: 'true' COVERALLS_SERVICE_NAME: github GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} finish: needs: - tests runs-on: ubuntu-latest steps: - name: Coveralls Finished uses: coverallsapp/github-action@master with: github-token: ${{ secrets.github_token }} parallel-finished: true python-aio-pika-9.5.5/.gitignore000066400000000000000000000037541476164671100165610ustar00rootroot00000000000000# Created by .ignore support plugin (hsz.mobi) ### VirtualEnv template # Virtualenv # http://iamzed.com/2009/05/07/a-primer-on-virtualenv/ .Python [Bb]in [Ii]nclude [Ll]ib [Ll]ib64 [Ll]ocal [Ss]cripts pyvenv.cfg .venv pip-selfcheck.json ### IPythonNotebook template # Temporary data .ipynb_checkpoints/ ### Python template # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python env/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. 
*.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *,cover .hypothesis/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ docs/source/apidoc # PyBuilder target/ # IPython Notebook .ipynb_checkpoints # pyenv .python-version # pytest .pytest_cache # celery beat schedule file celerybeat-schedule # dotenv .env # virtualenv venv/ ENV/ # Spyder project settings .spyderproject # Rope project settings .ropeproject ### JetBrains template # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 # User-specific stuff: .idea/ .vscode/ ## File-based project format: *.iws ## Plugin-specific files: # IntelliJ /out/ # mpeltonen/sbt-idea plugin .idea_modules/ # JIRA plugin atlassian-ide-plugin.xml # Crashlytics plugin (for Android Studio and IntelliJ) com_crashlytics_export_strings.xml crashlytics.properties crashlytics-build.properties fabric.properties /htmlcov /temp .DS_Store .*cache .nox python-aio-pika-9.5.5/CHANGELOG.md000066400000000000000000000462571476164671100164070ustar00rootroot000000000000009.5.5 ----- * Replace WeakSet with set for robust channels tracking #666 by shushpanov 9.5.4 ----- * fix: RobustChannel should not reopen after close() call #658 9.5.3 ----- * python3.8-eol #657 * self-hosted docs 9.5.2 ----- * Fix documentation links 9.5.1 ----- * Fix documentation links 9.5.0 ----- * Fix two bugs by adding more type hints to `CallbackCollection`. @Darsstar * Dropped python 3.7 @Darsstar * QueueIterator raises StopAsyncIteration when channel is closed. 
@Darsstar 9.4.3 ----- * fix: raise ChannelInvalidStateError at exchange.publish with closed channel #637 9.4.2 ----- * Only nack messages upon cancellation of a consumer subscription … #634 9.4.1 ----- * Prevent deadlock in RobustChannel.reopen() #622 * Python 3.12 tests #603 9.4.0 ----- * Support aiormq 6.8.0 #614 9.3.1 ----- * Define empty __slots__ in base classes #598 9.3.0 ----- * new: add custom exchanges to rpc pattern #377 by @cloud-rocket 9.2.3 ----- * Fix restore bug of RobustChannel #578 by @aozupek 9.2.2 ----- * Fix bug with RPC when handling `on_close` with a RobustConnection #573 by @CodeCorrupt 9.2.1 ----- * Fix reopen of robust channel after close #571 by @decaz. Fixes #570 9.2.0 ----- * URL params passing to aiormq #569 * `Connection.KWARGS_TYPES` renamed to `Connection.PARAMETERS` and rework it to `dataclass` * `Connection._parse_kwargs` renamed to `Connection._parse_parameters` * [AMQP URL parameters](https://docs.aio-pika.com/#amqp-url-parameters) documentation article 9.1.5 ----- * Fix race condition in RobustChannel in reopen/ready #566 by @isra17 9.1.4 ----- * use fork friendly random robust queue generation way #560 9.1.3 ----- * Ported publisher confirms tutorial by @MaPePeR #550 * Fixed errored response when `aio_pika.patterns.RPC` can not serialize the result #552 9.1.2 ----- * Fix badges in docs 9.1.1 ----- * Fix readthedocs build file 9.1.0 ----- The bulk of the changes are related to how the library entities are now interconnected. In previous versions of `aio_pika.Channel` instances not contains a link to the `aio_pika.Connection` instances for now is contains it. While I don't want custom code to work directly with the `aiormq.Channel` instance, this was a public API and I should warn you about the change here. The `aio_pika.Channel.channel` property is deprecated. Use `aio_pika.Channel.get_underlay_chanel()` instead. Now all library entities already use this method. 
9.0.7 ----- * Update aiormq version 9.0.6 ----- * Amend Exchange.__repr__ to include class name #527 Also switch to f-strings rather than %-formatting, modelled after Queue.__repr__. * Update example code of rpc tutorial #530 * bugfix: kwargs not working in `aio_pika.connect_robust` #531 * Improve type hints for `queue.get()` #542 9.0.5 ----- * Prevent 'Task exception was never retrieved' #524 If future.exception() is not called (even on cancelled futures), it seems Python will then log 'Task exception was never retrieved'. Rewriting this logic slightly should hopefully achieve the same functionality while preventing the Python errors. * Avoid implicitly depending on setuptools #526 9.0.4 ----- * fix README badge * upgrade requirements 9.0.3 ----- * RPCs: Show exceptions on Host (remote side) #503 * Fixed queue_name was set as channel_name for `patterns/master.py` #523 9.0.2 ----- * Do not nack if consumer is no_ack in QueueIterator #521 9.0.1 ----- * change classifiers in pyproject.toml 9.0.0 ----- The main goal of this release is the migration to `poetry` and stronger type checking with mypy. User code should remain compatible, just test it with mypy. The tests still work the same, without public API changes, this indicates that your code should work without changes, but does not prove it. 
### Deprecations * `aio_pika.message.HeaderProxy` - removed * `aio_pika.message.header_converter` - removed * `aio_pika.message.format_headers` - removed * `aio_pika.message.Message.headers_raw` - prints deprecation warning * `aio_pika.abc.AbstractMessage.headers_raw` - removed 8.3.0 ----- * Update `aiormq~=6.6.3` #512 * Fix getting futures exceptions #509 8.2.4 ----- * Fix memory leaks around channel close callbacks #496 * Fastest way to reject all messages when queue iterator is closing #498 8.2.3 ----- * Fix memory leak when callback collections is chaining #495 8.2.2 ----- * Prevent "Task exception was never retrieved" on timeout #492 8.2.1 ----- * Fix memory leaks on channel close #491 8.2.0 ----- * allow passing ssl_context to the connection #474. A default parameter has been added to the public API, this does not break anything unless your code relies on the order of the arguments. 8.1.1 ----- * Generated anonymous queue name may conflict #486 * improve typing in multiple library actors #478 8.1.0 ----- * Bump `aiormq~=6.4.0` with `connection blocking` feature * `Connection.update_secret` method (#481) 8.0.3 ----- * cannot use client_properties issue #469 8.0.2 ----- * linter fixes in `aio_pika.rpc.__all__` 8.0.1 ----- * aio_pika.rpc fix for `TypeError: invalid exception object` for future 8.0.0 ----- ***Release notes*** In this release, there are many changes to the internal API and bug fixes related to sudden disconnection and correct recovery after reconnection. Unfortunately, the behavior that was in version 7.x was slightly affected. It's the reason the major version has been updated. The entire set of existing tests passes with minimal changes, therefore, except for some minor changes in behavior, the user code should work either without any modifications or with minimal changes, such as replacing removed deprecated functions with alternatives. 
This release has been already tested in a working environment, and now it seems that we have completely resolved all the known issues related to recovery after network failures. ***Changes***: * Added tests for unexpected network connection resets and fixed many related problems. * Added `UnderlayChannel` and `UnderlayConneciton`, this is `NamedTuple`s contains all connection and channel related properties. The `aiormq.Connection` and `aiormq.Channel` objects are now packaged in this `NamedTuple`s and can be atomically assigned to `aio_pika.Connection` and `aio_pika.Channel` objects. The main benefit is the not needed to add locks during the connection, in the best case, the container object is assigned to callee as usual, however, if something goes wrong during the connection, there is no need to clear something in `aio_pika.RobustConnection` or `aio_pika.RobustChannel`. * An `__init__` method is now a part of abstract classes for most `aio_pika` entities. * Removed explicit relations between `aio_pika.Channel` and `aio_pika.Connection`. Now you can't get a `aio_pika.Connection` instance from the `aio_pika.Channel` instance. * Fixed a bug that caused the whole connection was closed when a timeout occurred in one of the channels, in case the channel was waiting for a response frame to an amqp-rpc call. * Removed deprecated `add_close_callback` and `remove_close_callback` methods in `aio_pika.Channel`. Use `aio_pika.Channel.close_callbacks.add(callback, ...)` and `aio_pika.Channel.close_callbacks.remove(callback, ...)` instead. * Fixed a bug in `aio_pika.RobustChannel` that caused `default_exchane` broken after reconnecting. * The `publisher_confirms` property of `aio_pika.Channel` is public now. * Function `get_exchange_name` is public now. * Fixed an error in which the queue iterator could enter a deadlock state, with a sudden disconnection. 
* The new entity `OneShotCallback` helps, for example, to call all the closing callbacks at the channel if the `Connection` was unexpectedly closed, and the channel closing frame did not come explicitly. 7.2.0 ----- * Make `aio_pika.patterns.rpc` more extendable. 7.1.0 ----- * Fixes in documentation 7.0.0 ----- This release brings support for a new version of `aiormq`, which is used as a low-level driver for working with AMQP. The release contains a huge number of changes in the internal structure of the library, mainly related to type inheritance and abstract types, as well as typehints checking via mypy. The biggest change to the user API is the violation of the inheritance order, due to the introduction of abstract types, so this release is a major one. ### Changes * There are a lot of changes in the structure of the library, due to the widespread use of typing. * `aio_pika.abc` module now contains all types and abstract class prototypes. * Modern `aiormq~=6.1.1` used. * Complete type checks coverage via mypy. * The interface of `aio_pika`'s classes has undergone minimal changes, but you should double-check your code before migrating, at least because almost all types are now in `aio_pika.abc`. Module `aio_pika.types` still exists, but will produce a `DeprecationWarning`. * Default value for argument `weak` is changed to `False` in `CallbackCollection.add(func, weak=False)`. ### Known 6.x to 7.x migration issues * `pamqp.specification` module didn't exist in `pamqp==3.0.1` so you have to change it: * `pamqp.commands` for AMPQ-RPC–relates classes * `pamqp.base` for `Frame` class * `pamqp.body` for `ContentBody` class * `pamqp.commands` for `Basic`, `Channel`, `Confirm`, `Exchange`, `Queue`, `Tx` classes. * `pamqp.common` for `FieldArray`, `FieldTable`, `FieldValue` classes * `pamqp.constants` for constants like `REPLY_SUCCESS`. * `pamqp.header` for `ContentHeader` class. * `pamqp.heartbeat` for `Heartbeat` class. 
* Type definitions related to imports from `aio_pika` might throw warnings like `'SomeType' is not declared in __all__ `. This is a normal situation, since now it is necessary to import types from `aio_pika.abc`. In this release, these are just warnings, but in the next major release, this will stop working, so you should take care of changes in your code. Just use `aio_pika.abc` in your imports. The list of deprecated imports: * `from aio_pika.message import ReturnCallback` * `from aio_pika.patterns.rpc import RPCMessageType` - renamed to `RPCMessageTypes` * `import aio_pika.types` - module deprecated use `aio_pika.abc` instead * `from aio_pika.connection import ConnectionType` 6.8.2 ----- * explicit `Channel.is_user_closed` property * user-friendly exception when channel has been closed * reopen channels which are closed from the broker side 6.8.1 ----- * Fix flapping test test_robust_duplicate_queue #424 * Fixed callback on_close for rpc #424 6.8.0 ----- * fix: master deserialize types #366 * fix: add missing type hint on exchange publish method #370 * Return self instead of select result in `__aenter__` #373 * fix: call remove_close_callback #374 6.7.1 ----- * Fix breaking change in callback definition #344 6.7.0 ----- * Reworked tests and finally applied PR #311 * Improve documentation examples and snippets #339 * Restore RobustChannel.default_exchange on reconnect #340 * Improve the docs a bit #335 6.6.1 ----- * Add generics to Pool and PoolItemContextManager #321 * Fix Docs for ``DeliveryError`` #322 6.6.0 ----- * message.reject called inside ProcessContext.__exit__ fails when channel is closed #302 6.5.3 ----- * Add docs and github links to setup.py #304 6.5.2 ----- * Type annotation fixes * Add documentation 6.5.1 ----- * Test fixes * Add reopen method for channel #263 6.5.0 ----- * Add get methods for exchange and queue #282 * fix type annotation and documentation for Connection.add_close_callback #290 6.4.3 ----- * log channel close status * add OSError 
to `CONNECTION_EXCEPTIONS` 6.4.2 ----- * [fix] heartbeat_last to heartbeat_last_received #274 * Fix memory leak #285 * Fix type hint #287 * Pass loop when connecting to aiormq #294 6.4.1 ----- * RobustConnection cleanup fixes #273 6.4.0 ----- * aiormq updates: * Fixes for python 3.8 [#69](https://github.com/mosquito/aiormq/pull/69) [#67](https://github.com/mosquito/aiormq/pull/67) * [passing ``name=`` query parameter](https://github.com/mosquito/aiormq/pull/69/commits/a967502e6dbdf5de422cfb183932bcec134250ad) from URL to user defined connection name (Rabbitmq 3.8+) * Fix connection drain [#68](https://github.com/mosquito/aiormq/pull/68) * Remove ``loop=`` argument from asyncio entities [#67](https://github.com/mosquito/aiormq/pull/67) * ChannelInvalidStateError exceptions instead of RuntimeError [#65](https://github.com/mosquito/aiormq/pull/65) * Update tests for python 3.8 * ``Pool.close()`` method and allow to use ``Pool`` as a context manager [#269](https://github.com/mosquito/aio-pika/pull/269) * Fix stuck of ``RobustConnection`` when exclusive queues still locked on server-side [#267](https://github.com/mosquito/aio-pika/pull/267) * Add ``global_`` parameter to ``Channel.set_qos`` method [#266](https://github.com/mosquito/aio-pika/pull/266) * Fix ``Connection.drain()`` is ``None`` [Fix connection drain](https://github.com/mosquito/aiormq/pull/68) 6.3.0 ----- * passing `client_properties` 6.2.0 ----- * Allow str as an exchange type #260 6.1.2 ----- * Added typing on process method #252 6.1.1 ----- * Documentation fixes * Missed timeout parameter on `connect()` #245 6.1.0 ----- * Unified `CallbackCollection`s for channels and connections * Make RobustConnection more robust * `JsonRPC` and `JsonMaster` adapters * Improve patterns documentation 6.0.1 ----- * Extended ExchangeType #237. Added `x-modulus-hash` exchange type. 6.0.0 ----- * `RobustConnection` logic changes (see #234). Thanks to @decaz for analysis and fixes. 
5.6.3 ----- * add more type annotations * consistent setting headers for message #233 5.6.2 ----- * Fixes: set header value on HeaderProxy #232 5.5.3 ----- * Fixed #218. How to properly close RobustConnection? 5.5.2 ----- * Fixed #216. Exception in Queue.consume callback isn't propagated properly. 5.5.1 ----- * Allow to specify `requeue=` and `reject_on_redelivered=` in Master pattern #212 5.5.0 ----- * Fixed #209 int values for headers 5.4.1 ----- * update aiormq version * use `AMQPError` instead of `AMQPException`. `AMQPException` is now alias for `AMQPError` 5.4.0 ----- * Fix routing key handling (#206 @decaz) * Fix URL building (#207 @decaz) * Test suite for `connect` function 5.3.2 ----- * Fix tests for `Pool` 5.3.1 ----- * no duplicate call message when exception * add robust classes to apidoc 5.3.0 ----- * use None instead of Elipsis for initial state (@chibby0ne) * `Pool`: enable arguments for pool constructor (@chibby0ne) * Create py.typed (#176 @zarybnicky) * 5.2.4 ----- * Fix encode timestamp error on copy (#198 @tzoiker) * Bump `aiormq` 5.2.2 ----- * Fix HeaderProxy bug (#195 @tzoiker) 5.2.1 ----- * remove non-initialized channels when reconnect 5.2.0 ----- * robust connection close only when unclosed * `heartbeat_last` property 5.1.1 ----- * Simple test suite for testing robust connection via tcp proxy 5.0.1 ----- * robust connection initialization hotfix 5.0.0 ----- * Connector is now `aiormq` and not `pika` * Remove vendored `pika` * Compatibility changes: * **[HIGH]** Exceptions hierarchy completely changed: * ``UnroutableError`` removed. Use ``DeliveryError`` instead. * ``ConnectionRefusedError`` is now standard ``ConnectionError`` * Each error code has separate exception type. * **[LOW]** ``Connection.close`` method requires exception instead of ``code`` ``reason`` pair or ``None`` * **[MEDIUM]** ``IncomingMessage.ack`` ``IncomingMessage.nack`` ``IncomingMessage.reject`` returns coroutines. Old usage compatible but event loop might throw warnings. 
* **[HIGH]** ``Message.timestamp`` property is now ``datetime.datetime`` * **[LOW]** Tracking of ``publisher confirms`` removed, using similar feature from ``aiormq`` instead. * **[LOW]** non async context manager ``IncomingMessage.process()`` is deprecated. Use ``async with message.process():`` instead. 4.9.1 ----- * Fix race condition on callback timeout #180 4.9.0 ----- * Add abstract pool #174 * Fixed Deprecation Warnings in Python 3.7 #153 4.8.1 ----- * Migrate from travis to drone.io * Use pylava instead of pylama 4.8.0 ----- * save passive flag on reconnect #170 4.7.0 ----- * fixed inconsistent argument type for connection.connect #136 * fixed conditions for creating SSL connection. #135 4.6.4 ----- * Fix UnboundLocalError exception #163 4.6.3 ----- * RobustConnection fixes #162 * Fix code examples in the README.rst 4.6.1 ----- * Close connection in examples 4.6.0 ----- * Add content_type for all patterns 4.5.0 ----- * Add special exceptions for Worker 4.4.0 ----- * More extendable Master 4.3.0 ----- * Fix #112 * Fix #155 4.2.0 ----- * Add default params for RPC.cereate() 4.1.0 ----- * Fix InvalidStateError when connection lost 4.0.1 ----- * Fix: RPC stuck when response deserialization error 4.0.0 ----- * Drop python 3.4 support 2.9.0 ----- * prevent `set_results` on cancelled future #133 * Added asynchronous context manager support for channels #130 2.8.3 ----- * BUGFIX: ChannelClosed exception was never retrieved 2.8.2 ----- * BUGFIX: handle coroutine double wrapping for Python 3.4 2.8.1 ----- * added example for URL which contains ssl required options. 
2.8.0 ----- * `ssl_options` for coonect and connect_robust * default ports for `amqp` and `amqps` 2.7.1 ----- * python 3.4 fix 2.7.0 ----- * Add `message_kwargs` for worker pattern 2.6.0 ----- * Added `timeout` parameter for `Exchange.declare` * QueueEmpty exception public added to the module `__all__` 2.5.0 ----- * Ability to reconnect on Channel.Close * Ability to reconnect on Channel.Cancel 2.4.0 ----- * Rollback to pika==0.10 because new one had issues. 2.3.0 ----- * Feature: abillity to use ExternalCredentials with blank login. 2.2.2 ----- * Bugfix: _on_getempty should delete _on_getok_callback #110. (thank's to @dhontecillas) 2.2.1 ----- * Fixes for pyflakes 2.2.0 ----- * Rework transactions 2.1.0 ----- * Use pika's asyncio adapter 2.0.0 ----- * Rework robust connector 1.9.0 ----- * Ability to disable robustness for single queue in `rubust_connect` mode. * Ability to pass exchage by name. 1.8.1 ----- * Added `python_requires=">3.4.*, <4",` instead of `if sys.version_info` in the `setup.py` 1.8.0 ----- * Change `TimeoutError` to the `asyncio.TimeoutError` * Allow to bind queue by exchange name * Added `extras_require = {':python_version': 'typing >= 3.5.3',` to the `setup.py` 1.7.0 ----- * `aio_pika.patterns` submodule * `aio_pika.patterns.RPC` - RPC pattern * `aio_pika.patterns.Master` - Master/Worker pattern 1.5.1 ----- * `passive` argument for excahnge 1.5.0 ----- * `Channel.is_closed` property * `Channel.close` just return `None` when channel already closed * `Connection` might be used in `async with` expression * `Queue` might be used in `async with` and returns `QueueIterator` * Changing examples * `Queue.iterator()` method * `QueueIterator.close()` returns `asyncio.Future` instead of `asyncio.Task` * Ability to use `QueueIterator` in `async for` expression * `connect_robust` is a `coroutine` instead of function which returns a coroutine (PyCharm type checking display warning instead) * add tests 1.4.2 ----- * Improve documentation. 
Add examples for connection and channel * `Conneciton.close` returns `asyncio.Task` instead coroutine. * `connect_robust` now is function instead of `partial`. python-aio-pika-9.5.5/COPYING000066400000000000000000000243301476164671100156150ustar00rootroot00000000000000Apache License ============== _Version 2.0, January 2004_ ### Terms and Conditions for use, reproduction, and distribution #### 1. Definitions “License” shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. “Licensor” shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. “Legal Entity” shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, “control” means **(i)** the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or **(ii)** ownership of fifty percent (50%) or more of the outstanding shares, or **(iii)** beneficial ownership of such entity. “You” (or “Your”) shall mean an individual or Legal Entity exercising permissions granted by this License. “Source” form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. “Object” form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. “Work” shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 
“Derivative Works” shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. “Contribution” shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, “submitted” means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as “Not a Contribution.” “Contributor” shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. #### 2. Grant of Copyright License Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. #### 3. 
Grant of Patent License Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. #### 4. Redistribution You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: * **(a)** You must give any other recipients of the Work or Derivative Works a copy of this License; and * **(b)** You must cause any modified files to carry prominent notices stating that You changed the files; and * **(c)** You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and * **(d)** If the Work includes a “NOTICE” text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: 
within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. #### 5. Submission of Contributions Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. #### 6. Trademarks This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. #### 7. 
Disclaimer of Warranty Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. #### 8. Limitation of Liability In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. #### 9. Accepting Warranty or Additional Liability While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
_END OF TERMS AND CONDITIONS_ ### APPENDIX: How to apply the Apache License to your work To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets `[]` replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same “printed page” as the copyright notice for easier identification within third-party archives. Copyright 2023 Dmitry Orlov Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. python-aio-pika-9.5.5/MANIFEST.in000066400000000000000000000001311476164671100163110ustar00rootroot00000000000000recursive-exclude tests * recursive-exclude __pycache__ * exclude .* include README.rst python-aio-pika-9.5.5/Makefile000066400000000000000000000007031476164671100162200ustar00rootroot00000000000000all: test RABBITMQ_IMAGE:=mosquito/aiormq-rabbitmq test: find . -name "*.pyc" -type f -delete tox rabbitmq: docker kill $(docker ps -f label=aio-pika.rabbitmq -q) || true docker pull $(RABBITMQ_IMAGE) docker run --rm -d \ -l aio-pika.rabbitmq \ -p 5671:5671 \ -p 5672:5672 \ -p 15671:15671 \ -p 15672:15672 \ $(RABBITMQ_IMAGE) upload: python3.7 setup.py sdist bdist_wheel twine upload dist/*$(shell python3 setup.py --version)* python-aio-pika-9.5.5/README.rst000066400000000000000000000355761476164671100162670ustar00rootroot00000000000000.. 
_documentation: https://docs.aio-pika.com/ .. _adopted official RabbitMQ tutorial: https://docs.aio-pika.com/rabbitmq-tutorial/index.html aio-pika ======== .. image:: https://coveralls.io/repos/github/mosquito/aio-pika/badge.svg?branch=master :target: https://coveralls.io/github/mosquito/aio-pika :alt: Coveralls .. image:: https://github.com/mosquito/aio-pika/workflows/tests/badge.svg :target: https://github.com/mosquito/aio-pika/actions?query=workflow%3Atests :alt: Github Actions .. image:: https://img.shields.io/pypi/v/aio-pika.svg :target: https://pypi.python.org/pypi/aio-pika/ :alt: Latest Version .. image:: https://img.shields.io/pypi/wheel/aio-pika.svg :target: https://pypi.python.org/pypi/aio-pika/ .. image:: https://img.shields.io/pypi/pyversions/aio-pika.svg :target: https://pypi.python.org/pypi/aio-pika/ .. image:: https://img.shields.io/pypi/l/aio-pika.svg :target: https://pypi.python.org/pypi/aio-pika/ A wrapper around `aiormq`_ for asyncio and humans. Check out the examples and the tutorial in the `documentation`_. If you are a newcomer to RabbitMQ, please start with the `adopted official RabbitMQ tutorial`_. .. _aiormq: http://github.com/mosquito/aiormq/ .. note:: Since version ``5.0.0`` this library doesn't use ``pika`` as AMQP connector. Versions below ``5.0.0`` contains or requires ``pika``'s source code. .. note:: The version 7.0.0 has breaking API changes, see CHANGELOG.md for migration hints. Features -------- * Completely asynchronous API. * Object oriented API. * Transparent auto-reconnects with complete state recovery with `connect_robust` (e.g. declared queues or exchanges, consuming state and bindings). * Python 3.7+ compatible. * For python 3.5 users, aio-pika is available via `aio-pika<7`. * Transparent `publisher confirms`_ support. * `Transactions`_ support. * Complete type-hints coverage. .. _Transactions: https://www.rabbitmq.com/semantics.html .. _publisher confirms: https://www.rabbitmq.com/confirms.html Installation ------------ .. 
code-block:: shell pip install aio-pika Usage example ------------- Simple consumer: .. code-block:: python import asyncio import aio_pika import aio_pika.abc async def main(loop): # Connecting with the given parameters is also possible. # aio_pika.connect_robust(host="host", login="login", password="password") # You can only choose one option to create a connection, url or kw-based params. connection = await aio_pika.connect_robust( "amqp://guest:guest@127.0.0.1/", loop=loop ) async with connection: queue_name = "test_queue" # Creating channel channel: aio_pika.abc.AbstractChannel = await connection.channel() # Declaring queue queue: aio_pika.abc.AbstractQueue = await channel.declare_queue( queue_name, auto_delete=True ) async with queue.iterator() as queue_iter: # Cancel consuming after __aexit__ async for message in queue_iter: async with message.process(): print(message.body) if queue.name in message.body.decode(): break if __name__ == "__main__": loop = asyncio.get_event_loop() loop.run_until_complete(main(loop)) loop.close() Simple publisher: .. code-block:: python import asyncio import aio_pika import aio_pika.abc async def main(loop): # Explicit type annotation connection: aio_pika.RobustConnection = await aio_pika.connect_robust( "amqp://guest:guest@127.0.0.1/", loop=loop ) routing_key = "test_queue" channel: aio_pika.abc.AbstractChannel = await connection.channel() await channel.default_exchange.publish( aio_pika.Message( body='Hello {}'.format(routing_key).encode() ), routing_key=routing_key ) await connection.close() if __name__ == "__main__": loop = asyncio.get_event_loop() loop.run_until_complete(main(loop)) loop.close() Get single message example: .. 
code-block:: python import asyncio from aio_pika import connect_robust, Message async def main(loop): connection = await connect_robust( "amqp://guest:guest@127.0.0.1/", loop=loop ) queue_name = "test_queue" routing_key = "test_queue" # Creating channel channel = await connection.channel() # Declaring exchange exchange = await channel.declare_exchange('direct', auto_delete=True) # Declaring queue queue = await channel.declare_queue(queue_name, auto_delete=True) # Binding queue await queue.bind(exchange, routing_key) await exchange.publish( Message( bytes('Hello', 'utf-8'), content_type='text/plain', headers={'foo': 'bar'} ), routing_key ) # Receiving message incoming_message = await queue.get(timeout=5) # Confirm message await incoming_message.ack() await queue.unbind(exchange, routing_key) await queue.delete() await connection.close() if __name__ == "__main__": loop = asyncio.get_event_loop() loop.run_until_complete(main(loop)) There are more examples and the RabbitMQ tutorial in the `documentation`_. See also ========== `aiormq`_ --------- `aiormq` is a pure python AMQP client library. It is under the hood of **aio-pika** and might to be used when you really loving works with the protocol low level. Following examples demonstrates the user API. Simple consumer: .. code-block:: python import asyncio import aiormq async def on_message(message): """ on_message doesn't necessarily have to be defined as async. Here it is to show that it's possible. 
""" print(f" [x] Received message {message!r}") print(f"Message body is: {message.body!r}") print("Before sleep!") await asyncio.sleep(5) # Represents async I/O operations print("After sleep!") async def main(): # Perform connection connection = await aiormq.connect("amqp://guest:guest@localhost/") # Creating a channel channel = await connection.channel() # Declaring queue declare_ok = await channel.queue_declare('helo') consume_ok = await channel.basic_consume( declare_ok.queue, on_message, no_ack=True ) loop = asyncio.get_event_loop() loop.run_until_complete(main()) loop.run_forever() Simple publisher: .. code-block:: python import asyncio from typing import Optional import aiormq from aiormq.abc import DeliveredMessage MESSAGE: Optional[DeliveredMessage] = None async def main(): global MESSAGE body = b'Hello World!' # Perform connection connection = await aiormq.connect("amqp://guest:guest@localhost//") # Creating a channel channel = await connection.channel() declare_ok = await channel.queue_declare("hello", auto_delete=True) # Sending the message await channel.basic_publish(body, routing_key='hello') print(f" [x] Sent {body}") MESSAGE = await channel.basic_get(declare_ok.queue) print(f" [x] Received message from {declare_ok.queue!r}") loop = asyncio.get_event_loop() loop.run_until_complete(main()) assert MESSAGE is not None assert MESSAGE.routing_key == "hello" assert MESSAGE.body == b'Hello World!' The `patio`_ and the `patio-rabbitmq`_ -------------------------------------- **PATIO** is an acronym for Python Asynchronous Tasks for AsyncIO - an easily extensible library, for distributed task execution, like celery, only targeting asyncio as the main design approach. **patio-rabbitmq** provides you with the ability to use *RPC over RabbitMQ* services with extremely simple implementation: .. 
code-block:: python from patio import Registry, ThreadPoolExecutor from patio_rabbitmq import RabbitMQBroker rpc = Registry(project="patio-rabbitmq", auto_naming=False) @rpc("sum") def sum(*args): return sum(args) async def main(): async with ThreadPoolExecutor(rpc, max_workers=16) as executor: async with RabbitMQBroker( executor, amqp_url="amqp://guest:guest@localhost/", ) as broker: await broker.join() And the caller side might be written like this: .. code-block:: python import asyncio from patio import NullExecutor, Registry from patio_rabbitmq import RabbitMQBroker async def main(): async with NullExecutor(Registry(project="patio-rabbitmq")) as executor: async with RabbitMQBroker( executor, amqp_url="amqp://guest:guest@localhost/", ) as broker: print(await asyncio.gather( *[ broker.call("mul", i, i, timeout=1) for i in range(10) ] )) `FastStream`_ --------------- **FastStream** is a powerful and easy-to-use Python library for building asynchronous services that interact with event streams.. If you need no deep dive into **RabbitMQ** details, you can use more high-level **FastStream** interfaces: .. code-block:: python from faststream import FastStream from faststream.rabbit import RabbitBroker broker = RabbitBroker("amqp://guest:guest@localhost:5672/") app = FastStream(broker) @broker.subscriber("user") async def user_created(user_id: int): assert isinstance(user_id, int) return f"user-{user_id}: created" @app.after_startup async def pub_smth(): assert ( await broker.publish(1, "user", rpc=True) ) == "user-1: created" Also, **FastStream** validates messages by **pydantic**, generates your project **AsyncAPI** spec, supports In-Memory testing, RPC calls, and more. In fact, it is a high-level wrapper on top of **aio-pika**, so you can use both of these libraries' advantages at the same time. 
`python-socketio`_ ------------------ `Socket.IO`_ is a transport protocol that enables real-time bidirectional event-based communication between clients (typically, though not always, web browsers) and a server. This package provides Python implementations of both, each with standard and asyncio variants. Also this package is suitable for building messaging services over **RabbitMQ** via **aio-pika** adapter: .. code-block:: python import socketio from aiohttp import web sio = socketio.AsyncServer(client_manager=socketio.AsyncAioPikaManager()) app = web.Application() sio.attach(app) @sio.event async def chat_message(sid, data): print("message ", data) if __name__ == '__main__': web.run_app(app) And a client is able to call `chat_message` the following way: .. code-block:: python import asyncio import socketio sio = socketio.AsyncClient() async def main(): await sio.connect('http://localhost:8080') await sio.emit('chat_message', {'response': 'my response'}) if __name__ == '__main__': asyncio.run(main()) The `taskiq`_ and the `taskiq-aio-pika`_ ---------------------------------------- **Taskiq** is an asynchronous distributed task queue for python. The project takes inspiration from big projects such as Celery and Dramatiq. But taskiq can send and run both the sync and async functions. The library provides you with **aio-pika** broker for running tasks too. .. code-block:: python from taskiq_aio_pika import AioPikaBroker broker = AioPikaBroker() @broker.task async def test() -> None: print("nothing") async def main(): await broker.startup() await test.kiq() `Rasa`_ ------- With over 25 million downloads, Rasa Open Source is the most popular open source framework for building chat and voice-based AI assistants. 
With **Rasa**, you can build contextual assistants on: * Facebook Messenger * Slack * Google Hangouts * Webex Teams * Microsoft Bot Framework * Rocket.Chat * Mattermost * Telegram * Twilio Your own custom conversational channels or voice assistants as: * Alexa Skills * Google Home Actions **Rasa** helps you build contextual assistants capable of having layered conversations with lots of back-and-forth. In order for a human to have a meaningful exchange with a contextual assistant, the assistant needs to be able to use context to build on things that were previously discussed – **Rasa** enables you to build assistants that can do this in a scalable way. And it also uses **aio-pika** to interact with **RabbitMQ** deep inside! Versioning ========== This software follows `Semantic Versioning`_ For contributors ---------------- Setting up development environment __________________________________ Clone the project: .. code-block:: shell git clone https://github.com/mosquito/aio-pika.git cd aio-pika Create a new virtualenv for `aio-pika`_: .. code-block:: shell python3 -m venv env source env/bin/activate Install all requirements for `aio-pika`_: .. code-block:: shell pip install -e '.[develop]' Running Tests _____________ **NOTE: In order to run the tests locally you need to run a RabbitMQ instance with default user/password (guest/guest) and port (5672).** The Makefile provides a command to run an appropriate RabbitMQ Docker image: .. code-block:: bash make rabbitmq To test just run: .. code-block:: bash make test Editing Documentation _____________________ To iterate quickly on the documentation live in your browser, try: .. code-block:: bash nox -s docs -- serve Creating Pull Requests ______________________ Please feel free to create pull requests, but you should describe your use cases and add some examples. Changes should follow a few simple rules: * When your changes break the public API, you must increase the major version. 
* When your changes are safe for public API (e.g. added an argument with default value) * You have to add test cases (see `tests/` folder) * You must add docstrings * Feel free to add yourself to `"thank's to" section`_ .. _"thank's to" section: https://github.com/mosquito/aio-pika/blob/master/docs/source/index.rst#thanks-for-contributing .. _Semantic Versioning: http://semver.org/ .. _aio-pika: https://github.com/mosquito/aio-pika/ .. _faststream: https://github.com/airtai/faststream .. _patio: https://github.com/patio-python/patio .. _patio-rabbitmq: https://github.com/patio-python/patio-rabbitmq .. _Socket.IO: https://socket.io/ .. _python-socketio: https://python-socketio.readthedocs.io/en/latest/intro.html .. _taskiq: https://github.com/taskiq-python/taskiq .. _taskiq-aio-pika: https://github.com/taskiq-python/taskiq-aio-pika .. _Rasa: https://rasa.com/docs/rasa/ python-aio-pika-9.5.5/aio_pika/000077500000000000000000000000001476164671100163345ustar00rootroot00000000000000python-aio-pika-9.5.5/aio_pika/__init__.py000066400000000000000000000020021476164671100204370ustar00rootroot00000000000000from . 
import abc, patterns, pool from .abc import DeliveryMode from .channel import Channel from .connection import Connection, connect from .exceptions import AMQPException, MessageProcessError from .exchange import Exchange, ExchangeType from .log import logger from .message import IncomingMessage, Message from .queue import Queue from .robust_channel import RobustChannel from .robust_connection import RobustConnection, connect_robust from .robust_exchange import RobustExchange from .robust_queue import RobustQueue from importlib.metadata import Distribution __version__ = Distribution.from_name("aio-pika").version __all__ = ( "AMQPException", "Channel", "Connection", "DeliveryMode", "Exchange", "ExchangeType", "IncomingMessage", "Message", "MessageProcessError", "Queue", "RobustChannel", "RobustConnection", "RobustExchange", "RobustQueue", "__version__", "abc", "connect", "connect_robust", "logger", "patterns", "pool", ) python-aio-pika-9.5.5/aio_pika/abc.py000066400000000000000000000622741476164671100174460ustar00rootroot00000000000000from __future__ import annotations import asyncio import dataclasses from abc import ABC, abstractmethod from dataclasses import dataclass from datetime import datetime, timedelta from enum import Enum, IntEnum, unique from functools import singledispatch from types import TracebackType from typing import ( Any, AsyncContextManager, AsyncIterable, Awaitable, Callable, Dict, Generator, Iterator, Literal, Mapping, Optional, Tuple, Type, TypedDict, TypeVar, Union, overload, ) import aiormq.abc from aiormq.abc import ExceptionType from pamqp.common import Arguments, FieldValue from yarl import URL from .pool import PoolInstance from .tools import ( CallbackCollection, CallbackSetType, CallbackType, OneShotCallback, ) TimeoutType = Optional[Union[int, float]] NoneType = type(None) DateType = Optional[Union[int, datetime, float, timedelta]] ExchangeParamType = Union["AbstractExchange", str] ConsumerTag = str MILLISECONDS = 1000 class 
SSLOptions(TypedDict, total=False): cafile: str capath: str cadata: str keyfile: str certfile: str no_verify_ssl: int @unique class ExchangeType(str, Enum): FANOUT = "fanout" DIRECT = "direct" TOPIC = "topic" HEADERS = "headers" X_DELAYED_MESSAGE = "x-delayed-message" X_CONSISTENT_HASH = "x-consistent-hash" X_MODULUS_HASH = "x-modulus-hash" @unique class DeliveryMode(IntEnum): NOT_PERSISTENT = 1 PERSISTENT = 2 @unique class TransactionState(str, Enum): CREATED = "created" COMMITED = "commited" ROLLED_BACK = "rolled back" STARTED = "started" @dataclasses.dataclass(frozen=True) class DeclarationResult: message_count: int consumer_count: int class AbstractTransaction: state: TransactionState @abstractmethod async def select( self, timeout: TimeoutType = None, ) -> aiormq.spec.Tx.SelectOk: raise NotImplementedError @abstractmethod async def rollback( self, timeout: TimeoutType = None, ) -> aiormq.spec.Tx.RollbackOk: raise NotImplementedError async def commit( self, timeout: TimeoutType = None, ) -> aiormq.spec.Tx.CommitOk: raise NotImplementedError async def __aenter__(self) -> "AbstractTransaction": raise NotImplementedError async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: raise NotImplementedError HeadersType = Dict[str, FieldValue] class MessageInfo(TypedDict, total=False): app_id: Optional[str] body_size: int cluster_id: Optional[str] consumer_tag: Optional[str] content_encoding: Optional[str] content_type: Optional[str] correlation_id: Optional[str] delivery_mode: DeliveryMode delivery_tag: Optional[int] exchange: Optional[str] expiration: Optional[DateType] headers: HeadersType message_id: Optional[str] priority: Optional[int] redelivered: Optional[bool] routing_key: Optional[str] reply_to: Optional[str] timestamp: Optional[datetime] type: str user_id: Optional[str] class AbstractMessage(ABC): __slots__ = () body: bytes body_size: int headers: HeadersType 
content_type: Optional[str] content_encoding: Optional[str] delivery_mode: DeliveryMode priority: Optional[int] correlation_id: Optional[str] reply_to: Optional[str] expiration: Optional[DateType] message_id: Optional[str] timestamp: Optional[datetime] type: Optional[str] user_id: Optional[str] app_id: Optional[str] @abstractmethod def info(self) -> MessageInfo: raise NotImplementedError @property @abstractmethod def locked(self) -> bool: raise NotImplementedError @property @abstractmethod def properties(self) -> aiormq.spec.Basic.Properties: raise NotImplementedError @abstractmethod def __iter__(self) -> Iterator[int]: raise NotImplementedError @abstractmethod def lock(self) -> None: raise NotImplementedError def __copy__(self) -> "AbstractMessage": raise NotImplementedError class AbstractIncomingMessage(AbstractMessage, ABC): __slots__ = () cluster_id: Optional[str] consumer_tag: Optional["ConsumerTag"] delivery_tag: Optional[int] redelivered: Optional[bool] message_count: Optional[int] routing_key: Optional[str] exchange: Optional[str] @property @abstractmethod def channel(self) -> aiormq.abc.AbstractChannel: raise NotImplementedError @abstractmethod def process( self, requeue: bool = False, reject_on_redelivered: bool = False, ignore_processed: bool = False, ) -> "AbstractProcessContext": raise NotImplementedError @abstractmethod async def ack(self, multiple: bool = False) -> None: raise NotImplementedError @abstractmethod async def reject(self, requeue: bool = False) -> None: raise NotImplementedError @abstractmethod async def nack(self, multiple: bool = False, requeue: bool = True) -> None: raise NotImplementedError def info(self) -> MessageInfo: raise NotImplementedError @property @abstractmethod def processed(self) -> bool: raise NotImplementedError class AbstractProcessContext(AsyncContextManager): @abstractmethod async def __aenter__(self) -> AbstractIncomingMessage: raise NotImplementedError @abstractmethod async def __aexit__( self, exc_type: 
Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: raise NotImplementedError class AbstractQueue: __slots__ = () channel: "AbstractChannel" name: str durable: bool exclusive: bool auto_delete: bool arguments: Arguments passive: bool declaration_result: aiormq.spec.Queue.DeclareOk close_callbacks: CallbackCollection[ AbstractQueue, [Optional[BaseException]], ] @abstractmethod def __init__( self, channel: aiormq.abc.AbstractChannel, name: Optional[str], durable: bool, exclusive: bool, auto_delete: bool, arguments: Arguments, passive: bool = False, ): raise NotImplementedError( dict( channel=channel, name=name, durable=durable, exclusive=exclusive, auto_delete=auto_delete, arguments=arguments, passive=passive, ), ) @abstractmethod async def declare( self, timeout: TimeoutType = None, ) -> aiormq.spec.Queue.DeclareOk: raise NotImplementedError @abstractmethod async def bind( self, exchange: ExchangeParamType, routing_key: Optional[str] = None, *, arguments: Arguments = None, timeout: TimeoutType = None, ) -> aiormq.spec.Queue.BindOk: raise NotImplementedError @abstractmethod async def unbind( self, exchange: ExchangeParamType, routing_key: Optional[str] = None, arguments: Arguments = None, timeout: TimeoutType = None, ) -> aiormq.spec.Queue.UnbindOk: raise NotImplementedError @abstractmethod async def consume( self, callback: Callable[[AbstractIncomingMessage], Awaitable[Any]], no_ack: bool = False, exclusive: bool = False, arguments: Arguments = None, consumer_tag: Optional[ConsumerTag] = None, timeout: TimeoutType = None, ) -> ConsumerTag: raise NotImplementedError @abstractmethod async def cancel( self, consumer_tag: ConsumerTag, timeout: TimeoutType = None, nowait: bool = False, ) -> aiormq.spec.Basic.CancelOk: raise NotImplementedError @overload async def get( self, *, no_ack: bool = False, fail: Literal[True] = ..., timeout: TimeoutType = ..., ) -> AbstractIncomingMessage: ... 
@overload async def get( self, *, no_ack: bool = False, fail: Literal[False] = ..., timeout: TimeoutType = ..., ) -> Optional[AbstractIncomingMessage]: ... @abstractmethod async def get( self, *, no_ack: bool = False, fail: bool = True, timeout: TimeoutType = 5, ) -> Optional[AbstractIncomingMessage]: raise NotImplementedError @abstractmethod async def purge( self, no_wait: bool = False, timeout: TimeoutType = None, ) -> aiormq.spec.Queue.PurgeOk: raise NotImplementedError @abstractmethod async def delete( self, *, if_unused: bool = True, if_empty: bool = True, timeout: TimeoutType = None, ) -> aiormq.spec.Queue.DeleteOk: raise NotImplementedError @abstractmethod def iterator(self, **kwargs: Any) -> "AbstractQueueIterator": raise NotImplementedError class AbstractQueueIterator(AsyncIterable[AbstractIncomingMessage]): _amqp_queue: AbstractQueue _queue: asyncio.Queue _consumer_tag: ConsumerTag _consume_kwargs: Dict[str, Any] @abstractmethod def close(self) -> Awaitable[Any]: raise NotImplementedError @abstractmethod async def on_message(self, message: AbstractIncomingMessage) -> None: raise NotImplementedError @abstractmethod async def consume(self) -> None: raise NotImplementedError @abstractmethod def __aiter__(self) -> "AbstractQueueIterator": raise NotImplementedError @abstractmethod def __aenter__(self) -> Awaitable["AbstractQueueIterator"]: raise NotImplementedError @abstractmethod async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: raise NotImplementedError @abstractmethod async def __anext__(self) -> AbstractIncomingMessage: raise NotImplementedError class AbstractExchange(ABC): name: str @abstractmethod def __init__( self, channel: "AbstractChannel", name: str, type: Union[ExchangeType, str] = ExchangeType.DIRECT, *, auto_delete: bool = False, durable: bool = False, internal: bool = False, passive: bool = False, arguments: Arguments = None, ): raise 
NotImplementedError @abstractmethod async def declare( self, timeout: TimeoutType = None, ) -> aiormq.spec.Exchange.DeclareOk: raise NotImplementedError @abstractmethod async def bind( self, exchange: ExchangeParamType, routing_key: str = "", *, arguments: Arguments = None, timeout: TimeoutType = None, ) -> aiormq.spec.Exchange.BindOk: raise NotImplementedError @abstractmethod async def unbind( self, exchange: ExchangeParamType, routing_key: str = "", arguments: Arguments = None, timeout: TimeoutType = None, ) -> aiormq.spec.Exchange.UnbindOk: raise NotImplementedError @abstractmethod async def publish( self, message: "AbstractMessage", routing_key: str, *, mandatory: bool = True, immediate: bool = False, timeout: TimeoutType = None, ) -> Optional[aiormq.abc.ConfirmationFrameType]: raise NotImplementedError @abstractmethod async def delete( self, if_unused: bool = False, timeout: TimeoutType = None, ) -> aiormq.spec.Exchange.DeleteOk: raise NotImplementedError @dataclasses.dataclass(frozen=True) class UnderlayChannel: channel: aiormq.abc.AbstractChannel close_callback: OneShotCallback @classmethod async def create( cls, connection: aiormq.abc.AbstractConnection, close_callback: Callable[..., Awaitable[Any]], **kwargs: Any, ) -> "UnderlayChannel": close_callback = OneShotCallback(close_callback) await connection.ready() connection.closing.add_done_callback(close_callback) channel = await connection.channel(**kwargs) channel.closing.add_done_callback(close_callback) return cls( channel=channel, close_callback=close_callback, ) async def close(self, exc: Optional[ExceptionType] = None) -> Any: if self.close_callback.finished.is_set(): return # close callbacks must be fired when closing # and should be deleted later to prevent memory leaks await self.channel.close(exc) await self.close_callback.wait() self.channel.closing.remove_done_callback(self.close_callback) self.channel.connection.closing.remove_done_callback( self.close_callback, ) class 
AbstractChannel(PoolInstance, ABC): QUEUE_CLASS: Type[AbstractQueue] EXCHANGE_CLASS: Type[AbstractExchange] close_callbacks: CallbackCollection[ AbstractChannel, [Optional[BaseException]], ] return_callbacks: CallbackCollection[ AbstractChannel, [AbstractIncomingMessage], ] default_exchange: AbstractExchange publisher_confirms: bool @property @abstractmethod def is_initialized(self) -> bool: return hasattr(self, "_channel") @property @abstractmethod def is_closed(self) -> bool: raise NotImplementedError @abstractmethod def close(self, exc: Optional[ExceptionType] = None) -> Awaitable[None]: raise NotImplementedError @abstractmethod def closed(self) -> Awaitable[Literal[True]]: raise NotImplementedError @abstractmethod async def get_underlay_channel(self) -> aiormq.abc.AbstractChannel: raise NotImplementedError @property @abstractmethod def number(self) -> Optional[int]: raise NotImplementedError @abstractmethod async def __aenter__(self) -> "AbstractChannel": raise NotImplementedError @abstractmethod def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> Awaitable[None]: raise NotImplementedError @abstractmethod async def initialize(self, timeout: TimeoutType = None) -> None: raise NotImplementedError @abstractmethod def reopen(self) -> Awaitable[None]: raise NotImplementedError @abstractmethod async def declare_exchange( self, name: str, type: Union[ExchangeType, str] = ExchangeType.DIRECT, *, durable: bool = False, auto_delete: bool = False, internal: bool = False, passive: bool = False, arguments: Arguments = None, timeout: TimeoutType = None, ) -> AbstractExchange: raise NotImplementedError @abstractmethod async def get_exchange( self, name: str, *, ensure: bool = True, ) -> AbstractExchange: raise NotImplementedError @abstractmethod async def declare_queue( self, name: Optional[str] = None, *, durable: bool = False, exclusive: bool = False, passive: bool = False, auto_delete: bool = 
False, arguments: Arguments = None, timeout: TimeoutType = None, ) -> AbstractQueue: raise NotImplementedError @abstractmethod async def get_queue( self, name: str, *, ensure: bool = True, ) -> AbstractQueue: raise NotImplementedError @abstractmethod async def set_qos( self, prefetch_count: int = 0, prefetch_size: int = 0, global_: bool = False, timeout: TimeoutType = None, all_channels: Optional[bool] = None, ) -> aiormq.spec.Basic.QosOk: raise NotImplementedError @abstractmethod async def queue_delete( self, queue_name: str, timeout: TimeoutType = None, if_unused: bool = False, if_empty: bool = False, nowait: bool = False, ) -> aiormq.spec.Queue.DeleteOk: raise NotImplementedError @abstractmethod async def exchange_delete( self, exchange_name: str, timeout: TimeoutType = None, if_unused: bool = False, nowait: bool = False, ) -> aiormq.spec.Exchange.DeleteOk: raise NotImplementedError @abstractmethod def transaction(self) -> AbstractTransaction: raise NotImplementedError @abstractmethod async def flow(self, active: bool = True) -> aiormq.spec.Channel.FlowOk: raise NotImplementedError @abstractmethod def __await__(self) -> Generator[Any, Any, "AbstractChannel"]: raise NotImplementedError @dataclasses.dataclass(frozen=True) class UnderlayConnection: connection: aiormq.abc.AbstractConnection close_callback: OneShotCallback @classmethod async def make_connection( cls, url: URL, timeout: TimeoutType = None, **kwargs: Any, ) -> aiormq.abc.AbstractConnection: connection: aiormq.abc.AbstractConnection = await asyncio.wait_for( aiormq.connect(url, **kwargs), timeout=timeout, ) await connection.ready() return connection @classmethod async def connect( cls, url: URL, close_callback: Callable[..., Awaitable[Any]], timeout: TimeoutType = None, **kwargs: Any, ) -> "UnderlayConnection": try: connection = await cls.make_connection( url, timeout=timeout, **kwargs, ) close_callback = OneShotCallback(close_callback) connection.closing.add_done_callback(close_callback) except 
Exception as e: closing = asyncio.get_event_loop().create_future() closing.set_exception(e) await close_callback(closing) raise await connection.ready() return cls( connection=connection, close_callback=close_callback, ) def ready(self) -> Awaitable[Any]: return self.connection.ready() async def close(self, exc: Optional[aiormq.abc.ExceptionType]) -> Any: if self.close_callback.finished.is_set(): return try: return await self.connection.close(exc) except asyncio.CancelledError: raise finally: await self.close_callback.wait() @dataclass class ConnectionParameter: name: str parser: Callable[[str], Any] default: Optional[str] = None is_kwarg: bool = False def parse(self, value: Optional[str]) -> Any: if value is None: return self.default try: return self.parser(value) except ValueError: return self.default class AbstractConnection(PoolInstance, ABC): PARAMETERS: Tuple[ConnectionParameter, ...] close_callbacks: CallbackCollection[ AbstractConnection, [Optional[BaseException]], ] connected: asyncio.Event transport: Optional[UnderlayConnection] kwargs: Mapping[str, Any] @abstractmethod def __init__( self, url: URL, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any, ): raise NotImplementedError( f"Method not implemented, passed: url={url}, loop={loop!r}", ) @property @abstractmethod def is_closed(self) -> bool: raise NotImplementedError @abstractmethod async def close(self, exc: ExceptionType = asyncio.CancelledError) -> None: raise NotImplementedError @abstractmethod def closed(self) -> Awaitable[Literal[True]]: raise NotImplementedError @abstractmethod async def connect(self, timeout: TimeoutType = None) -> None: raise NotImplementedError @abstractmethod def channel( self, channel_number: Optional[int] = None, publisher_confirms: bool = True, on_return_raises: bool = False, ) -> AbstractChannel: raise NotImplementedError @abstractmethod async def ready(self) -> None: raise NotImplementedError @abstractmethod async def __aenter__(self) -> 
"AbstractConnection": raise NotImplementedError @abstractmethod async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: raise NotImplementedError @abstractmethod async def update_secret( self, new_secret: str, *, reason: str = "", timeout: TimeoutType = None, ) -> aiormq.spec.Connection.UpdateSecretOk: raise NotImplementedError class AbstractRobustQueue(AbstractQueue): __slots__ = () @abstractmethod def restore(self) -> Awaitable[None]: raise NotImplementedError @abstractmethod async def bind( self, exchange: ExchangeParamType, routing_key: Optional[str] = None, *, arguments: Arguments = None, timeout: TimeoutType = None, robust: bool = True, ) -> aiormq.spec.Queue.BindOk: raise NotImplementedError @abstractmethod async def consume( self, callback: Callable[[AbstractIncomingMessage], Any], no_ack: bool = False, exclusive: bool = False, arguments: Arguments = None, consumer_tag: Optional[ConsumerTag] = None, timeout: TimeoutType = None, robust: bool = True, ) -> ConsumerTag: raise NotImplementedError class AbstractRobustExchange(AbstractExchange): @abstractmethod def restore(self) -> Awaitable[None]: raise NotImplementedError @abstractmethod async def bind( self, exchange: ExchangeParamType, routing_key: str = "", *, arguments: Arguments = None, timeout: TimeoutType = None, robust: bool = True, ) -> aiormq.spec.Exchange.BindOk: raise NotImplementedError class AbstractRobustChannel(AbstractChannel): reopen_callbacks: CallbackCollection[AbstractRobustChannel, []] @abstractmethod def reopen(self) -> Awaitable[None]: raise NotImplementedError @abstractmethod async def restore(self) -> None: raise NotImplementedError @abstractmethod async def declare_exchange( self, name: str, type: Union[ExchangeType, str] = ExchangeType.DIRECT, *, durable: bool = False, auto_delete: bool = False, internal: bool = False, passive: bool = False, arguments: Arguments = None, timeout: TimeoutType = 
None, robust: bool = True, ) -> AbstractRobustExchange: raise NotImplementedError @abstractmethod async def declare_queue( self, name: Optional[str] = None, *, durable: bool = False, exclusive: bool = False, passive: bool = False, auto_delete: bool = False, arguments: Optional[Dict[str, Any]] = None, timeout: TimeoutType = None, robust: bool = True, ) -> AbstractRobustQueue: raise NotImplementedError class AbstractRobustConnection(AbstractConnection): reconnect_callbacks: CallbackCollection[AbstractRobustConnection, []] @property @abstractmethod def reconnecting(self) -> bool: raise NotImplementedError @abstractmethod def reconnect(self) -> Awaitable[None]: raise NotImplementedError @abstractmethod def channel( self, channel_number: Optional[int] = None, publisher_confirms: bool = True, on_return_raises: bool = False, ) -> AbstractRobustChannel: raise NotImplementedError ChannelCloseCallback = Callable[ [Optional[AbstractChannel], Optional[BaseException]], Any, ] ConnectionCloseCallback = Callable[ [Optional[AbstractConnection], Optional[BaseException]], Any, ] ConnectionType = TypeVar("ConnectionType", bound=AbstractConnection) @singledispatch def get_exchange_name(value: Any) -> str: raise ValueError( "exchange argument must be an exchange " f"instance or str not {value!r}", ) @get_exchange_name.register(AbstractExchange) def _get_exchange_name_from_exchnage(value: AbstractExchange) -> str: return value.name @get_exchange_name.register(str) def _get_exchange_name_from_str(value: str) -> str: return value __all__ = ( "AbstractChannel", "AbstractConnection", "AbstractExchange", "AbstractIncomingMessage", "AbstractMessage", "AbstractProcessContext", "AbstractQueue", "AbstractQueueIterator", "AbstractRobustChannel", "AbstractRobustConnection", "AbstractRobustExchange", "AbstractRobustQueue", "AbstractTransaction", "CallbackSetType", "CallbackType", "ChannelCloseCallback", "ConnectionCloseCallback", "ConnectionParameter", "ConsumerTag", "DateType", 
"DeclarationResult", "DeliveryMode", "ExchangeParamType", "ExchangeType", "FieldValue", "HeadersType", "MILLISECONDS", "MessageInfo", "NoneType", "SSLOptions", "TimeoutType", "TransactionState", "UnderlayChannel", "UnderlayConnection", "get_exchange_name", ) python-aio-pika-9.5.5/aio_pika/channel.py000066400000000000000000000362021476164671100203210ustar00rootroot00000000000000import asyncio import contextlib import warnings from abc import ABC from types import TracebackType from typing import ( Any, AsyncContextManager, Awaitable, Generator, Literal, Optional, Type, Union, ) from warnings import warn import aiormq import aiormq.abc from pamqp.common import Arguments from .abc import ( AbstractChannel, AbstractConnection, AbstractExchange, AbstractQueue, TimeoutType, UnderlayChannel, ) from .exceptions import ChannelInvalidStateError from .exchange import Exchange, ExchangeType from .log import get_logger from .message import IncomingMessage from .queue import Queue from .tools import CallbackCollection from .transaction import Transaction log = get_logger(__name__) class ChannelContext(AsyncContextManager, AbstractChannel, ABC): async def __aenter__(self) -> "AbstractChannel": if not self.is_initialized: await self.initialize() return self async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: return await self.close(exc_val) def __await__(self) -> Generator[Any, Any, AbstractChannel]: yield from self.initialize().__await__() return self class Channel(ChannelContext): """Channel abstraction""" QUEUE_CLASS = Queue EXCHANGE_CLASS = Exchange _channel: Optional[UnderlayChannel] def __init__( self, connection: AbstractConnection, channel_number: Optional[int] = None, publisher_confirms: bool = True, on_return_raises: bool = False, ): """ :param connection: :class:`aio_pika.adapter.AsyncioConnection` instance :param loop: Event loop (:func:`asyncio.get_event_loop()` when 
:class:`None`) :param future_store: :class:`aio_pika.common.FutureStore` instance :param publisher_confirms: False if you don't need delivery confirmations (in pursuit of performance) """ if not publisher_confirms and on_return_raises: raise RuntimeError( '"on_return_raises" not applicable ' 'without "publisher_confirms"', ) self._connection: AbstractConnection = connection self._closed: asyncio.Future = ( asyncio.get_running_loop().create_future() ) self._channel: Optional[UnderlayChannel] = None self._channel_number = channel_number self.close_callbacks = CallbackCollection(self) self.return_callbacks = CallbackCollection(self) self.publisher_confirms = publisher_confirms self.on_return_raises = on_return_raises self.close_callbacks.add(self._set_closed_callback) @property def is_initialized(self) -> bool: """Returns True when the channel has been opened and ready for interaction""" return self._channel is not None @property def is_closed(self) -> bool: """Returns True when the channel has been closed from the broker side or after the close() method has been called.""" if not self.is_initialized or self._closed.done(): return True channel = self._channel if channel is None: return True return channel.channel.is_closed async def close( self, exc: Optional[aiormq.abc.ExceptionType] = None, ) -> None: if not self.is_initialized: log.warning("Channel not opened") return if not self._channel: log.warning("Transport is not ready") return log.debug("Closing channel %r", self) await self._channel.close() if not self._closed.done(): self._closed.set_result(True) def closed(self) -> Awaitable[Literal[True]]: return self._closed async def get_underlay_channel(self) -> aiormq.abc.AbstractChannel: if not self.is_initialized or not self._channel: raise aiormq.exceptions.ChannelInvalidStateError( "Channel was not opened", ) return self._channel.channel @property def channel(self) -> aiormq.abc.AbstractChannel: warnings.warn( "This property is deprecated, do not use this 
anymore.", DeprecationWarning, ) if self._channel is None: raise aiormq.exceptions.ChannelInvalidStateError return self._channel.channel @property def number(self) -> Optional[int]: if self._channel is None: return self._channel_number underlay_channel: UnderlayChannel = self._channel return underlay_channel.channel.number def __str__(self) -> str: return "{}".format(self.number or "Not initialized channel") async def _open(self) -> None: transport = self._connection.transport if transport is None: raise ChannelInvalidStateError("No active transport in channel") await transport.ready() channel = await UnderlayChannel.create( transport.connection, self._on_close, publisher_confirms=self.publisher_confirms, on_return_raises=self.on_return_raises, channel_number=self._channel_number, ) self._channel = channel try: await self._on_open() except BaseException as e: await channel.close(e) self._channel = None raise if self._closed.done(): self._closed = asyncio.get_running_loop().create_future() async def initialize(self, timeout: TimeoutType = None) -> None: if self.is_initialized: raise RuntimeError("Already initialized") elif self._closed.done(): raise RuntimeError("Can't initialize closed channel") await self._open() await self._on_initialized() async def _on_open(self) -> None: self.default_exchange: Exchange = self.EXCHANGE_CLASS( channel=self, arguments=None, auto_delete=False, durable=False, internal=False, name="", passive=False, type=ExchangeType.DIRECT, ) async def _on_close( self, closing: asyncio.Future ) -> Optional[BaseException]: try: exc = closing.exception() except asyncio.CancelledError as e: exc = e await self.close_callbacks(exc) if self._channel and self._channel.channel: self._channel.channel.on_return_callbacks.discard(self._on_return) return exc async def _set_closed_callback( self, _: Optional[AbstractChannel], exc: Optional[BaseException], ) -> None: if not self._closed.done(): self._closed.set_result(True) async def _on_initialized(self) -> 
None: channel = await self.get_underlay_channel() channel.on_return_callbacks.add(self._on_return) def _on_return(self, message: aiormq.abc.DeliveredMessage) -> None: self.return_callbacks(IncomingMessage(message, no_ack=True)) async def reopen(self) -> None: log.debug("Start reopening channel %r", self) await self._open() def __del__(self) -> None: with contextlib.suppress(AttributeError): # might raise because an Exception was raised in __init__ if not self._closed.done(): self._closed.set_result(True) self._channel = None async def declare_exchange( self, name: str, type: Union[ExchangeType, str] = ExchangeType.DIRECT, *, durable: bool = False, auto_delete: bool = False, internal: bool = False, passive: bool = False, arguments: Arguments = None, timeout: TimeoutType = None, ) -> AbstractExchange: """ Declare an exchange. :param name: string with exchange name or :class:`aio_pika.exchange.Exchange` instance :param type: Exchange type. Enum ExchangeType value or string. String values must be one of 'fanout', 'direct', 'topic', 'headers', 'x-delayed-message', 'x-consistent-hash'. :param durable: Durability (exchange survive broker restart) :param auto_delete: Delete queue when channel will be closed. :param internal: Do not send it to broker just create an object :param passive: Do not fail when entity was declared previously but has another params. Raises :class:`aio_pika.exceptions.ChannelClosed` when exchange doesn't exist. 
:param arguments: additional arguments :param timeout: execution timeout :return: :class:`aio_pika.exchange.Exchange` instance """ if auto_delete and durable is None: durable = False exchange = self.EXCHANGE_CLASS( channel=self, name=name, type=type, durable=durable, auto_delete=auto_delete, internal=internal, passive=passive, arguments=arguments, ) await exchange.declare(timeout=timeout) log.debug("Exchange declared %r", exchange) return exchange async def get_exchange( self, name: str, *, ensure: bool = True, ) -> AbstractExchange: """ With ``ensure=True``, it's a shortcut for ``.declare_exchange(..., passive=True)``; otherwise, it returns an exchange instance without checking its existence. When the exchange does not exist, if ``ensure=True``, will raise :class:`aio_pika.exceptions.ChannelClosed`. Use this method in a separate channel (or as soon as channel created). This is only a way to get an exchange without declaring a new one. :param name: exchange name :param ensure: ensure that the exchange exists :return: :class:`aio_pika.exchange.Exchange` instance :raises: :class:`aio_pika.exceptions.ChannelClosed` instance """ if ensure: return await self.declare_exchange(name=name, passive=True) else: return self.EXCHANGE_CLASS( channel=self, name=name, durable=False, auto_delete=False, internal=False, passive=True, arguments=None, ) async def declare_queue( self, name: Optional[str] = None, *, durable: bool = False, exclusive: bool = False, passive: bool = False, auto_delete: bool = False, arguments: Arguments = None, timeout: TimeoutType = None, ) -> AbstractQueue: """ :param name: queue name :param durable: Durability (queue survive broker restart) :param exclusive: Makes this queue exclusive. Exclusive queues may only be accessed by the current connection, and are deleted when that connection closes. Passive declaration of an exclusive queue by other connections are not allowed. 
:param passive: Do not fail when entity was declared previously but has another params. Raises :class:`aio_pika.exceptions.ChannelClosed` when queue doesn't exist. :param auto_delete: Delete queue when channel will be closed. :param arguments: additional arguments :param timeout: execution timeout :return: :class:`aio_pika.queue.Queue` instance :raises: :class:`aio_pika.exceptions.ChannelClosed` instance """ queue: AbstractQueue = self.QUEUE_CLASS( channel=self, name=name, durable=durable, exclusive=exclusive, auto_delete=auto_delete, arguments=arguments, passive=passive, ) await queue.declare(timeout=timeout) self.close_callbacks.add(queue.close_callbacks, weak=True) return queue async def get_queue( self, name: str, *, ensure: bool = True, ) -> AbstractQueue: """ With ``ensure=True``, it's a shortcut for ``.declare_queue(..., passive=True)``; otherwise, it returns a queue instance without checking its existence. When the queue does not exist, if ``ensure=True``, will raise :class:`aio_pika.exceptions.ChannelClosed`. Use this method in a separate channel (or as soon as channel created). This is only a way to get a queue without declaring a new one. 
:param name: queue name :param ensure: ensure that the queue exists :return: :class:`aio_pika.queue.Queue` instance :raises: :class:`aio_pika.exceptions.ChannelClosed` instance """ if ensure: return await self.declare_queue(name=name, passive=True) else: return self.QUEUE_CLASS( channel=self, name=name, durable=False, exclusive=False, auto_delete=False, arguments=None, passive=True, ) async def set_qos( self, prefetch_count: int = 0, prefetch_size: int = 0, global_: bool = False, timeout: TimeoutType = None, all_channels: Optional[bool] = None, ) -> aiormq.spec.Basic.QosOk: if all_channels is not None: warn('Use "global_" instead of "all_channels"', DeprecationWarning) global_ = all_channels channel = await self.get_underlay_channel() return await channel.basic_qos( prefetch_count=prefetch_count, prefetch_size=prefetch_size, global_=global_, timeout=timeout, ) async def queue_delete( self, queue_name: str, timeout: TimeoutType = None, if_unused: bool = False, if_empty: bool = False, nowait: bool = False, ) -> aiormq.spec.Queue.DeleteOk: channel = await self.get_underlay_channel() return await channel.queue_delete( queue=queue_name, if_unused=if_unused, if_empty=if_empty, nowait=nowait, timeout=timeout, ) async def exchange_delete( self, exchange_name: str, timeout: TimeoutType = None, if_unused: bool = False, nowait: bool = False, ) -> aiormq.spec.Exchange.DeleteOk: channel = await self.get_underlay_channel() return await channel.exchange_delete( exchange=exchange_name, if_unused=if_unused, nowait=nowait, timeout=timeout, ) def transaction(self) -> Transaction: if self.publisher_confirms: raise RuntimeError( "Cannot create transaction when publisher " "confirms are enabled", ) return Transaction(self) async def flow(self, active: bool = True) -> aiormq.spec.Channel.FlowOk: channel = await self.get_underlay_channel() return await channel.flow(active=active) __all__ = ("Channel",) 
python-aio-pika-9.5.5/aio_pika/connection.py000066400000000000000000000264011476164671100210500ustar00rootroot00000000000000import asyncio from ssl import SSLContext from types import TracebackType from typing import ( Any, Awaitable, Dict, Literal, Optional, Tuple, Type, TypeVar, Union ) import aiormq.abc from aiormq.connection import parse_int from pamqp.common import FieldTable from yarl import URL from .abc import ( AbstractChannel, AbstractConnection, ConnectionParameter, SSLOptions, TimeoutType, UnderlayConnection, ) from .channel import Channel from .exceptions import ConnectionClosed from .log import get_logger from .tools import CallbackCollection log = get_logger(__name__) T = TypeVar("T") class Connection(AbstractConnection): """ Connection abstraction """ CHANNEL_CLASS: Type[Channel] = Channel PARAMETERS: Tuple[ConnectionParameter, ...] = ( ConnectionParameter( name="interleave", parser=parse_int, is_kwarg=True, ), ConnectionParameter( name="happy_eyeballs_delay", parser=float, is_kwarg=True, ), ) _closed: asyncio.Future @property def is_closed(self) -> bool: return self._closed.done() async def close( self, exc: Optional[aiormq.abc.ExceptionType] = ConnectionClosed, ) -> None: transport, self.transport = self.transport, None self._close_called = True if not transport: return await transport.close(exc) if not self._closed.done(): self._closed.set_result(True) def closed(self) -> Awaitable[Literal[True]]: return self._closed @classmethod def _parse_parameters(cls, kwargs: Dict[str, Any]) -> Dict[str, Any]: result = {} for parameter in cls.PARAMETERS: value = kwargs.get(parameter.name, parameter.default) if parameter.is_kwarg and value is None: # skip optional value continue result[parameter.name] = parameter.parse(value) return result def __init__( self, url: URL, loop: Optional[asyncio.AbstractEventLoop] = None, ssl_context: Optional[SSLContext] = None, **kwargs: Any, ): self.loop = loop or asyncio.get_event_loop() self.transport = None self._closed = 
self.loop.create_future() self._close_called = False self.url = URL(url) self.kwargs: Dict[str, Any] = self._parse_parameters( kwargs or dict(self.url.query), ) self.kwargs["context"] = ssl_context self.close_callbacks = CallbackCollection(self) self.connected: asyncio.Event = asyncio.Event() def __str__(self) -> str: url = self.url if url.password: url = url.with_password("******") return str(url) def __repr__(self) -> str: return f'<{self.__class__.__name__}: "{self}">' async def _on_connection_close(self, closing: asyncio.Future) -> None: try: exc = closing.exception() except asyncio.CancelledError as e: exc = e self.connected.clear() await self.close_callbacks(exc) async def _on_connected(self) -> None: self.connected.set() async def connect(self, timeout: TimeoutType = None) -> None: """ Connect to AMQP server. This method should be called after :func:`aio_pika.connection.Connection.__init__` .. note:: This method is called by :func:`connect`. You shouldn't call it explicitly. """ self.transport = await UnderlayConnection.connect( self.url, self._on_connection_close, timeout=timeout, **self.kwargs, ) await self._on_connected() def channel( self, channel_number: Optional[int] = None, publisher_confirms: bool = True, on_return_raises: bool = False, ) -> AbstractChannel: """ Coroutine which returns new instance of :class:`Channel`. Example: .. code-block:: python import aio_pika async def main(loop): connection = await aio_pika.connect( "amqp://guest:guest@127.0.0.1/" ) channel1 = connection.channel() await channel1.close() # Creates channel with specific channel number channel42 = connection.channel(42) await channel42.close() # For working with transactions channel_no_confirms = await connection.channel( publisher_confirms=False ) await channel_no_confirms.close() Also available as an asynchronous context manager: .. 
code-block:: python import aio_pika async def main(loop): connection = await aio_pika.connect( "amqp://guest:guest@127.0.0.1/" ) async with connection.channel() as channel: # channel is open and available # channel is now closed :param channel_number: specify the channel number explicit :param publisher_confirms: if `True` the :func:`aio_pika.Exchange.publish` method will be return :class:`bool` after publish is complete. Otherwise the :func:`aio_pika.Exchange.publish` method will be return :class:`None` :param on_return_raises: raise an :class:`aio_pika.exceptions.DeliveryError` when mandatory message will be returned """ if not self.transport: raise RuntimeError("Connection was not opened") log.debug("Creating AMQP channel for connection: %r", self) channel = self.CHANNEL_CLASS( connection=self, channel_number=channel_number, publisher_confirms=publisher_confirms, on_return_raises=on_return_raises, ) log.debug("Channel created: %r", channel) return channel async def ready(self) -> None: await self.connected.wait() def __del__(self) -> None: if ( self.is_closed or self.loop.is_closed() ): return asyncio.ensure_future(self.close()) async def __aenter__(self) -> "Connection": return self async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: await self.close() async def update_secret( self, new_secret: str, *, reason: str = "", timeout: TimeoutType = None, ) -> aiormq.spec.Connection.UpdateSecretOk: if self.transport is None: raise RuntimeError("Connection is not ready") result = await self.transport.connection.update_secret( new_secret=new_secret, reason=reason, timeout=timeout, ) self.url = self.url.with_password(new_secret) return result def make_url( url: Union[str, URL, None] = None, *, host: str = "localhost", port: int = 5672, login: str = "guest", password: str = "guest", virtualhost: str = "/", ssl: bool = False, ssl_options: Optional[SSLOptions] = None, 
client_properties: Optional[FieldTable] = None, **kwargs: Any, ) -> URL: if url is not None: if not isinstance(url, URL): return URL(url) return url kw = kwargs kw.update(ssl_options or {}) kw.update(client_properties or {}) # sanitize keywords kw = {k: v for k, v in kw.items() if v is not None} return URL.build( scheme="amqps" if ssl else "amqp", host=host, port=port, user=login, password=password, # yarl >= 1.3.0 requires path beginning with slash path="/" + virtualhost, query=kw, ) async def connect( url: Union[str, URL, None] = None, *, host: str = "localhost", port: int = 5672, login: str = "guest", password: str = "guest", virtualhost: str = "/", ssl: bool = False, loop: Optional[asyncio.AbstractEventLoop] = None, ssl_options: Optional[SSLOptions] = None, ssl_context: Optional[SSLContext] = None, timeout: TimeoutType = None, client_properties: Optional[FieldTable] = None, connection_class: Type[AbstractConnection] = Connection, **kwargs: Any, ) -> AbstractConnection: """ Make connection to the broker. Example: .. code-block:: python import aio_pika async def main(): connection = await aio_pika.connect( "amqp://guest:guest@127.0.0.1/" ) Connect to localhost with default credentials: .. code-block:: python import aio_pika async def main(): connection = await aio_pika.connect() .. note:: The available keys for ssl_options parameter are: * cert_reqs * certfile * keyfile * ssl_version For an information on what the ssl_options can be set to reference the `official Python documentation`_ . Set connection name for RabbitMQ admin panel: .. code-block:: python # As URL parameter method read_connection = await connect( "amqp://guest:guest@localhost/?name=Read%20connection" ) write_connection = await connect( client_properties={ 'connection_name': 'Write connection' } ) .. note: ``client_properties`` argument requires ``aiormq>=2.9`` URL string might be containing ssl parameters e.g. 
`amqps://user:pass@host//?ca_certs=ca.pem&certfile=crt.pem&keyfile=key.pem` :param client_properties: add custom client capability. :param url: RFC3986_ formatted broker address. When :class:`None` will be used keyword arguments. :param host: hostname of the broker :param port: broker port 5672 by default :param login: username string. `'guest'` by default. :param password: password string. `'guest'` by default. :param virtualhost: virtualhost parameter. `'/'` by default :param ssl: use SSL for connection. Should be used with addition kwargs. :param ssl_options: A dict of values for the SSL connection. :param timeout: connection timeout in seconds :param loop: Event loop (:func:`asyncio.get_event_loop()` when :class:`None`) :param ssl_context: ssl.SSLContext instance :param connection_class: Factory of a new connection :param kwargs: addition parameters which will be passed to the connection. :return: :class:`aio_pika.connection.Connection` .. _RFC3986: https://goo.gl/MzgYAs .. _official Python documentation: https://goo.gl/pty9xA """ connection: AbstractConnection = connection_class( make_url( url, host=host, port=port, login=login, password=password, virtualhost=virtualhost, ssl=ssl, ssl_options=ssl_options, client_properties=client_properties, **kwargs, ), loop=loop, ssl_context=ssl_context, **kwargs, ) await connection.connect(timeout=timeout) return connection __all__ = ("Connection", "connect", "make_url") python-aio-pika-9.5.5/aio_pika/exceptions.py000066400000000000000000000024301476164671100210660ustar00rootroot00000000000000import asyncio import pamqp.exceptions from aiormq.exceptions import ( AMQPChannelError, AMQPConnectionError, AMQPError, AMQPException, AuthenticationError, ChannelClosed, ChannelInvalidStateError, ChannelNotFoundEntity, ChannelPreconditionFailed, ConnectionClosed, DeliveryError, DuplicateConsumerTag, IncompatibleProtocolError, InvalidFrameError, MethodNotImplemented, ProbableAuthenticationError, ProtocolSyntaxError, PublishError, ) 
CONNECTION_EXCEPTIONS = ( AMQPError, ConnectionError, OSError, RuntimeError, StopAsyncIteration, pamqp.exceptions.PAMQPException, ) class MessageProcessError(AMQPError): reason = "%s: %r" class QueueEmpty(AMQPError, asyncio.QueueEmpty): pass __all__ = ( "AMQPChannelError", "AMQPConnectionError", "AMQPError", "AMQPException", "AuthenticationError", "CONNECTION_EXCEPTIONS", "ChannelClosed", "ChannelInvalidStateError", "ChannelNotFoundEntity", "ChannelPreconditionFailed", "ConnectionClosed", "DeliveryError", "DuplicateConsumerTag", "IncompatibleProtocolError", "InvalidFrameError", "MessageProcessError", "MethodNotImplemented", "ProbableAuthenticationError", "ProtocolSyntaxError", "PublishError", "QueueEmpty", ) python-aio-pika-9.5.5/aio_pika/exchange.py000066400000000000000000000153461476164671100205010ustar00rootroot00000000000000from typing import Optional, Union import aiormq from pamqp.common import Arguments from .abc import ( AbstractChannel, AbstractExchange, AbstractMessage, ExchangeParamType, ExchangeType, TimeoutType, get_exchange_name, ) from .log import get_logger log = get_logger(__name__) class Exchange(AbstractExchange): """ Exchange abstraction """ channel: AbstractChannel def __init__( self, channel: AbstractChannel, name: str, type: Union[ExchangeType, str] = ExchangeType.DIRECT, *, auto_delete: bool = False, durable: bool = False, internal: bool = False, passive: bool = False, arguments: Arguments = None, ): self._type = type.value if isinstance(type, ExchangeType) else type self.channel = channel self.name = name self.auto_delete = auto_delete self.durable = durable self.internal = internal self.passive = passive self.arguments = arguments or {} def __str__(self) -> str: return self.name def __repr__(self) -> str: return ( f"<{self.__class__.__name__}({self}):" f" auto_delete={self.auto_delete}," f" durable={self.durable}," f" arguments={self.arguments!r})>" ) async def declare( self, timeout: TimeoutType = None, ) -> 
aiormq.spec.Exchange.DeclareOk: channel = await self.channel.get_underlay_channel() return await channel.exchange_declare( self.name, exchange_type=self._type, durable=self.durable, auto_delete=self.auto_delete, internal=self.internal, passive=self.passive, arguments=self.arguments, timeout=timeout, ) async def bind( self, exchange: ExchangeParamType, routing_key: str = "", *, arguments: Arguments = None, timeout: TimeoutType = None, ) -> aiormq.spec.Exchange.BindOk: """ A binding can also be a relationship between two exchanges. This can be simply read as: this exchange is interested in messages from another exchange. Bindings can take an extra routing_key parameter. To avoid the confusion with a basic_publish parameter we're going to call it a binding key. .. code-block:: python client = await connect() routing_key = 'simple_routing_key' src_exchange_name = "source_exchange" dest_exchange_name = "destination_exchange" channel = await client.channel() src_exchange = await channel.declare_exchange( src_exchange_name, auto_delete=True ) dest_exchange = await channel.declare_exchange( dest_exchange_name, auto_delete=True ) queue = await channel.declare_queue(auto_delete=True) await queue.bind(dest_exchange, routing_key) await dest_exchange.bind(src_exchange, routing_key) :param exchange: :class:`aio_pika.exchange.Exchange` instance :param routing_key: routing key :param arguments: additional arguments :param timeout: execution timeout :return: :class:`None` """ log.debug( "Binding exchange %r to exchange %r, routing_key=%r, arguments=%r", self, exchange, routing_key, arguments, ) channel = await self.channel.get_underlay_channel() return await channel.exchange_bind( arguments=arguments, destination=self.name, routing_key=routing_key, source=get_exchange_name(exchange), timeout=timeout, ) async def unbind( self, exchange: ExchangeParamType, routing_key: str = "", arguments: Arguments = None, timeout: TimeoutType = None, ) -> aiormq.spec.Exchange.UnbindOk: """ Remove 
exchange-to-exchange binding for this :class:`Exchange` instance :param exchange: :class:`aio_pika.exchange.Exchange` instance :param routing_key: routing key :param arguments: additional arguments :param timeout: execution timeout :return: :class:`None` """ log.debug( "Unbinding exchange %r from exchange %r, " "routing_key=%r, arguments=%r", self, exchange, routing_key, arguments, ) channel = await self.channel.get_underlay_channel() return await channel.exchange_unbind( arguments=arguments, destination=self.name, routing_key=routing_key, source=get_exchange_name(exchange), timeout=timeout, ) async def publish( self, message: AbstractMessage, routing_key: str, *, mandatory: bool = True, immediate: bool = False, timeout: TimeoutType = None, ) -> Optional[aiormq.abc.ConfirmationFrameType]: """ Publish the message to the queue. `aio-pika` uses `publisher confirms`_ extension for message delivery. .. _publisher confirms: https://www.rabbitmq.com/confirms.html """ log.debug( "Publishing message with routing key %r via exchange %r: %r", routing_key, self, message, ) if self.internal: # Caught on the client side to prevent channel closure raise ValueError( f"Can not publish to internal exchange: '{self.name}'!", ) if self.channel.is_closed: raise aiormq.exceptions.ChannelInvalidStateError( "%r closed" % self.channel, ) channel = await self.channel.get_underlay_channel() return await channel.basic_publish( exchange=self.name, routing_key=routing_key, body=message.body, properties=message.properties, mandatory=mandatory, immediate=immediate, timeout=timeout, ) async def delete( self, if_unused: bool = False, timeout: TimeoutType = None, ) -> aiormq.spec.Exchange.DeleteOk: """ Delete the queue :param timeout: operation timeout :param if_unused: perform deletion when queue has no bindings. 
""" log.info("Deleting %r", self) channel = await self.channel.get_underlay_channel() result = await channel.exchange_delete( self.name, if_unused=if_unused, timeout=timeout, ) del self.channel return result __all__ = ("Exchange", "ExchangeType", "ExchangeParamType") python-aio-pika-9.5.5/aio_pika/log.py000066400000000000000000000003661476164671100174740ustar00rootroot00000000000000import logging logger: logging.Logger = logging.getLogger("aio_pika") def get_logger(name: str) -> logging.Logger: package, module = name.split(".", 1) if package == logger.name: name = module return logger.getChild(name) python-aio-pika-9.5.5/aio_pika/message.py000066400000000000000000000450741476164671100203440ustar00rootroot00000000000000import warnings from datetime import datetime, timedelta, timezone from functools import singledispatch from pprint import pformat from types import TracebackType from typing import Any, Callable, Iterator, Optional, Type, TypeVar, Union import aiormq from aiormq.abc import DeliveredMessage from pamqp.common import FieldValue from .abc import ( MILLISECONDS, AbstractChannel, AbstractIncomingMessage, AbstractMessage, AbstractProcessContext, DateType, DeliveryMode, HeadersType, MessageInfo, NoneType, ) from .exceptions import ChannelInvalidStateError, MessageProcessError from .log import get_logger log = get_logger(__name__) def to_milliseconds(seconds: Union[float, int]) -> int: return int(seconds * MILLISECONDS) @singledispatch def encode_expiration(value: Any) -> Optional[str]: raise ValueError("Invalid timestamp type: %r" % type(value), value) @encode_expiration.register(datetime) def encode_expiration_datetime(value: datetime) -> str: now = datetime.now(tz=value.tzinfo) return str(to_milliseconds((value - now).total_seconds())) @encode_expiration.register(int) @encode_expiration.register(float) def encode_expiration_number(value: Union[int, float]) -> str: return str(to_milliseconds(value)) @encode_expiration.register(timedelta) def 
encode_expiration_timedelta(value: timedelta) -> str: return str(int(value.total_seconds() * MILLISECONDS)) @encode_expiration.register(NoneType) def encode_expiration_none(_: Any) -> None: return None @singledispatch def decode_expiration(t: Any) -> Optional[float]: raise ValueError("Invalid expiration type: %r" % type(t), t) @decode_expiration.register(str) def decode_expiration_str(t: str) -> float: return float(t) / MILLISECONDS @decode_expiration.register(NoneType) def decode_expiration_none(_: Any) -> None: return None @singledispatch def encode_timestamp(value: Any) -> Optional[datetime]: raise ValueError("Invalid timestamp type: %r" % type(value), value) @encode_timestamp.register(datetime) def encode_timestamp_datetime(value: datetime) -> datetime: return value @encode_timestamp.register(float) @encode_timestamp.register(int) def encode_timestamp_number(value: Union[int, float]) -> datetime: return datetime.fromtimestamp(value, tz=timezone.utc) @encode_timestamp.register(timedelta) def encode_timestamp_timedelta(value: timedelta) -> datetime: return datetime.now(tz=timezone.utc) + value @encode_timestamp.register(NoneType) def encode_timestamp_none(_: Any) -> None: return None @singledispatch def decode_timestamp(value: Any) -> Optional[datetime]: raise ValueError("Invalid timestamp type: %r" % type(value), value) @decode_timestamp.register(datetime) def decode_timestamp_datetime(value: datetime) -> datetime: return value @decode_timestamp.register(NoneType) def decode_timestamp_none(_: Any) -> None: return None V = TypeVar("V") D = TypeVar("D") T = TypeVar("T") def optional( value: V, func: Union[Callable[[V], T], Type[T]], default: Optional[D] = None, ) -> Union[T, D]: return func(value) if value else default # type: ignore class Message(AbstractMessage): """ AMQP message abstraction """ __slots__ = ( "app_id", "body", "body_size", "content_encoding", "content_type", "correlation_id", "delivery_mode", "expiration", "_headers", "headers", "message_id", 
"priority", "reply_to", "timestamp", "type", "user_id", "__lock", ) def __init__( self, body: bytes, *, headers: Optional[HeadersType] = None, content_type: Optional[str] = None, content_encoding: Optional[str] = None, delivery_mode: Union[DeliveryMode, int, None] = None, priority: Optional[int] = None, correlation_id: Optional[str] = None, reply_to: Optional[str] = None, expiration: Optional[DateType] = None, message_id: Optional[str] = None, timestamp: Optional[DateType] = None, type: Optional[str] = None, user_id: Optional[str] = None, app_id: Optional[str] = None, ): """ Creates a new instance of Message :param body: message body :param headers: message headers :param content_type: content type :param content_encoding: content encoding :param delivery_mode: delivery mode :param priority: priority :param correlation_id: correlation id :param reply_to: reply to :param expiration: expiration in seconds (or datetime or timedelta) :param message_id: message id :param timestamp: timestamp :param type: type :param user_id: user id :param app_id: app id """ self.__lock = False self.body = body if isinstance(body, bytes) else bytes(body) self.body_size = len(self.body) if self.body else 0 self.headers: HeadersType = headers or {} self.content_type = content_type self.content_encoding = content_encoding self.delivery_mode: DeliveryMode = DeliveryMode( optional( delivery_mode, int, DeliveryMode.NOT_PERSISTENT, ), ) self.priority = optional(priority, int, 0) self.correlation_id = optional(correlation_id, str) self.reply_to = optional(reply_to, str) self.expiration = expiration self.message_id = optional(message_id, str) self.timestamp = encode_timestamp(timestamp) self.type = optional(type, str) self.user_id = optional(user_id, str) self.app_id = optional(app_id, str) @property def headers_raw(self) -> HeadersType: warnings.warn( f"{self.__class__.__name__}.headers_raw deprecated, please use " f"{self.__class__.__name__}.headers instead.", DeprecationWarning, ) return 
self.headers @staticmethod def _as_bytes(value: Any) -> bytes: if isinstance(value, bytes): return value elif isinstance(value, str): return value.encode() elif value is None: return b"" else: return str(value).encode() def info(self) -> MessageInfo: return MessageInfo( app_id=self.app_id, body_size=self.body_size, cluster_id=None, consumer_tag=None, content_encoding=self.content_encoding, content_type=self.content_type, correlation_id=self.correlation_id, delivery_mode=self.delivery_mode, delivery_tag=None, exchange=None, expiration=self.expiration, headers=self.headers, message_id=self.message_id, priority=self.priority, redelivered=None, reply_to=self.reply_to, routing_key=None, timestamp=decode_timestamp(self.timestamp), type=str(self.type), user_id=self.user_id, ) @property def locked(self) -> bool: """ is message locked :return: :class:`bool` """ return bool(self.__lock) @property def properties(self) -> aiormq.spec.Basic.Properties: """ Build :class:`aiormq.spec.Basic.Properties` object """ return aiormq.spec.Basic.Properties( app_id=self.app_id, content_encoding=self.content_encoding, content_type=self.content_type, correlation_id=self.correlation_id, delivery_mode=self.delivery_mode, expiration=encode_expiration(self.expiration), headers=self.headers, message_id=self.message_id, message_type=self.type, priority=self.priority, reply_to=self.reply_to, timestamp=self.timestamp, user_id=self.user_id, ) def __repr__(self) -> str: return "{name}:{repr}".format( name=self.__class__.__name__, repr=pformat(self.info()), ) def __setattr__(self, key: str, value: FieldValue) -> None: if not key.startswith("_") and self.locked: raise ValueError("Message is locked") return super().__setattr__(key, value) def __iter__(self) -> Iterator[int]: return iter(self.body) def lock(self) -> None: """ Set lock flag to `True`""" self.__lock = True def __copy__(self) -> "Message": return Message( body=self.body, headers=self.headers, content_encoding=self.content_encoding, 
content_type=self.content_type, delivery_mode=self.delivery_mode, priority=self.priority, correlation_id=self.correlation_id, reply_to=self.reply_to, expiration=self.expiration, message_id=self.message_id, timestamp=self.timestamp, type=self.type, user_id=self.user_id, app_id=self.app_id, ) class IncomingMessage(Message, AbstractIncomingMessage): """ Incoming message is seems like Message but has additional methods for message acknowledgement. Depending on the acknowledgement mode used, RabbitMQ can consider a message to be successfully delivered either immediately after it is sent out (written to a TCP socket) or when an explicit ("manual") client acknowledgement is received. Manually sent acknowledgements can be positive or negative and use one of the following protocol methods: * basic.ack is used for positive acknowledgements * basic.nack is used for negative acknowledgements (note: this is a RabbitMQ extension to AMQP 0-9-1) * basic.reject is used for negative acknowledgements but has one limitation compared to basic.nack Positive acknowledgements simply instruct RabbitMQ to record a message as delivered. Negative acknowledgements with basic.reject have the same effect. The difference is primarily in the semantics: positive acknowledgements assume a message was successfully processed while their negative counterpart suggests that a delivery wasn't processed but still should be deleted. 
""" __slots__ = ( "_loop", "__channel", "cluster_id", "consumer_tag", "delivery_tag", "exchange", "routing_key", "redelivered", "__no_ack", "__processed", "message_count", ) def __init__(self, message: DeliveredMessage, no_ack: bool = False): """ Create an instance of :class:`IncomingMessage` """ self.__channel = message.channel self.__no_ack = no_ack self.__processed = False super().__init__( body=message.body, content_type=message.header.properties.content_type, content_encoding=message.header.properties.content_encoding, headers=message.header.properties.headers, delivery_mode=message.header.properties.delivery_mode, priority=message.header.properties.priority, correlation_id=message.header.properties.correlation_id, reply_to=message.header.properties.reply_to, expiration=decode_expiration(message.header.properties.expiration), message_id=message.header.properties.message_id, timestamp=decode_timestamp(message.header.properties.timestamp), type=message.header.properties.message_type, user_id=message.header.properties.user_id, app_id=message.header.properties.app_id, ) self.cluster_id = message.header.properties.cluster_id self.consumer_tag = message.consumer_tag self.delivery_tag = message.delivery_tag self.exchange = message.exchange self.message_count = message.message_count self.redelivered = message.redelivered self.routing_key = message.routing_key if no_ack or not self.delivery_tag: self.lock() self.__processed = True @property def channel(self) -> aiormq.abc.AbstractChannel: if self.__channel.is_closed: raise ChannelInvalidStateError return self.__channel def process( self, requeue: bool = False, reject_on_redelivered: bool = False, ignore_processed: bool = False, ) -> AbstractProcessContext: """ Context manager for processing the message >>> async def on_message_received(message: IncomingMessage): ... async with message.process(): ... # When exception will be raised ... # the message will be rejected ... 
print(message.body) Example with ignore_processed=True >>> async def on_message_received(message: IncomingMessage): ... async with message.process(ignore_processed=True): ... # Now (with ignore_processed=True) you may reject ... # (or ack) message manually too ... if True: # some reasonable condition here ... await message.reject() ... print(message.body) :param requeue: Requeue message when exception. :param reject_on_redelivered: When True message will be rejected only when message was redelivered. :param ignore_processed: Do nothing if message already processed """ return ProcessContext( self, requeue=requeue, reject_on_redelivered=reject_on_redelivered, ignore_processed=ignore_processed, ) async def ack(self, multiple: bool = False) -> None: """ Send basic.ack is used for positive acknowledgements .. note:: This method looks like a blocking-method, but actually it just sends bytes to the socket and doesn't require any responses from the broker. :param multiple: If set to True, the message's delivery tag is treated as "up to and including", so that multiple messages can be acknowledged with a single method. If set to False, the ack refers to a single message. :return: None """ if self.__no_ack: raise TypeError('Can\'t ack message with "no_ack" flag') if self.__processed: raise MessageProcessError("Message already processed", self) if self.delivery_tag is not None: await self.channel.basic_ack( delivery_tag=self.delivery_tag, multiple=multiple, ) self.__processed = True if not self.locked: self.lock() async def reject(self, requeue: bool = False) -> None: """ When `requeue=True` the message will be returned to queue. Otherwise, message will be dropped. .. note:: This method looks like a blocking-method, but actually it just sends bytes to the socket and doesn't require any responses from the broker. 
:param requeue: bool """ if self.__no_ack: raise TypeError('This message has "no_ack" flag.') if self.__processed: raise MessageProcessError("Message already processed", self) if self.delivery_tag is not None: await self.channel.basic_reject( delivery_tag=self.delivery_tag, requeue=requeue, ) self.__processed = True if not self.locked: self.lock() async def nack( self, multiple: bool = False, requeue: bool = True, ) -> None: if not self.channel.connection.basic_nack: raise RuntimeError("Method not supported on server") if self.__no_ack: raise TypeError('Can\'t nack message with "no_ack" flag') if self.__processed: raise MessageProcessError("Message already processed", self) if self.delivery_tag is not None: await self.channel.basic_nack( delivery_tag=self.delivery_tag, multiple=multiple, requeue=requeue, ) self.__processed = True if not self.locked: self.lock() def info(self) -> MessageInfo: """ Method returns dict representation of the message """ info = super().info() info["cluster_id"] = self.cluster_id info["consumer_tag"] = self.consumer_tag info["delivery_tag"] = self.delivery_tag info["exchange"] = self.exchange info["redelivered"] = self.redelivered info["routing_key"] = self.routing_key return info @property def processed(self) -> bool: return self.__processed class ReturnedMessage(IncomingMessage): pass ReturnCallback = Callable[[AbstractChannel, ReturnedMessage], Any] class ProcessContext(AbstractProcessContext): def __init__( self, message: IncomingMessage, *, requeue: bool, reject_on_redelivered: bool, ignore_processed: bool, ): self.message = message self.requeue = requeue self.reject_on_redelivered = reject_on_redelivered self.ignore_processed = ignore_processed async def __aenter__(self) -> IncomingMessage: return self.message async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: if not exc_type: if not self.ignore_processed or not self.message.processed: 
await self.message.ack() return if not self.ignore_processed or not self.message.processed: if self.reject_on_redelivered and self.message.redelivered: if not self.message.channel.is_closed: log.info( "Message %r was redelivered and will be rejected", self.message, ) await self.message.reject(requeue=False) return log.warning( "Message %r was redelivered and reject is not sent " "since channel is closed", self.message, ) else: if not self.message.channel.is_closed: await self.message.reject(requeue=self.requeue) return log.warning("Reject is not sent since channel is closed") __all__ = "Message", "IncomingMessage", "ReturnedMessage", python-aio-pika-9.5.5/aio_pika/patterns/000077500000000000000000000000001476164671100201745ustar00rootroot00000000000000python-aio-pika-9.5.5/aio_pika/patterns/__init__.py000066400000000000000000000003511476164671100223040ustar00rootroot00000000000000from .master import JsonMaster, Master, NackMessage, RejectMessage, Worker from .rpc import RPC, JsonRPC __all__ = ( "Master", "NackMessage", "RejectMessage", "RPC", "Worker", "JsonMaster", "JsonRPC", ) python-aio-pika-9.5.5/aio_pika/patterns/base.py000066400000000000000000000026471476164671100214710ustar00rootroot00000000000000import pickle from typing import Any, Awaitable, Callable, TypeVar T = TypeVar("T") CallbackType = Callable[..., Awaitable[T]] class Method: __slots__ = ( "name", "func", ) def __init__(self, name: str, func: Callable[..., Any]): self.name = name self.func = func def __getattr__(self, item: str) -> "Method": return Method(".".join((self.name, item)), func=self.func) def __call__(self, **kwargs: Any) -> Any: return self.func(self.name, kwargs=kwargs) class Proxy: __slots__ = ("func",) def __init__(self, func: Callable[..., Any]): self.func = func def __getattr__(self, item: str) -> Method: return Method(item, self.func) class Base: __slots__ = () SERIALIZER = pickle CONTENT_TYPE = "application/python-pickle" def serialize(self, data: Any) -> bytes: """ Serialize data 
to the bytes. Uses `pickle` by default. You should overlap this method when you want to change serializer :param data: Data which will be serialized """ return self.SERIALIZER.dumps(data) def deserialize(self, data: bytes) -> Any: """ Deserialize data from bytes. Uses `pickle` by default. You should overlap this method when you want to change serializer :param data: Data which will be deserialized """ return self.SERIALIZER.loads(data) python-aio-pika-9.5.5/aio_pika/patterns/master.py000066400000000000000000000141671476164671100220520ustar00rootroot00000000000000import asyncio import gzip import json import logging from functools import partial from types import MappingProxyType from typing import Any, Awaitable, Mapping, Optional import aiormq from aio_pika.abc import ( AbstractChannel, AbstractExchange, AbstractIncomingMessage, AbstractQueue, ConsumerTag, DeliveryMode, ) from aio_pika.message import Message from ..tools import create_task, ensure_awaitable from .base import Base, CallbackType, Proxy, T log = logging.getLogger(__name__) class MessageProcessingError(Exception): pass class NackMessage(MessageProcessingError): def __init__(self, requeue: bool = False): self.requeue = requeue class RejectMessage(MessageProcessingError): def __init__(self, requeue: bool = False): self.requeue = requeue class Worker: __slots__ = ( "queue", "consumer_tag", "loop", ) def __init__( self, queue: AbstractQueue, consumer_tag: ConsumerTag, loop: asyncio.AbstractEventLoop, ): self.queue = queue self.consumer_tag = consumer_tag self.loop = loop def close(self) -> Awaitable[None]: """ Cancel subscription to the channel :return: :class:`asyncio.Task` """ async def closer() -> None: await self.queue.cancel(self.consumer_tag) return create_task(closer) class Master(Base): __slots__ = ( "channel", "loop", "proxy", "_requeue", "_reject_on_redelivered", ) DELIVERY_MODE = DeliveryMode.PERSISTENT __doc__ = """ Implements Master/Worker pattern. 
Usage example: `worker.py` :: master = Master(channel) worker = await master.create_worker('test_worker', lambda x: print(x)) `master.py` :: master = Master(channel) await master.proxy.test_worker('foo') """ def __init__( self, channel: AbstractChannel, requeue: bool = True, reject_on_redelivered: bool = False, ): """ Creates a new :class:`Master` instance. :param channel: Initialized instance of :class:`aio_pika.Channel` """ self.channel: AbstractChannel = channel self.loop: asyncio.AbstractEventLoop = asyncio.get_event_loop() self.proxy = Proxy(self.create_task) self.channel.return_callbacks.add(self.on_message_returned) self._requeue = requeue self._reject_on_redelivered = reject_on_redelivered @property def exchange(self) -> AbstractExchange: return self.channel.default_exchange @staticmethod def on_message_returned( channel: Optional[AbstractChannel], message: AbstractIncomingMessage, ) -> None: log.warning( "Message returned. Probably destination queue does not exists: %r", message, ) def serialize(self, data: Any) -> bytes: """ Serialize data to the bytes. Uses `pickle` by default. You should overlap this method when you want to change serializer :param data: Data which will be serialized :returns: bytes """ return super().serialize(data) def deserialize(self, data: bytes) -> Any: """ Deserialize data from bytes. Uses `pickle` by default. 
You should overlap this method when you want to change serializer :param data: Data which will be deserialized :returns: :class:`Any` """ return super().deserialize(data) @classmethod async def execute( cls, func: CallbackType, kwargs: Any, ) -> T: kwargs = kwargs or {} if not isinstance(kwargs, dict): logging.error("Bad kwargs %r received for the %r", kwargs, func) raise RejectMessage(requeue=False) return await func(**kwargs) async def on_message( self, func: CallbackType, message: AbstractIncomingMessage, ) -> None: async with message.process( requeue=self._requeue, reject_on_redelivered=self._reject_on_redelivered, ignore_processed=True, ): try: await self.execute(func, self.deserialize(message.body)) except RejectMessage as e: await message.reject(requeue=e.requeue) except NackMessage as e: await message.nack(requeue=e.requeue) async def create_queue( self, queue_name: str, **kwargs: Any, ) -> AbstractQueue: return await self.channel.declare_queue(queue_name, **kwargs) async def create_worker( self, queue_name: str, func: CallbackType, **kwargs: Any, ) -> Worker: """ Creates a new :class:`Worker` instance. 
""" queue = await self.create_queue(queue_name, **kwargs) consumer_tag = await queue.consume( partial(self.on_message, ensure_awaitable(func)), ) return Worker(queue, consumer_tag, self.loop) async def create_task( self, channel_name: str, kwargs: Mapping[str, Any] = MappingProxyType({}), **message_kwargs: Any, ) -> Optional[aiormq.abc.ConfirmationFrameType]: """ Creates a new task for the worker """ message = Message( body=self.serialize(kwargs), content_type=self.CONTENT_TYPE, delivery_mode=self.DELIVERY_MODE, **message_kwargs, ) return await self.exchange.publish( message, channel_name, mandatory=True, ) class JsonMaster(Master): SERIALIZER = json CONTENT_TYPE = "application/json" def serialize(self, data: Any) -> bytes: return self.SERIALIZER.dumps(data, ensure_ascii=False).encode() class CompressedJsonMaster(Master): SERIALIZER = json CONTENT_TYPE = "application/json;compression=gzip" COMPRESS_LEVEL = 6 def serialize(self, data: Any) -> bytes: return gzip.compress( self.SERIALIZER.dumps(data, ensure_ascii=False).encode(), compresslevel=self.COMPRESS_LEVEL, ) def deserialize(self, data: bytes) -> Any: return self.SERIALIZER.loads(gzip.decompress(data)) python-aio-pika-9.5.5/aio_pika/patterns/rpc.py000066400000000000000000000370701476164671100213410ustar00rootroot00000000000000import asyncio import json import logging import time import uuid from enum import Enum from functools import partial from typing import Any, Callable, Dict, Optional, Tuple from aiormq.abc import ExceptionType from aio_pika.abc import ( AbstractChannel, AbstractExchange, AbstractIncomingMessage, AbstractQueue, ConsumerTag, DeliveryMode, ) from aio_pika.exceptions import MessageProcessError from aio_pika.exchange import ExchangeType from aio_pika.message import IncomingMessage, Message from ..tools import ensure_awaitable from .base import Base, CallbackType, Proxy, T log = logging.getLogger(__name__) class RPCException(RuntimeError): pass class RPCMessageType(str, Enum): ERROR = "error" 
RESULT = "result" CALL = "call" # This needed only for migration from 6.x to 7.x # TODO: Remove this in 8.x release RPCMessageTypes = RPCMessageType # noqa class RPC(Base): __slots__ = ( "channel", "loop", "proxy", "futures", "result_queue", "result_consumer_tag", "routes", "queues", "consumer_tags", "dlx_exchange", "rpc_exchange", "host_exceptions", ) DLX_NAME = "rpc.dlx" DELIVERY_MODE = DeliveryMode.NOT_PERSISTENT __doc__ = """ Remote Procedure Call helper. Create an instance :: rpc = await RPC.create(channel, host_exceptions=False) Registering python function :: # RPC instance passes only keyword arguments def multiply(*, x, y): return x * y await rpc.register("multiply", multiply) Call function through proxy :: assert await rpc.proxy.multiply(x=2, y=3) == 6 Call function explicit :: assert await rpc.call('multiply', dict(x=2, y=3)) == 6 Show exceptions on remote side :: rpc = await RPC.create(channel, host_exceptions=True) """ result_queue: AbstractQueue result_consumer_tag: ConsumerTag dlx_exchange: AbstractExchange rpc_exchange: Optional[AbstractExchange] def __init__( self, channel: AbstractChannel, host_exceptions: bool = False, ) -> None: self.channel = channel self.loop = asyncio.get_event_loop() self.proxy = Proxy(self.call) self.futures: Dict[str, asyncio.Future] = {} self.routes: Dict[str, Callable[..., Any]] = {} self.queues: Dict[Callable[..., Any], AbstractQueue] = {} self.consumer_tags: Dict[Callable[..., Any], ConsumerTag] = {} self.host_exceptions = host_exceptions def __remove_future( self, correlation_id: str, ) -> Callable[[asyncio.Future], None]: def do_remove(future: asyncio.Future) -> None: log.debug("Remove done future %r", future) self.futures.pop(correlation_id, None) return do_remove def create_future(self) -> Tuple[asyncio.Future, str]: future = self.loop.create_future() log.debug("Create future for RPC call") correlation_id = str(uuid.uuid4()) self.futures[correlation_id] = future 
future.add_done_callback(self.__remove_future(correlation_id)) return future, correlation_id def _format_routing_key(self, method_name: str) -> str: return ( f"{self.rpc_exchange.name}::{method_name}" if self.rpc_exchange else method_name ) async def close(self) -> None: if not hasattr(self, "result_queue"): log.warning("RPC already closed") return log.debug("Cancelling listening %r", self.result_queue) await self.result_queue.cancel(self.result_consumer_tag) del self.result_consumer_tag log.debug("Unbinding %r", self.result_queue) await self.result_queue.unbind( self.dlx_exchange, "", arguments={"From": self.result_queue.name, "x-match": "any"}, ) log.debug("Cancelling undone futures %r", self.futures) for future in self.futures.values(): if future.done(): continue future.set_exception(asyncio.CancelledError) log.debug("Deleting %r", self.result_queue) await self.result_queue.delete() del self.result_queue del self.dlx_exchange if self.rpc_exchange: del self.rpc_exchange async def initialize( self, auto_delete: bool = True, durable: bool = False, exchange: str = "", **kwargs: Any, ) -> None: if hasattr(self, "result_queue"): return self.rpc_exchange = await self.channel.declare_exchange( exchange, type=ExchangeType.DIRECT, auto_delete=True, durable=durable, ) if exchange else None self.result_queue = await self.channel.declare_queue( None, auto_delete=auto_delete, durable=durable, **kwargs, ) self.dlx_exchange = await self.channel.declare_exchange( self.DLX_NAME, type=ExchangeType.HEADERS, auto_delete=True, ) await self.result_queue.bind( self.dlx_exchange, "", arguments={"From": self.result_queue.name, "x-match": "any"}, ) self.result_consumer_tag = await self.result_queue.consume( self.on_result_message, exclusive=True, no_ack=True, ) self.channel.close_callbacks.add(self.on_close) self.channel.return_callbacks.add(self.on_message_returned) def on_close( self, channel: Optional[AbstractChannel], exc: Optional[ExceptionType] = None, ) -> None: log.debug("Closing 
RPC futures because %r", exc) for future in self.futures.values(): if future.done(): continue future.set_exception(exc or Exception) @classmethod async def create(cls, channel: AbstractChannel, **kwargs: Any) -> "RPC": """ Creates a new instance of :class:`aio_pika.patterns.RPC`. You should use this method instead of :func:`__init__`, because :func:`create` returns coroutine and makes async initialize :param channel: initialized instance of :class:`aio_pika.Channel` :returns: :class:`RPC` """ rpc = cls(channel) await rpc.initialize(**kwargs) return rpc def on_message_returned( self, channel: Optional[AbstractChannel], message: AbstractIncomingMessage, ) -> None: if message.correlation_id is None: log.warning( "Message without correlation_id was returned: %r", message, ) return future = self.futures.pop(message.correlation_id, None) if not future or future.done(): log.warning("Unknown message was returned: %r", message) return future.set_exception( MessageProcessError("Message has been returned", message), ) async def on_result_message(self, message: AbstractIncomingMessage) -> None: if message.correlation_id is None: log.warning( "Message without correlation_id was received: %r", message, ) return future = self.futures.pop(message.correlation_id, None) if future is None: log.warning("Unknown message: %r", message) return try: payload = await self.deserialize_message(message) except Exception as e: log.error("Failed to deserialize response on message: %r", message) future.set_exception(e) return if message.type == RPCMessageType.RESULT.value: future.set_result(payload) elif message.type == RPCMessageType.ERROR.value: if not isinstance(payload, Exception): payload = RPCException("Wrapped non-exception object", payload) future.set_exception(payload) elif message.type == RPCMessageType.CALL.value: future.set_exception( asyncio.TimeoutError("Message timed-out", message), ) else: future.set_exception( RuntimeError("Unknown message type %r" % message.type), ) async def 
on_call_message( self, method_name: str, message: IncomingMessage, ) -> None: routing_key = self._format_routing_key(method_name) if routing_key not in self.routes: log.warning("Method %r not registered in %r", method_name, self) return try: payload = await self.deserialize_message(message) func = self.routes[routing_key] result: Any = await self.execute(func, payload) message_type = RPCMessageType.RESULT except Exception as e: result = self.serialize_exception(e) message_type = RPCMessageType.ERROR if self.host_exceptions is True: log.exception(e) if not message.reply_to: log.info( 'RPC message without "reply_to" header %r call result ' "will be lost", message, ) await message.ack() return try: result_message = await self.serialize_message( payload=result, message_type=message_type, correlation_id=message.correlation_id, delivery_mode=message.delivery_mode, ) except asyncio.CancelledError: raise except Exception as e: result_message = await self.serialize_message( payload=e, message_type=RPCMessageType.ERROR, correlation_id=message.correlation_id, delivery_mode=message.delivery_mode, ) try: await self.channel.default_exchange.publish( result_message, message.reply_to, mandatory=False, ) except Exception: log.exception("Failed to send reply %r", result_message) await message.reject(requeue=False) return if message_type == RPCMessageType.ERROR.value: await message.ack() return await message.ack() def serialize_exception(self, exception: Exception) -> Any: """ Make python exception serializable """ return exception async def execute(self, func: CallbackType, payload: Dict[str, Any]) -> T: """ Executes rpc call. Might be overlapped. 
""" return await func(**payload) async def deserialize_message( self, message: AbstractIncomingMessage, ) -> Any: return self.deserialize(message.body) async def serialize_message( self, payload: Any, message_type: RPCMessageType, correlation_id: Optional[str], delivery_mode: DeliveryMode, **kwargs: Any, ) -> Message: return Message( self.serialize(payload), content_type=self.CONTENT_TYPE, correlation_id=correlation_id, delivery_mode=delivery_mode, timestamp=time.time(), type=message_type.value, **kwargs, ) async def call( self, method_name: str, kwargs: Optional[Dict[str, Any]] = None, *, expiration: Optional[int] = None, priority: int = 5, delivery_mode: DeliveryMode = DELIVERY_MODE, ) -> Any: """ Call remote method and awaiting result. :param method_name: Name of method :param kwargs: Methos kwargs :param expiration: If not `None` messages which staying in queue longer will be returned and :class:`asyncio.TimeoutError` will be raised. :param priority: Message priority :param delivery_mode: Call message delivery mode :raises asyncio.TimeoutError: when message expired :raises CancelledError: when called :func:`RPC.cancel` :raises RuntimeError: internal error """ future, correlation_id = self.create_future() message = await self.serialize_message( payload=kwargs or {}, message_type=RPCMessageType.CALL, correlation_id=correlation_id, delivery_mode=delivery_mode, reply_to=self.result_queue.name, headers={"From": self.result_queue.name}, priority=priority, ) if expiration is not None: message.expiration = expiration routing_key = self._format_routing_key(method_name) log.debug("Publishing calls for %s(%r)", routing_key, kwargs) exchange = self.rpc_exchange or self.channel.default_exchange await exchange.publish( message, routing_key=routing_key, mandatory=True, ) log.debug("Waiting RPC result for %s(%r)", routing_key, kwargs) return await future async def register( self, method_name: str, func: CallbackType, **kwargs: Any, ) -> Any: """ Method creates a queue with 
name which equal of `method_name` argument. Then subscribes this queue. :param method_name: Method name :param func: target function. Function **MUST** accept only keyword arguments. :param kwargs: arguments which will be passed to `queue_declare` :raises RuntimeError: Function already registered in this :class:`RPC` instance or method_name already used. """ arguments = kwargs.pop("arguments", {}) arguments.update({"x-dead-letter-exchange": self.DLX_NAME}) func = ensure_awaitable(func) kwargs["arguments"] = arguments routing_key = self._format_routing_key(method_name) queue = await self.channel.declare_queue(routing_key, **kwargs) if self.rpc_exchange: await queue.bind( self.rpc_exchange, routing_key, ) if func in self.consumer_tags: raise RuntimeError("Function already registered") if routing_key in self.routes: raise RuntimeError( "Method name already used for %r" % self.routes[routing_key], ) self.consumer_tags[func] = await queue.consume( partial(self.on_call_message, method_name), ) self.routes[routing_key] = func self.queues[func] = queue async def unregister(self, func: CallbackType) -> None: """ Cancels subscription to the method-queue. 
:param func: Function """ if func not in self.consumer_tags: return consumer_tag = self.consumer_tags.pop(func) queue = self.queues.pop(func) await queue.cancel(consumer_tag) self.routes.pop(queue.name) class JsonRPCError(RuntimeError): pass class JsonRPC(RPC): SERIALIZER = json CONTENT_TYPE = "application/json" def serialize(self, data: Any) -> bytes: return self.SERIALIZER.dumps( data, ensure_ascii=False, default=repr, ).encode() def serialize_exception(self, exception: Exception) -> Any: return { "error": { "type": exception.__class__.__name__, "message": repr(exception), "args": exception.args, }, } async def deserialize_message( self, message: AbstractIncomingMessage, ) -> Any: payload = await super().deserialize_message(message) if message.type == RPCMessageType.ERROR: payload = JsonRPCError("RPC exception", payload) return payload __all__ = ( "JsonRPC", "RPC", "RPCException", "RPCMessageType", ) python-aio-pika-9.5.5/aio_pika/pool.py000066400000000000000000000101211476164671100176520ustar00rootroot00000000000000import abc import asyncio from types import TracebackType from typing import ( Any, AsyncContextManager, Awaitable, Callable, Generic, Optional, Set, Tuple, Type, TypeVar, ) from aio_pika.log import get_logger from aio_pika.tools import create_task log = get_logger(__name__) class PoolInstance(abc.ABC): @abc.abstractmethod def close(self) -> Awaitable[None]: raise NotImplementedError T = TypeVar("T") ConstructorType = Callable[ ..., Awaitable[PoolInstance], ] class PoolInvalidStateError(RuntimeError): pass class Pool(Generic[T]): __slots__ = ( "loop", "__max_size", "__items", "__constructor", "__created", "__lock", "__constructor_args", "__item_set", "__closed", ) def __init__( self, constructor: ConstructorType, *args: Any, max_size: Optional[int] = None, loop: Optional[asyncio.AbstractEventLoop] = None, ): self.loop = loop or asyncio.get_event_loop() self.__closed = False self.__constructor: Callable[..., Awaitable[Any]] = constructor 
self.__constructor_args: Tuple[Any, ...] = args or () self.__created: int = 0 self.__item_set: Set[PoolInstance] = set() self.__items: asyncio.Queue = asyncio.Queue() self.__lock: asyncio.Lock = asyncio.Lock() self.__max_size: Optional[int] = max_size @property def is_closed(self) -> bool: return self.__closed def acquire(self) -> "PoolItemContextManager[T]": if self.__closed: raise PoolInvalidStateError("acquire operation on closed pool") return PoolItemContextManager[T](self) @property def _has_released(self) -> bool: return self.__items.qsize() > 0 @property def _is_overflow(self) -> bool: if self.__max_size: return self.__created >= self.__max_size or self._has_released return self._has_released async def _create_item(self) -> T: if self.__closed: raise PoolInvalidStateError("create item operation on closed pool") async with self.__lock: if self._is_overflow: return await self.__items.get() log.debug("Creating a new instance of %r", self.__constructor) item = await self.__constructor(*self.__constructor_args) self.__created += 1 self.__item_set.add(item) return item async def _get(self) -> T: if self.__closed: raise PoolInvalidStateError("get operation on closed pool") if self._is_overflow: return await self.__items.get() return await self._create_item() def put(self, item: T) -> None: if self.__closed: raise PoolInvalidStateError("put operation on closed pool") self.__items.put_nowait(item) async def close(self) -> None: async with self.__lock: self.__closed = True tasks = [] for item in self.__item_set: tasks.append(create_task(item.close)) if tasks: await asyncio.gather(*tasks, return_exceptions=True) async def __aenter__(self) -> "Pool": return self async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: if self.__closed: return await asyncio.ensure_future(self.close()) class PoolItemContextManager(Generic[T], AsyncContextManager): __slots__ = "pool", "item" def 
__init__(self, pool: Pool): self.pool = pool self.item: T async def __aenter__(self) -> T: # noinspection PyProtectedMember self.item = await self.pool._get() return self.item async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: if self.item is not None: self.pool.put(self.item) python-aio-pika-9.5.5/aio_pika/py.typed000066400000000000000000000000011476164671100200220ustar00rootroot00000000000000 python-aio-pika-9.5.5/aio_pika/queue.py000066400000000000000000000461241476164671100200410ustar00rootroot00000000000000import asyncio from asyncio import Future from functools import partial from types import TracebackType from typing import ( Any, Awaitable, Callable, Literal, Optional, Type, cast, overload, ) import aiormq from aiormq.abc import DeliveredMessage from exceptiongroup import ExceptionGroup from pamqp.common import Arguments from .abc import ( AbstractChannel, AbstractIncomingMessage, AbstractQueue, AbstractQueueIterator, ConsumerTag, TimeoutType, get_exchange_name, ) from .exceptions import QueueEmpty from .exchange import ExchangeParamType from .log import get_logger from .message import IncomingMessage from .tools import CallbackCollection, create_task, ensure_awaitable log = get_logger(__name__) async def consumer( callback: Callable[[AbstractIncomingMessage], Any], msg: DeliveredMessage, *, no_ack: bool, ) -> Any: message = IncomingMessage(msg, no_ack=no_ack) return await create_task(callback, message) class Queue(AbstractQueue): """ AMQP queue abstraction """ __slots__ = ( "__weakref__", "__get_lock", "close_callbacks", "channel", "name", "durable", "exclusive", "auto_delete", "arguments", "passive", "declaration_result", ) def __init__( self, channel: AbstractChannel, name: Optional[str], durable: bool, exclusive: bool, auto_delete: bool, arguments: Arguments, passive: bool = False, ): self.__get_lock = asyncio.Lock() self.close_callbacks = 
CallbackCollection(self) self.channel = channel self.name = name or "" self.durable = durable self.exclusive = exclusive self.auto_delete = auto_delete self.arguments = arguments self.passive = passive def __str__(self) -> str: return f"{self.name}" def __repr__(self) -> str: return ( f"<{self.__class__.__name__}({self}): " f"auto_delete={self.auto_delete}, " f"durable={self.durable}, " f"exclusive={self.exclusive}, " f"arguments={self.arguments!r}" ) async def declare( self, timeout: TimeoutType = None, ) -> aiormq.spec.Queue.DeclareOk: """ Declare queue. :param timeout: execution timeout :return: :class:`None` """ log.debug("Declaring queue: %r", self) channel = await self.channel.get_underlay_channel() self.declaration_result = await channel.queue_declare( queue=self.name, durable=self.durable, exclusive=self.exclusive, auto_delete=self.auto_delete, arguments=self.arguments, passive=self.passive, timeout=timeout, ) if self.declaration_result.queue is not None: self.name = self.declaration_result.queue else: self.name = "" return self.declaration_result async def bind( self, exchange: ExchangeParamType, routing_key: Optional[str] = None, *, arguments: Arguments = None, timeout: TimeoutType = None, ) -> aiormq.spec.Queue.BindOk: """ A binding is a relationship between an exchange and a queue. This can be simply read as: the queue is interested in messages from this exchange. Bindings can take an extra routing_key parameter. To avoid the confusion with a basic_publish parameter we're going to call it a binding key. :param exchange: :class:`aio_pika.exchange.Exchange` instance :param routing_key: routing key :param arguments: additional arguments :param timeout: execution timeout :raises asyncio.TimeoutError: when the binding timeout period has elapsed. 
:return: :class:`None` """ if routing_key is None: routing_key = self.name log.debug( "Binding queue %r: exchange=%r, routing_key=%r, arguments=%r", self, exchange, routing_key, arguments, ) channel = await self.channel.get_underlay_channel() return await channel.queue_bind( self.name, exchange=get_exchange_name(exchange), routing_key=routing_key, arguments=arguments, timeout=timeout, ) async def unbind( self, exchange: ExchangeParamType, routing_key: Optional[str] = None, arguments: Arguments = None, timeout: TimeoutType = None, ) -> aiormq.spec.Queue.UnbindOk: """ Remove binding from exchange for this :class:`Queue` instance :param exchange: :class:`aio_pika.exchange.Exchange` instance :param routing_key: routing key :param arguments: additional arguments :param timeout: execution timeout :raises asyncio.TimeoutError: when the unbinding timeout period has elapsed. :return: :class:`None` """ if routing_key is None: routing_key = self.name log.debug( "Unbinding queue %r: exchange=%r, routing_key=%r, arguments=%r", self, exchange, routing_key, arguments, ) channel = await self.channel.get_underlay_channel() return await channel.queue_unbind( queue=self.name, exchange=get_exchange_name(exchange), routing_key=routing_key, arguments=arguments, timeout=timeout, ) async def consume( self, callback: Callable[[AbstractIncomingMessage], Awaitable[Any]], no_ack: bool = False, exclusive: bool = False, arguments: Arguments = None, consumer_tag: Optional[ConsumerTag] = None, timeout: TimeoutType = None, ) -> ConsumerTag: """ Start to consuming the :class:`Queue`. :param timeout: :class:`asyncio.TimeoutError` will be raises when the Future was not finished after this time. :param callback: Consuming callback. Should be a coroutine function. :param no_ack: if :class:`True` you don't need to call :func:`aio_pika.message.IncomingMessage.ack` :param exclusive: Makes this queue exclusive. 
Exclusive queues may only be accessed by the current connection, and are deleted when that connection closes. Passive declaration of an exclusive queue by other connections are not allowed. :param arguments: additional arguments :param consumer_tag: optional consumer tag :raises asyncio.TimeoutError: when the consuming timeout period has elapsed. :return str: consumer tag :class:`str` """ log.debug("Start to consuming queue: %r", self) callback = ensure_awaitable(callback) channel = await self.channel.get_underlay_channel() consume_result = await channel.basic_consume( queue=self.name, consumer_callback=partial( consumer, callback, no_ack=no_ack, ), exclusive=exclusive, no_ack=no_ack, arguments=arguments, consumer_tag=consumer_tag, timeout=timeout, ) # consumer_tag property is Optional[str] in practice this check # should never take place, however, it protects against the case # if the `None` comes from pamqp if consume_result.consumer_tag is None: raise RuntimeError("Consumer tag is None") return consume_result.consumer_tag async def cancel( self, consumer_tag: ConsumerTag, timeout: TimeoutType = None, nowait: bool = False, ) -> aiormq.spec.Basic.CancelOk: """ This method cancels a consumer. This does not affect already delivered messages, but it does mean the server will not send any more messages for that consumer. The client may receive an arbitrary number of messages in between sending the cancel method and receiving the cancel-ok reply. It may also be sent from the server to the client in the event of the consumer being unexpectedly cancelled (i.e. cancelled for any reason other than the server receiving the corresponding basic.cancel from the client). This allows clients to be notified of the loss of consumers due to events such as queue deletion. 
:param consumer_tag: consumer tag returned by :func:`~aio_pika.Queue.consume` :param timeout: execution timeout :param bool nowait: Do not expect a Basic.CancelOk response :return: Basic.CancelOk when operation completed successfully """ channel = await self.channel.get_underlay_channel() return await channel.basic_cancel( consumer_tag=consumer_tag, nowait=nowait, timeout=timeout, ) @overload async def get( self, *, no_ack: bool = False, fail: Literal[True] = ..., timeout: TimeoutType = ..., ) -> IncomingMessage: ... @overload async def get( self, *, no_ack: bool = False, fail: Literal[False] = ..., timeout: TimeoutType = ..., ) -> Optional[IncomingMessage]: ... async def get( self, *, no_ack: bool = False, fail: bool = True, timeout: TimeoutType = 5, ) -> Optional[IncomingMessage]: """ Get message from the queue. :param no_ack: if :class:`True` you don't need to call :func:`aio_pika.message.IncomingMessage.ack` :param timeout: execution timeout :param fail: Should return :class:`None` instead of raise an exception :class:`aio_pika.exceptions.QueueEmpty`. :return: :class:`aio_pika.message.IncomingMessage` """ channel = await self.channel.get_underlay_channel() msg: DeliveredMessage = await channel.basic_get( self.name, no_ack=no_ack, timeout=timeout, ) if isinstance(msg.delivery, aiormq.spec.Basic.GetEmpty): if fail: raise QueueEmpty return None return IncomingMessage(msg, no_ack=no_ack) async def purge( self, no_wait: bool = False, timeout: TimeoutType = None, ) -> aiormq.spec.Queue.PurgeOk: """ Purge all messages from the queue. :param no_wait: no wait response :param timeout: execution timeout :return: :class:`None` """ log.info("Purging queue: %r", self) channel = await self.channel.get_underlay_channel() return await channel.queue_purge( self.name, nowait=no_wait, timeout=timeout, ) async def delete( self, *, if_unused: bool = True, if_empty: bool = True, timeout: TimeoutType = None, ) -> aiormq.spec.Queue.DeleteOk: """ Delete the queue. 
:param if_unused: Perform delete only when unused :param if_empty: Perform delete only when empty :param timeout: execution timeout :return: :class:`None` """ log.info("Deleting %r", self) channel = await self.channel.get_underlay_channel() return await channel.queue_delete( self.name, if_unused=if_unused, if_empty=if_empty, timeout=timeout, ) def __aiter__(self) -> "AbstractQueueIterator": return self.iterator() def iterator(self, **kwargs: Any) -> "AbstractQueueIterator": """ Returns an iterator for async for expression. Full example: .. code-block:: python import aio_pika async def main(): connection = await aio_pika.connect() async with connection: channel = await connection.channel() queue = await channel.declare_queue('test') async with queue.iterator() as q: async for message in q: print(message.body) When your program runs with run_forever the iterator will be closed in background. In this case the context processor for iterator might be skipped and the queue might be used in the "async for" expression directly. .. 
code-block:: python import aio_pika async def main(): connection = await aio_pika.connect() async with connection: channel = await connection.channel() queue = await channel.declare_queue('test') async for message in queue: print(message.body) :return: QueueIterator """ return QueueIterator(self, **kwargs) class QueueIterator(AbstractQueueIterator): DEFAULT_CLOSE_TIMEOUT = 5 @property def consumer_tag(self) -> Optional[ConsumerTag]: return getattr(self, "_consumer_tag", None) async def close(self) -> None: await self._on_close(self._amqp_queue, None) if not self._closed.done(): self._closed.set_result(True) async def _set_closed( self, _channel: Optional[AbstractQueue], exc: Optional[BaseException] ) -> None: if not self._closed.done(): self._closed.set_result(True) async def _on_close( self, _channel: Optional[AbstractQueue], _exc: Optional[BaseException] ) -> None: log.debug("Cancelling queue iterator %r", self) if not hasattr(self, "_consumer_tag"): log.debug("Queue iterator %r already cancelled", self) return if self._amqp_queue.channel.is_closed: log.debug("Queue iterator %r channel closed", self) return log.debug("Basic.cancel for %r", self.consumer_tag) consumer_tag = self._consumer_tag del self._consumer_tag self._amqp_queue.close_callbacks.discard(self._on_close) await self._amqp_queue.cancel(consumer_tag) log.debug("Queue iterator %r closed", self) if self._queue.empty(): return exceptions = [] # Reject all messages that have been received and in the buffer/cache. 
while not self._queue.empty(): msg = self._queue.get_nowait() if self._amqp_queue.channel.is_closed: log.warning( "Message %r lost when queue iterator %r channel closed", msg, self, ) elif self._consume_kwargs.get("no_ack", False): log.warning( "Message %r lost for consumer with no_ack %r", msg, self, ) else: try: await msg.nack(requeue=True, multiple=False) except Exception as e: log.warning( "Failed to nack message %r", msg, exc_info=e, ) exceptions.append(e) if exceptions: raise ExceptionGroup( "Unable to nack all messages", exceptions, ) def __str__(self) -> str: return f"queue[{self._amqp_queue}](...)" def __repr__(self) -> str: return ( f"<{self.__class__.__name__}: " f"queue={self._amqp_queue.name!r} " f"ctag={self.consumer_tag!r}>" ) def __init__(self, queue: Queue, **kwargs: Any): self._consumer_tag: ConsumerTag self._amqp_queue: Queue = queue self._queue = asyncio.Queue() self._closed = asyncio.get_running_loop().create_future() self._message_or_closed = asyncio.Event() self._timeout_event = asyncio.Event() self._consume_kwargs = kwargs cast( asyncio.Future, self._amqp_queue.channel.closed() ).add_done_callback(self._propagate_closed) self._closed.add_done_callback(self._propagate_closed) self._amqp_queue.close_callbacks.add(self._on_close, weak=True) self._amqp_queue.close_callbacks.add( self._set_closed, weak=True ) def _propagate_closed(self, _: Future) -> None: self._message_or_closed.set() async def on_message(self, message: AbstractIncomingMessage) -> None: await self._queue.put(message) self._message_or_closed.set() async def consume(self) -> None: self._consumer_tag = await self._amqp_queue.consume( self.on_message, **self._consume_kwargs, ) def __aiter__(self) -> "AbstractQueueIterator": return self async def __aenter__(self) -> "AbstractQueueIterator": if not hasattr(self, "_consumer_tag"): await self.consume() return self async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: 
Optional[TracebackType], ) -> None: if hasattr(self, "__closing"): try: await self.__closing finally: del self.__closing else: await self.close() async def __anext__(self) -> AbstractIncomingMessage: if self._closed.done(): raise StopAsyncIteration if not hasattr(self, "_consumer_tag"): await self.consume() timeout: Optional[float] = self._consume_kwargs.get("timeout") if not self._message_or_closed.is_set(): coroutine: Awaitable[Any] = self._message_or_closed.wait() if timeout is not None and timeout > 0: coroutine = asyncio.wait_for(coroutine, timeout=timeout) try: await coroutine except (asyncio.TimeoutError, asyncio.CancelledError): if timeout is not None: timeout = ( timeout if timeout > 0 else self.DEFAULT_CLOSE_TIMEOUT ) log.info( "%r closing with timeout %d seconds", self, timeout, ) task = asyncio.create_task(self.close()) coroutine = task if timeout is not None: coroutine = asyncio.wait_for( asyncio.shield(coroutine), timeout=timeout, ) try: await coroutine except asyncio.TimeoutError: self.__closing = task raise if self._queue.empty(): raise StopAsyncIteration msg = self._queue.get_nowait() if ( self._queue.empty() and not self._amqp_queue.channel.is_closed and not self._closed.done() ): self._message_or_closed.clear() return msg __all__ = ("Queue", "QueueIterator", "ConsumerTag") python-aio-pika-9.5.5/aio_pika/robust_channel.py000066400000000000000000000211561476164671100217210ustar00rootroot00000000000000import asyncio import warnings from collections import defaultdict from itertools import chain from typing import Any, DefaultDict, Dict, Optional, Set, Type, Union from warnings import warn import aiormq from .abc import ( AbstractConnection, AbstractRobustChannel, AbstractRobustExchange, AbstractRobustQueue, TimeoutType, ) from .channel import Channel from .exchange import Exchange, ExchangeType from .log import get_logger from .queue import Queue from .robust_exchange import RobustExchange from .robust_queue import RobustQueue from .tools import 
CallbackCollection log = get_logger(__name__) class RobustChannel(Channel, AbstractRobustChannel): # type: ignore """ Channel abstraction """ QUEUE_CLASS: Type[Queue] = RobustQueue EXCHANGE_CLASS: Type[Exchange] = RobustExchange RESTORE_RETRY_DELAY: int = 2 _exchanges: DefaultDict[str, Set[AbstractRobustExchange]] _queues: DefaultDict[str, Set[RobustQueue]] default_exchange: RobustExchange def __init__( self, connection: AbstractConnection, channel_number: Optional[int] = None, publisher_confirms: bool = True, on_return_raises: bool = False, ): """ :param connection: :class:`aio_pika.adapter.AsyncioConnection` instance :param loop: Event loop (:func:`asyncio.get_event_loop()` when :class:`None`) :param future_store: :class:`aio_pika.common.FutureStore` instance :param publisher_confirms: False if you don't need delivery confirmations (in pursuit of performance) """ super().__init__( connection=connection, channel_number=channel_number, publisher_confirms=publisher_confirms, on_return_raises=on_return_raises, ) self._exchanges = defaultdict(set) self._queues = defaultdict(set) self._prefetch_count: int = 0 self._prefetch_size: int = 0 self._global_qos: bool = False self.reopen_callbacks = CallbackCollection(self) self.__restore_lock = asyncio.Lock() self.__restored = asyncio.Event() self.close_callbacks.remove(self._set_closed_callback) async def ready(self) -> None: await self._connection.ready() await self.__restored.wait() async def get_underlay_channel(self) -> aiormq.abc.AbstractChannel: await self._connection.ready() return await super().get_underlay_channel() async def restore(self, channel: Any = None) -> None: if channel is not None: warnings.warn( "Channel argument will be ignored because you " "don't need to pass this anymore.", DeprecationWarning, ) async with self.__restore_lock: if self.__restored.is_set(): return await self.reopen() self.__restored.set() async def _on_close( self, closing: asyncio.Future ) -> Optional[BaseException]: exc = await 
super()._on_close(closing) if isinstance(exc, asyncio.CancelledError): # This happens only if the channel is forced to close from the # outside, for example, if the connection is closed. # Of course, here you need to exit from this function # as soon as possible and to avoid a recovery attempt. self.__restored.clear() if not self._closed.done(): self._closed.set_result(True) return exc in_restore_state = not self.__restored.is_set() self.__restored.clear() if self._closed.done() or in_restore_state: return exc await self.restore() return exc async def close( self, exc: Optional[aiormq.abc.ExceptionType] = None, ) -> None: # Avoid recovery when channel is explicitely closed using this method self.__restored.clear() await super().close(exc) async def reopen(self) -> None: await super().reopen() await self.reopen_callbacks() async def _on_open(self) -> None: if not hasattr(self, "default_exchange"): await super()._on_open() exchanges = tuple(chain(*self._exchanges.values())) queues = tuple(chain(*self._queues.values())) channel = await self.get_underlay_channel() await channel.basic_qos( prefetch_count=self._prefetch_count, prefetch_size=self._prefetch_size, ) for exchange in exchanges: await exchange.restore() for queue in queues: await queue.restore() if hasattr(self, "default_exchange"): self.default_exchange.channel = self self.__restored.set() async def set_qos( self, prefetch_count: int = 0, prefetch_size: int = 0, global_: bool = False, timeout: TimeoutType = None, all_channels: Optional[bool] = None, ) -> aiormq.spec.Basic.QosOk: if all_channels is not None: warn('Use "global_" instead of "all_channels"', DeprecationWarning) global_ = all_channels await self.ready() self._prefetch_count = prefetch_count self._prefetch_size = prefetch_size self._global_qos = global_ return await super().set_qos( prefetch_count=prefetch_count, prefetch_size=prefetch_size, global_=global_, timeout=timeout, ) async def declare_exchange( self, name: str, type: Union[ExchangeType, 
str] = ExchangeType.DIRECT, durable: bool = False, auto_delete: bool = False, internal: bool = False, passive: bool = False, arguments: Optional[Dict[str, Any]] = None, timeout: TimeoutType = None, robust: bool = True, ) -> AbstractRobustExchange: """ :param robust: If True, the exchange will be re-declared during reconnection. Set to False for temporary exchanges that should not be restored. """ await self.ready() exchange = ( await super().declare_exchange( name=name, type=type, durable=durable, auto_delete=auto_delete, internal=internal, passive=passive, arguments=arguments, timeout=timeout, ) ) if not internal and robust: # noinspection PyTypeChecker self._exchanges[name].add(exchange) # type: ignore return exchange # type: ignore async def exchange_delete( self, exchange_name: str, timeout: TimeoutType = None, if_unused: bool = False, nowait: bool = False, ) -> aiormq.spec.Exchange.DeleteOk: await self.ready() result = await super().exchange_delete( exchange_name=exchange_name, timeout=timeout, if_unused=if_unused, nowait=nowait, ) self._exchanges.pop(exchange_name, None) return result async def declare_queue( self, name: Optional[str] = None, *, durable: bool = False, exclusive: bool = False, passive: bool = False, auto_delete: bool = False, arguments: Optional[Dict[str, Any]] = None, timeout: TimeoutType = None, robust: bool = True, ) -> AbstractRobustQueue: """ :param robust: If True, the queue will be re-declared during reconnection. Set to False for temporary queues that should not be restored. 
""" await self.ready() queue: RobustQueue = await super().declare_queue( # type: ignore name=name, durable=durable, exclusive=exclusive, passive=passive, auto_delete=auto_delete, arguments=arguments, timeout=timeout, ) if robust: self._queues[queue.name].add(queue) return queue async def queue_delete( self, queue_name: str, timeout: TimeoutType = None, if_unused: bool = False, if_empty: bool = False, nowait: bool = False, ) -> aiormq.spec.Queue.DeleteOk: await self.ready() result = await super().queue_delete( queue_name=queue_name, timeout=timeout, if_unused=if_unused, if_empty=if_empty, nowait=nowait, ) self._queues.pop(queue_name, None) return result __all__ = ("RobustChannel",) python-aio-pika-9.5.5/aio_pika/robust_connection.py000066400000000000000000000244131476164671100224470ustar00rootroot00000000000000import asyncio from ssl import SSLContext from typing import Any, Optional, Tuple, Type, Union from weakref import WeakSet import aiormq.abc from aiormq.connection import parse_bool, parse_timeout from pamqp.common import FieldTable from yarl import URL from .abc import ( AbstractRobustChannel, AbstractRobustConnection, ConnectionParameter, SSLOptions, TimeoutType, ) from .connection import Connection, make_url from .exceptions import CONNECTION_EXCEPTIONS from .log import get_logger from .robust_channel import RobustChannel from .tools import CallbackCollection log = get_logger(__name__) class RobustConnection(Connection, AbstractRobustConnection): """Robust connection""" CHANNEL_REOPEN_PAUSE = 1 CHANNEL_CLASS: Type[RobustChannel] = RobustChannel PARAMETERS: Tuple[ConnectionParameter, ...] 
= Connection.PARAMETERS + ( ConnectionParameter( name="reconnect_interval", parser=parse_timeout, default="5", ), ConnectionParameter( name="fail_fast", parser=parse_bool, default="1", ), ) def __init__( self, url: URL, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any, ): super().__init__(url=url, loop=loop, **kwargs) self.reconnect_interval = self.kwargs.pop("reconnect_interval") self.connection_attempt: int = 0 self.__fail_fast_future = self.loop.create_future() self.fail_fast = self.kwargs.pop("fail_fast", True) if not self.fail_fast: self.__fail_fast_future.set_result(None) self.__channels: WeakSet[AbstractRobustChannel] = WeakSet() self.__connection_close_event = asyncio.Event() self.__connect_timeout: Optional[TimeoutType] = None self.__reconnection_task: Optional[asyncio.Task] = None self._reconnect_lock = asyncio.Lock() self.reconnect_callbacks = CallbackCollection(self) self.__connection_close_event.set() @property def reconnecting(self) -> bool: return self._reconnect_lock.locked() def __repr__(self) -> str: return ( f'<{self.__class__.__name__}: "{self}" ' f"{len(self.__channels)} channels>" ) async def _on_connection_close(self, closing: asyncio.Future) -> None: await super()._on_connection_close(closing) if self._close_called or self.is_closed: return log.info( "Connection to %s closed. Reconnecting after %r seconds.", self, self.reconnect_interval, ) self.__connection_close_event.set() async def _on_connected(self) -> None: await super()._on_connected() transport = self.transport if transport is None: raise RuntimeError("No active transport for connection %r", self) try: # Make a copy of the channels to iterate on, to guard from # concurrent updates to the set. 
for channel in tuple(self.__channels): try: await channel.restore() except Exception: log.exception("Failed to reopen channel") raise except Exception as e: await self.close_callbacks(e) await asyncio.gather( transport.connection.close(e), return_exceptions=True, ) raise if self.connection_attempt: await self.reconnect_callbacks() self.connection_attempt += 1 self.__connection_close_event.clear() async def __connection_factory(self) -> None: log.debug("Starting connection factory for %r", self) while not self.is_closed and not self._close_called: log.debug("Waiting for connection close event for %r", self) await self.__connection_close_event.wait() if self.is_closed or self._close_called: return # noinspection PyBroadException try: self.transport = None self.connected.clear() log.debug("Connection attempt for %r", self) await Connection.connect(self, self.__connect_timeout) if not self.__fail_fast_future.done(): self.__fail_fast_future.set_result(None) log.debug("Connection made on %r", self) except CONNECTION_EXCEPTIONS as e: if not self.__fail_fast_future.done(): self.__fail_fast_future.set_exception(e) return log.warning( 'Connection attempt to "%s" failed: %s. ' "Reconnecting after %r seconds.", self, e, self.reconnect_interval, ) except Exception: log.exception( "Reconnect attempt failed %s. " "Retrying after %r seconds.", self, self.reconnect_interval, ) await asyncio.sleep(self.reconnect_interval) async def connect(self, timeout: TimeoutType = None) -> None: self.__connect_timeout = timeout if self.is_closed: raise RuntimeError(f"{self!r} connection closed") if self.reconnecting: raise RuntimeError( ( "Connect method called but connection " f"{self!r} is reconnecting right now." 
), self, ) if not self.__reconnection_task: self.__reconnection_task = self.loop.create_task( self.__connection_factory(), ) await self.__fail_fast_future await self.connected.wait() async def reconnect(self) -> None: if self.transport: await self.transport.connection.close() await self.connect() await self.reconnect_callbacks() def channel( self, channel_number: Optional[int] = None, publisher_confirms: bool = True, on_return_raises: bool = False, ) -> AbstractRobustChannel: channel: AbstractRobustChannel = super().channel( channel_number=channel_number, publisher_confirms=publisher_confirms, on_return_raises=on_return_raises, ) # type: ignore self.__channels.add(channel) return channel async def close( self, exc: Optional[aiormq.abc.ExceptionType] = asyncio.CancelledError, ) -> None: if self.__reconnection_task is not None: self.__reconnection_task.cancel() await asyncio.gather( self.__reconnection_task, return_exceptions=True, ) self.__reconnection_task = None return await super().close(exc) async def connect_robust( url: Union[str, URL, None] = None, *, host: str = "localhost", port: int = 5672, login: str = "guest", password: str = "guest", virtualhost: str = "/", ssl: bool = False, loop: Optional[asyncio.AbstractEventLoop] = None, ssl_options: Optional[SSLOptions] = None, ssl_context: Optional[SSLContext] = None, timeout: TimeoutType = None, client_properties: Optional[FieldTable] = None, connection_class: Type[AbstractRobustConnection] = RobustConnection, **kwargs: Any, ) -> AbstractRobustConnection: """Make connection to the broker. Example: .. code-block:: python import aio_pika async def main(): connection = await aio_pika.connect( "amqp://guest:guest@127.0.0.1/" ) Connect to localhost with default credentials: .. code-block:: python import aio_pika async def main(): connection = await aio_pika.connect() .. 
note:: The available keys for ssl_options parameter are: * cert_reqs * certfile * keyfile * ssl_version For an information on what the ssl_options can be set to reference the `official Python documentation`_ . Set connection name for RabbitMQ admin panel: .. code-block:: python # As URL parameter method read_connection = await connect( "amqp://guest:guest@localhost/?name=Read%20connection" ) # keyword method write_connection = await connect( client_properties={ 'connection_name': 'Write connection' } ) .. note: ``client_properties`` argument requires ``aiormq>=2.9`` URL string might contain ssl parameters e.g. `amqps://user:pass@host//?ca_certs=ca.pem&certfile=crt.pem&keyfile=key.pem` :param client_properties: add custom client capability. :param url: RFC3986_ formatted broker address. When :class:`None` will be used keyword arguments. :param host: hostname of the broker :param port: broker port 5672 by default :param login: username string. `'guest'` by default. :param password: password string. `'guest'` by default. :param virtualhost: virtualhost parameter. `'/'` by default :param ssl: use SSL for connection. Should be used with addition kwargs. :param ssl_options: A dict of values for the SSL connection. :param timeout: connection timeout in seconds :param loop: Event loop (:func:`asyncio.get_event_loop()` when :class:`None`) :param ssl_context: ssl.SSLContext instance :param connection_class: Factory of a new connection :param kwargs: addition parameters which will be passed to the connection. :return: :class:`aio_pika.connection.Connection` .. _RFC3986: https://goo.gl/MzgYAs .. 
_official Python documentation: https://goo.gl/pty9xA """ connection: AbstractRobustConnection = connection_class( make_url( url, host=host, port=port, login=login, password=password, virtualhost=virtualhost, ssl=ssl, ssl_options=ssl_options, client_properties=client_properties, **kwargs, ), loop=loop, ssl_context=ssl_context, **kwargs, ) await connection.connect(timeout=timeout) return connection __all__ = ( "RobustConnection", "connect_robust", ) python-aio-pika-9.5.5/aio_pika/robust_exchange.py000066400000000000000000000055111476164671100220700ustar00rootroot00000000000000import asyncio import warnings from typing import Any, Dict, Union import aiormq from pamqp.common import Arguments from .abc import ( AbstractChannel, AbstractExchange, AbstractRobustExchange, ExchangeParamType, TimeoutType, ) from .exchange import Exchange, ExchangeType from .log import get_logger log = get_logger(__name__) class RobustExchange(Exchange, AbstractRobustExchange): """ Exchange abstraction """ _bindings: Dict[Union[AbstractExchange, str], Dict[str, Any]] def __init__( self, channel: AbstractChannel, name: str, type: Union[ExchangeType, str] = ExchangeType.DIRECT, *, auto_delete: bool = False, durable: bool = False, internal: bool = False, passive: bool = False, arguments: Arguments = None, ): super().__init__( channel=channel, name=name, type=type, auto_delete=auto_delete, durable=durable, internal=internal, passive=passive, arguments=arguments, ) self._bindings = {} self.__restore_lock = asyncio.Lock() async def restore(self, channel: Any = None) -> None: if channel is not None: warnings.warn( "Channel argument will be ignored because you " "don't need to pass this anymore.", DeprecationWarning, ) async with self.__restore_lock: try: # special case for default exchange if self.name == "": return await self.declare() for exchange, kwargs in tuple(self._bindings.items()): await self.bind(exchange, **kwargs) except Exception: raise async def bind( self, exchange: 
ExchangeParamType, routing_key: str = "", *, arguments: Arguments = None, timeout: TimeoutType = None, robust: bool = True, ) -> aiormq.spec.Exchange.BindOk: result = await super().bind( exchange, routing_key=routing_key, arguments=arguments, timeout=timeout, ) if robust: self._bindings[exchange] = dict( routing_key=routing_key, arguments=arguments, ) return result async def unbind( self, exchange: ExchangeParamType, routing_key: str = "", arguments: Arguments = None, timeout: TimeoutType = None, ) -> aiormq.spec.Exchange.UnbindOk: result = await super().unbind( exchange, routing_key, arguments=arguments, timeout=timeout, ) self._bindings.pop(exchange, None) return result __all__ = ("RobustExchange",) python-aio-pika-9.5.5/aio_pika/robust_queue.py000066400000000000000000000113571476164671100214370ustar00rootroot00000000000000import uuid import warnings from typing import Any, Awaitable, Callable, Dict, Optional, Tuple, Union import aiormq from aiormq import ChannelInvalidStateError from pamqp.common import Arguments from .abc import ( AbstractChannel, AbstractExchange, AbstractIncomingMessage, AbstractQueueIterator, AbstractRobustQueue, ConsumerTag, TimeoutType, ) from .exchange import ExchangeParamType from .log import get_logger from .queue import Queue, QueueIterator log = get_logger(__name__) class RobustQueue(Queue, AbstractRobustQueue): __slots__ = ("_consumers", "_bindings") _consumers: Dict[ConsumerTag, Dict[str, Any]] _bindings: Dict[Tuple[Union[AbstractExchange, str], str], Dict[str, Any]] def __init__( self, channel: AbstractChannel, name: Optional[str], durable: bool = False, exclusive: bool = False, auto_delete: bool = False, arguments: Arguments = None, passive: bool = False, ): super().__init__( channel=channel, name=name or f"amq_{uuid.uuid4().hex}", durable=durable, exclusive=exclusive, auto_delete=auto_delete, arguments=arguments, passive=passive, ) self._consumers = {} self._bindings = {} async def restore(self, channel: Any = None) -> None: if 
channel is not None: warnings.warn( "Channel argument will be ignored because you " "don't need to pass this anymore.", DeprecationWarning, ) await self.declare() bindings = tuple(self._bindings.items()) consumers = tuple(self._consumers.items()) for (exchange, routing_key), kwargs in bindings: await self.bind(exchange, routing_key, **kwargs) for consumer_tag, kwargs in consumers: await self.consume(consumer_tag=consumer_tag, **kwargs) async def bind( self, exchange: ExchangeParamType, routing_key: Optional[str] = None, *, arguments: Arguments = None, timeout: TimeoutType = None, robust: bool = True, ) -> aiormq.spec.Queue.BindOk: if routing_key is None: routing_key = self.name result = await super().bind( exchange=exchange, routing_key=routing_key, arguments=arguments, timeout=timeout, ) if robust: self._bindings[(exchange, routing_key)] = dict( arguments=arguments, ) return result async def unbind( self, exchange: ExchangeParamType, routing_key: Optional[str] = None, arguments: Arguments = None, timeout: TimeoutType = None, ) -> aiormq.spec.Queue.UnbindOk: if routing_key is None: routing_key = self.name result = await super().unbind( exchange, routing_key, arguments, timeout, ) self._bindings.pop((exchange, routing_key), None) return result async def consume( self, callback: Callable[[AbstractIncomingMessage], Awaitable[Any]], no_ack: bool = False, exclusive: bool = False, arguments: Arguments = None, consumer_tag: Optional[ConsumerTag] = None, timeout: TimeoutType = None, robust: bool = True, ) -> ConsumerTag: consumer_tag = await super().consume( consumer_tag=consumer_tag, timeout=timeout, callback=callback, no_ack=no_ack, exclusive=exclusive, arguments=arguments, ) if robust: self._consumers[consumer_tag] = dict( callback=callback, no_ack=no_ack, exclusive=exclusive, arguments=arguments, ) return consumer_tag async def cancel( self, consumer_tag: ConsumerTag, timeout: TimeoutType = None, nowait: bool = False, ) -> aiormq.spec.Basic.CancelOk: result = await 
super().cancel(consumer_tag, timeout, nowait) self._consumers.pop(consumer_tag, None) return result def iterator(self, **kwargs: Any) -> AbstractQueueIterator: return RobustQueueIterator(self, **kwargs) class RobustQueueIterator(QueueIterator): def __init__(self, queue: Queue, **kwargs: Any): super().__init__(queue, **kwargs) self._amqp_queue.close_callbacks.discard(self._set_closed) async def consume(self) -> None: while True: try: return await super().consume() except ChannelInvalidStateError: await self._amqp_queue.channel.get_underlay_channel() __all__ = ("RobustQueue",) python-aio-pika-9.5.5/aio_pika/tools.py000066400000000000000000000243311476164671100200510ustar00rootroot00000000000000from __future__ import annotations import asyncio import inspect import warnings from functools import wraps from itertools import chain from threading import Lock from typing import ( AbstractSet, Any, Awaitable, Callable, Coroutine, Generator, Iterator, List, MutableSet, Optional, TypeVar, Union, Generic, ) from weakref import ReferenceType, WeakSet, ref from aio_pika.log import get_logger try: from typing import ParamSpec, Protocol except ImportError: from typing_extensions import ParamSpec, Protocol # type: ignore log = get_logger(__name__) T = TypeVar("T") def iscoroutinepartial(fn: Callable[..., Any]) -> bool: """ Use Python 3.8's inspect.iscoroutinefunction() instead """ warnings.warn( "Use inspect.iscoroutinefunction() instead.", DeprecationWarning ) return asyncio.iscoroutinefunction(fn) def _task_done(future: asyncio.Future) -> None: try: exc = future.exception() if exc is not None: raise exc except asyncio.CancelledError: pass def create_task( func: Callable[..., Union[Coroutine[Any, Any, T], Awaitable[T]]], *args: Any, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any, ) -> Awaitable[T]: loop = loop or asyncio.get_event_loop() if inspect.iscoroutinefunction(func): task = loop.create_task(func(*args, **kwargs)) task.add_done_callback(_task_done) return 
task def run(future: asyncio.Future) -> Optional[asyncio.Future]: if future.done(): return None try: future.set_result(func(*args, **kwargs)) except Exception as e: future.set_exception(e) return future future = loop.create_future() future.add_done_callback(_task_done) loop.call_soon(run, future) return future _Sender = TypeVar("_Sender", contravariant=True) _Params = ParamSpec("_Params") _Return = TypeVar("_Return", covariant=True) class CallbackType(Protocol[_Sender, _Params, _Return]): def __call__( self, __sender: Optional[_Sender], /, *args: _Params.args, **kwargs: _Params.kwargs, ) -> Union[_Return, Awaitable[_Return]]: ... class StubAwaitable: __slots__ = () def __await__(self) -> Generator[Any, Any, None]: yield STUB_AWAITABLE = StubAwaitable() class CallbackCollection( MutableSet[ Union[ CallbackType[_Sender, _Params, Any], "CallbackCollection[Any, _Params]", ], ], Generic[_Sender, _Params], ): __slots__ = ( "__weakref__", "__sender", "__callbacks", "__weak_callbacks", "__lock", ) def __init__(self, sender: Union[_Sender, ReferenceType[_Sender]]): self.__sender: ReferenceType if isinstance(sender, ReferenceType): self.__sender = sender else: self.__sender = ref(sender) self.__callbacks: CallbackSetType = set() self.__weak_callbacks: MutableSet[ Union[ CallbackType[_Sender, _Params, Any], CallbackCollection[Any, _Params], ], ] = WeakSet() self.__lock: Lock = Lock() def add( self, callback: Union[ CallbackType[_Sender, _Params, Any], CallbackCollection[Any, _Params], ], weak: bool = False ) -> None: if self.is_frozen: raise RuntimeError("Collection frozen") if not callable(callback): raise ValueError("Callback is not callable") with self.__lock: if weak or isinstance(callback, CallbackCollection): self.__weak_callbacks.add(callback) else: self.__callbacks.add(callback) # type: ignore def remove( self, callback: Union[ CallbackType[_Sender, _Params, Any], CallbackCollection[Any, _Params], ], ) -> None: if self.is_frozen: raise RuntimeError("Collection 
frozen") with self.__lock: try: self.__callbacks.remove(callback) # type: ignore except KeyError: self.__weak_callbacks.remove(callback) def discard( self, callback: Union[ CallbackType[_Sender, _Params, Any], CallbackCollection[Any, _Params], ], ) -> None: if self.is_frozen: raise RuntimeError("Collection frozen") with self.__lock: if callback in self.__callbacks: self.__callbacks.remove(callback) # type: ignore elif callback in self.__weak_callbacks: self.__weak_callbacks.remove(callback) def clear(self) -> None: if self.is_frozen: raise RuntimeError("Collection frozen") with self.__lock: self.__callbacks.clear() # type: ignore self.__weak_callbacks.clear() @property def is_frozen(self) -> bool: return isinstance(self.__callbacks, frozenset) def freeze(self) -> None: if self.is_frozen: raise RuntimeError("Collection already frozen") with self.__lock: self.__callbacks = frozenset(self.__callbacks) self.__weak_callbacks = WeakSet(self.__weak_callbacks) def unfreeze(self) -> None: if not self.is_frozen: raise RuntimeError("Collection is not frozen") with self.__lock: self.__callbacks = set(self.__callbacks) self.__weak_callbacks = WeakSet(self.__weak_callbacks) def __contains__(self, x: object) -> bool: return x in self.__callbacks or x in self.__weak_callbacks def __len__(self) -> int: return len(self.__callbacks) + len(self.__weak_callbacks) def __iter__(self) -> Iterator[ Union[ CallbackType[_Sender, _Params, Any], CallbackCollection[_Sender, _Params], ], ]: return iter(chain(self.__callbacks, self.__weak_callbacks)) def __bool__(self) -> bool: return bool(self.__callbacks) or bool(self.__weak_callbacks) def __copy__(self) -> CallbackCollection[_Sender, _Params]: instance = self.__class__(self.__sender) with self.__lock: for cb in self.__callbacks: instance.add(cb, weak=False) for cb in self.__weak_callbacks: instance.add(cb, weak=True) if self.is_frozen: instance.freeze() return instance def __call__( self, *args: _Params.args, **kwargs: _Params.kwargs, ) -> 
Awaitable[Any]: futures: List[asyncio.Future] = [] with self.__lock: sender = self.__sender() for cb in self: try: if isinstance(cb, CallbackCollection): result = cb(*args, **kwargs) else: result = cb(sender, *args, **kwargs) if inspect.isawaitable(result): futures.append(asyncio.ensure_future(result)) except Exception: log.exception("Callback %r error", cb) if not futures: return STUB_AWAITABLE return asyncio.gather(*futures, return_exceptions=True) def __hash__(self) -> int: return id(self) class OneShotCallback: __slots__ = ("loop", "finished", "__lock", "callback", "__task") def __init__(self, callback: Callable[..., Awaitable[T]]): self.callback: Callable[..., Awaitable[T]] = callback self.loop = asyncio.get_event_loop() self.finished: asyncio.Event = asyncio.Event() self.__lock: asyncio.Lock = asyncio.Lock() self.__task: Optional[asyncio.Future] = None def __repr__(self) -> str: return f"<{self.__class__.__name__}: cb={self.callback!r}>" def wait(self) -> Awaitable[Any]: try: return self.finished.wait() except asyncio.CancelledError: if self.__task is not None: self.__task.cancel() raise async def __task_inner(self, *args: Any, **kwargs: Any) -> None: async with self.__lock: if self.finished.is_set(): return try: await self.callback(*args, **kwargs) finally: self.loop.call_soon(self.finished.set) del self.callback def __call__(self, *args: Any, **kwargs: Any) -> Awaitable[Any]: if self.finished.is_set() or self.__task is not None: return STUB_AWAITABLE self.__task = self.loop.create_task( self.__task_inner(*args, **kwargs), ) return self.__task def ensure_awaitable( func: Callable[_Params, Union[T, Awaitable[T]]], ) -> Callable[_Params, Awaitable[T]]: if inspect.iscoroutinefunction(func): return func if inspect.isfunction(func): warnings.warn( f"You probably registering the non-coroutine function {func!r}. " "This is deprecated and will be removed in future releases. 
" "Moreover, it can block the event loop", DeprecationWarning, ) @wraps(func) async def wrapper(*args: _Params.args, **kwargs: _Params.kwargs) -> T: nonlocal func result = func(*args, **kwargs) if not hasattr(result, "__await__"): warnings.warn( f"Function {func!r} returned a non awaitable result." "This may be bad for performance or may blocks the " "event loop, you should pay attention to this. This " "warning is here in an attempt to maintain backwards " "compatibility and will simply be removed in " "future releases.", DeprecationWarning, ) return result return await result return wrapper CallbackSetType = AbstractSet[ Union[ CallbackType[_Sender, _Params, None], CallbackCollection[_Sender, _Params], ], ] __all__ = ( "CallbackCollection", "CallbackSetType", "CallbackType", "OneShotCallback", "create_task", "ensure_awaitable", "iscoroutinepartial", ) python-aio-pika-9.5.5/aio_pika/transaction.py000066400000000000000000000040211476164671100212300ustar00rootroot00000000000000from types import TracebackType from typing import Optional, Type import aiormq from pamqp import commands from .abc import ( AbstractChannel, AbstractTransaction, TimeoutType, TransactionState, ) class Transaction(AbstractTransaction): def __repr__(self) -> str: return f"<{self.__class__.__name__} {self.state.value}>" def __str__(self) -> str: return self.state.value def __init__(self, channel: AbstractChannel): self.__channel = channel self.state: TransactionState = TransactionState.CREATED @property def channel(self) -> AbstractChannel: if self.__channel is None: raise RuntimeError("Channel not opened") if self.__channel.is_closed: raise RuntimeError("Closed channel") return self.__channel async def select( self, timeout: TimeoutType = None, ) -> aiormq.spec.Tx.SelectOk: channel = await self.channel.get_underlay_channel() result = await channel.tx_select(timeout=timeout) self.state = TransactionState.STARTED return result async def rollback( self, timeout: TimeoutType = None, ) -> 
commands.Tx.RollbackOk: channel = await self.channel.get_underlay_channel() result = await channel.tx_rollback(timeout=timeout) self.state = TransactionState.ROLLED_BACK return result async def commit( self, timeout: TimeoutType = None, ) -> commands.Tx.CommitOk: channel = await self.channel.get_underlay_channel() result = await channel.tx_commit(timeout=timeout) self.state = TransactionState.COMMITED return result async def __aenter__(self) -> "Transaction": await self.select() return self async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: if exc_type: await self.rollback() else: await self.commit() python-aio-pika-9.5.5/docs/000077500000000000000000000000001476164671100155105ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/Makefile000066400000000000000000000011411476164671100171450ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build SPHINXPROJ = aio-pika SOURCEDIR = source BUILDDIR = build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
%: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)python-aio-pika-9.5.5/docs/requirements.txt000066400000000000000000000000441476164671100207720ustar00rootroot00000000000000# ONLY FOR ReadTheDocs autodoc furo python-aio-pika-9.5.5/docs/source/000077500000000000000000000000001476164671100170105ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/_static/000077500000000000000000000000001476164671100204365ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/_static/.DS_Store000066400000000000000000000240041476164671100221210ustar00rootroot00000000000000Bud1    conbwspbl  @ @ @ @ faviconbwspblobbplist00  ]ShowStatusBar[ShowSidebar[ShowToolbar[ShowTabView_ContainerShowSidebar\WindowBounds[ShowPathbar  _{{164, 108}, {770, 436}}%1=I`myz{|}~faviconlsvCblobbplist00 GHGI _viewOptionsVersion_showIconPreviewWcolumns_calculateAllSizes_scrollPositionYXtextSize_scrollPositionXZsortColumnXiconSize_useRelativeDates !%*/49=B  WvisibleUwidthYascendingZidentifier , TnameXubiquity#  \dateModified$[dateCreated ') aTsize , . 
s Tkind1 3d Ulabel6 8K Wversion < Xcomments?A^dateLastOpenedCYdateAdded##@(#@0 2DL`r{  )234@IJLMR[\^_dmnpqwKfaviconlsvpblobbplist00 FGFH _viewOptionsVersion_showIconPreviewWcolumns_calculateAllSizes_scrollPositionYXtextSize_scrollPositionXZsortColumnXiconSize_useRelativeDates  %)-27 K B  ##@(#@0 2DL`r{ !'-7?ADEFOQSTU^`abkmnoxz|}~JfaviconvSrnlongtutorialbwspblobbplist00 ]ShowStatusBar[ShowToolbar[ShowTabView_ContainerShowSidebar\WindowBounds[ShowSidebar  _{{223, 59}, {1289, 758}} #/;R_klmno tutorialdsclbooltutoriallsvCblobbplist00 VW YXiconSize_showIconPreviewWcolumns_calculateAllSizesXtextSizeZsortColumn_useRelativeDates_viewOptionsVersion#@0 "&+05:?CHLP  ZidentifierUwidthYascendingWvisibleTnameA  WvisibleUwidthYascending#Xubiquity  \dateModified #[dateCreated '( Tsizea ,- Tkinds 12 Ulabeld  67 WversionK  ;< Xcomments,  @^dateLastOpened DEZshareOwner IE_shareLastEditor OYdateAdded QR_invitationStatus#@*Tkind "4<PYdw !"#,89:CHJKLUZ\]^gmopqz  !"+01ZBtutoriallsvpblobmbplist00 EF HXiconSize_showIconPreviewWcolumns_calculateAllSizesXtextSizeZsortColumn_useRelativeDates_viewOptionsVersion#@0 #(-26;@XcommentsUlabelWversion[dateCreatedTsize\dateModifiedTkindTname^dateLastOpened WvisibleUwidthYascendingUindex,  "d % 'K *, /1 a *5  8 : s  = ? A *C#@*Tkind "4<PYdw"#%./124=>@ACLMOPR[\^_ajklnwxz{}ItutorialvSrnlong E DSDB `(0@ @ @ascendingWvisible, !" 
&  *&./ a 34 d 89 s => K B  ##@(#@0 2DL`r{ !'-7?ADEFOQSTU^`abkmnoxz|}~JfaviconvSrnlongtutorialbwspblobbplist00 ]ShowStatusBar[ShowToolbar[ShowTabView_ContainerShowSidebar\WindowBounds[ShowSidebar  _{{223, 59}, {1289, 758}} #/;R_klmno tutorialdsclbooltutoriallsvCblobbplist00 VW YXiconSize_showIconPreviewWcolumns_calculateAllSizesXtextSizeZsortColumn_useRelativeDatpython-aio-pika-9.5.5/docs/source/_static/custom.css000066400000000000000000000002301476164671100224550ustar00rootroot00000000000000@import url('https://fonts.googleapis.com/css?family=Roboto|Roboto+Condensed|Roboto+Mono|Roboto+Slab'); h1.logo { text-align: center !important; } python-aio-pika-9.5.5/docs/source/_static/favicon/000077500000000000000000000000001476164671100220635ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/_static/favicon/android-icon-144x144.png000066400000000000000000000132431476164671100260710ustar00rootroot00000000000000PNG  IHDRИPLTEr+W!̩V q)Us+<(Y;o*و["q(p'ւVͨӄX!t/TĿɋe7Y$Ⱥ[(w4;'Ϋy8W:Jb3Ŭ˪ºC_.:&ϼǫ|>T ¼qGh=]+wOqK㶳`X{=]=riaVxUyR~BlAmHfCӿɿþ̱ǯͬrjrMnEi;m(μθ˷ɭœyra|[|Ra@E.@+Ž̾}xa[Qj'b%ǾŻ|nd\[vOMHGe&^#·еǵ˹ɸ±èiieuUTOR7K1ѽôܽɦ}}r͢o>k9e1O4ǣǰְ{m‘WsAĩvĕ]m~^IDATxkQƝi+d;3dĠDAWDЍjDABFǢ.ؕ(ZZ7.\.n DWdq".%;9Κ!hh֐&p1W>C5fddd+)ٴz\7c)/#mgRMë@Ҥpx[P10%\3*R9p H'B$p :$"&H6z< it57UZv:aj3Q$;/!;h4V)@4`a555ZZxЩ)*)M:E+cӔe4ͱ'SScZYӲYXk۔7_1;qЛ-d=%k>S9<㽯 Wtn("yD}CMcj3uSqNhC8 6e!ܮwkD7/d^N:&ؽP#{*~<4lFJ?%v{ct t`*,wzhK:vubx8,.yc탳BֵdОۅPtt`DLh~/@:8A:p`р&rKwA\ @͹ uf^O] :fwhğtFqbs }tSo%C;ܻ`骽ؽ(炖tt 5ՂV.x*}\ZKa'B)ڪ#׺y\6ۏؐ -(t ˰w fe0^ Aכe*یF An(A>< H)a3|ҍV Q)jZ8CRā}0R_!3N tusZ 5Uw;k:}*ZP; CU=+IKuT :يZKρPmш660 QAH3M;28צQLLG D!XiNbp;@>>{loo'3uaH@T OO@06L Bܠ }~)Ķ;V ?5? ٵ-aO/{a332Dᒎܑyr`/ kV!zD6^D;i~DO@@)aT@-y¶MJʬ-o@ J ɿKk < Y{אK+VL<9dL w|\9& ^sW?4<9%([Ш( (HS9{oػ{=]c,cK݅AapuјLL0͛oVfD+ ClBDC QyGh&H:8Dդ!@w4`hȍ<ͷ,C9kSa܂ځWTyQj7D $s&"y ɉƒFK! 
h d $ȎnX-[/xAuLa *$Re2rSm=oGN>"Ws嚼E6-nP2G46<+˼(.Ep0 mtx&ec{Lnd9p /rz QPJlN:S傁Fcz7$ܥ$ o\cU&@.5߀Y)B@Ѫhh )_'8\x?Ы  WlxA DC<ХK/*J QSI@ wQ$zTήW)T#3~ܝ 4;j.5b}* P8kM5}ZZC5WLFYG-guT=81U+k*!PxFE=T|V~P8kٖ3c~-] "}$Hua/!/KE>Ee8Lv-?/&^zEjGSʧၺN$atQIU'ܢC̙q) h=jg\'Q+JEio_bťGrcv|2G.S} C߇ZSg/FWP[j7\9N(Rgng;}hu#ԩhhvfChEc=4}]hd {VgD>7l_e+ p@[< 1u ۿpql wڕ8/OA*Qُ5nr,t#!b?T Y$(U u@)ŕhճYF=dP$"{ l:2NWi,Ns=  ÇΝ?knbrѵk s? Kk^aa.Q9F,M|F lkm$[@09(Uj_v>>q@40M+R#cǸҮl֬YffuD݉mgh@ WF 72 En>1@ H,y;s<0(Vѕcfb"^R "nU"&DEQӀ.h_?#㗈NsRgKz6qŋ[q, J+[}X 1nn:tdt~-[7}ٽy~[7zMU$n *V=1Qy_ĸ_Re2BmtS-5jVM(ۺ #,C=l.(պ4n%\x"֠g=VȈiZ8pidzB]W{'C,4QQ WJ y,d2fTuN P)<&ΒW{qQy7 ^O-UYPkFӦ=ECʡc+: 01jDaS,%[,QsUˇ)oNYIF.9QɖS EVYIj! &$b5d wWπȱ_?J?DUݒykvKhZUF:P.+UtcyH)P+ݳMQ@_4`$ʃm^F|jN3Ž@MM1KpL^y𔲁t>GBycwP) < Ek e[V%Ux&wֽe~ H)˨t,p&V9Z;øo$~Oֻ˽ôǹh;y8e7v3_-ΨS7ѽѯb16$ Ŀk}Dk@T^>:&Ǻɴ̫wӺͶ¹ɬ|?ҷϻȾaIB\)uMbAʰ¯phEűϭ;|n_yWqI䵷}`ZStQ{:}S¾whNMv,k(ҼۺohRnFɾroCp*\#xPoIķԱձ{wng[e&ȹYTK2ǡŠdzícvOb%C-ʢɧ}ɜfy=ѿ{KMNt=IDATxVKhQ5Q6D!qZјq!( ( U(QB  P¶B]*AU7nD qgu}I٭B_8%u@,cbq{/`8x]y8?1?tJ:d/GN:A aCXrʥ,;yÙC,{q(vX : 9w.`o/wtc!$`DM[-$e(P% `WڼݔJٞ!ͣc2j>7?)<9? %(@fQp$͢4璳?2 DlHh6x#A7_OAÆZz __Az|;E8K濾ܹa$i,dvqRVna!C]H,cghB џT#n+N߇ Bhl~aᄏo]'F9V0?G`+HMm$ah:R0#oBȌˉua/D'H,0;FHh2 !wP7Y$ܵuad Ja (O 5'ŠNfX.R~WE R=Oz-A &;ȏ 0#頂\#* 0`lzN|=i|4}w h9J1kW(pyƁAwُ-!ш&()0dzS '#5dD(^6! sdeW/B!:B41|L^h\D(6#>%XG;6T*UA 1KlG<fC wؼB_7S`(فoL(G; ЮhцVRXKsy+7w7Ƕ Ru 9\6uW;e}Y+pisT_|}qj@A;޽k˿i&(oJVc̈iڄmր4MI\hQ7" hLݘon][jPgx+y;3s}pnZd!@-yn= =:)tVj'ٲ$ōF!< ,`Kduv0˥kCU'i"b!nNﹴ1Ip9Qw[/}]c #.IxRb!bk-`r6 cbȴރu'Mv•d1֙ :lĮ$0e18޲SЋ\ kJ 1%,f~ĠބR uY?&8ĎbI1iUD Rm !0 lG`D1H:7Y;$gyF҆;U¾Ukw藬{raW(7%NcUȀOm b/E:Ġet8U^]I >̽1P_y؏sfe0D 7[7H%>h+5% lDv xH7HB>… 5[ATlj&41t<`q2Vx-wQV0PKTJFnb(v|C+\F!k2p8O̧Fȯ5!;/x#fB7Arnr3S#|V/QE&L pбBC_ +.W+K\MPjÿ/Ob<8 ;)zЃΌb:V-P6w ["3q H:W0;"W?m}'6m<}w>m1o`vt-\qVCy! 
`C֤Ǻq&%DRaHw|rk=qL.]ɲVSw`m)x{gcI@{W10d#믑h,(#9?ߥT@珯ԯuMlhImUS:S/D鸁}:mt_!EAډU Z+a1Y2x.p,YAO ztU/=a^7p%Y-S5mc:r!'HeAK@%ǫm*V'}b+.FB`*sw@S7g9*C^W<!q ʴ0^9 +h @hLrdFRG *B"5PQ4>,-hۍ"@F6[us:,dg(+ՙp9;[,绝)c/E ({%T l Ì,Ñդ%`9P|Ab9:x onl-qԌwycJ݁8爞jsPAp?%$fv\nD6qGŋ*-0ϤW /7[ Hc4i;-:fNQD; -V+l0PuXBNZAfdh,~.& Q^Gc'Ǫ;u `x,h-_V+;(!k >bt!zD"V'*_9! v;1 3e7T-Nӻd;!subA  {tZ*uhs쬃M@ECrR`EWd Uk2M'BzDEڍTemΓ6iTZvF09ev¢S[ =joBWjFvڨ" E ''$_͛!J9]$82^zS]LI,߱n/-4MӐCh SETa'Y(5>=rvjh.W+ E\E[.MFǓi:PCz-``BPfӖD˼zEPިFkd @Tm ܖ.')x[R͹^BIH}.c  Lbx!GS+RSO0nz*vU$8Lj4EEH OFr -ӧLv"5`JZY0U~s׋#DI&-{Hc*_p4Jȑ]S Yji%p%Lw7dPi(ÃuTQtץKko\(?'$2hwN0@GhZB( 51F4&1? #`hPL"*K*tH)B-UV`ZBĝ"t%y>1)3!P<Λ{s/'i/ 5Ӡ/WLG:kP4?}򵝶m跓X CfE͠8Oå6|2*rc8*iƀذzl|ódJ/ !n{NOD `Jz4p%aZt @&1@Xٍy}pxesIr#jM܆C>\x˕Hivlp8ĕBP- lPђZЋP6/?*+Ç i_c7Z@(_pvC:v퓹LʯZVN$\>Ƃ I>zmϔ!WS`Ce`试^H7Jx7BG|7Z( C,4W?Bv} 䈈o5W/|޹\;79EDru36A48q"dr׼"ELN+'߱(#V28`z]M.k:`p,-d\h?'6 ,F_ar ?Ǐ0́tMX#ex&wEa Dvm],`W墤hv?-T-.,FVɮB V_Kb3fr=]a]J/9:4v,"+ӕ^TSf-jS UAW<9: mJMz WhjjsA ʡx k_r8V?ҿQ 2l) z'qhz>Y=V*i8 1VfcS x,,*=(|m]d~p8JvoUd9{!ITiM\GU"<]qiO(jھjivwKù#Y$?M_J# J.SIp(/>ypԾP(V;{8igEq[{^TΕJh˰HYx|Vx3 V99/_˞-^{|a4W9udٻ_NR{G>H /JYm-q9Rdv-Z\\,4#::itFHN'K>|d7G9HɎT<Dm$e%2Zvn|}|&"-:JN4c܍LE׾gSb~a`C7X aI_MNHju& /Q+zӳ/^H%RAleAf5 e ʃ׫A ɽs$xy/Q}$cCSW+ -z}Uy2a?u//yNR~IENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/android-icon-36x36.png000066400000000000000000000024451476164671100257330ustar00rootroot00000000000000PNG  IHDR$$hPLTEW!r+V Uq*q(p(o*s+Z"u1լt/['Y$̨Ȱ~Bj>W!sL\=ļͬˬʸ{nmxRrIy8h8c3s,\*n)j'Y!V N4ɾ˳ɲͱɼ˼ȫĢxxwtrpg__[|ZSPNJmC}?xU@E}`?8A:<@ \h2Gѥ"9+$$724 f裑GQ H@gD@/ZH H:B[͹Y`rCbp:*{3 np]j NGª-GZʛf>v Z#|3W1ϛ T >M_Dp(&UȑhWm.c#"DLMp6 >`uW:ifVFˠIT;^$k4lkdna nтbS鱐T &̀BV!GdfPrPKwJF Žsj kywHԏPF }w ! 
4;DOxٍ:ZQq )%'eE`[20RzJOJi+%64Hۙ\ FBY=*dkΖ/eK+Q C%]N7ƪ Q};N w%;p )衳C[ CⱟAtgᖙzm{6G&J]{A&8L ONt5~h5 fbEVX?KPvIENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/android-icon-48x48.png000066400000000000000000000034431476164671100257400ustar00rootroot00000000000000PNG  IHDR00` PLTEW!r+q)VZ"q*o)s+s.['̨Y$U;'̫t0ϭx6p*ͬG}?z:f8_-]+W!ÿvRoGAh\=[<T8ʿȻз̷ȴƲαʮλŲŭǫytkkcb\wNsJsIqFd0m)e&b%]%G/ԿϿǾȿʾȼƻѿȹǶʶ͸࿖ںֲͭĦ}|{vsrqoɜed\YTJCk?m;{7jFeCd%JIDATHǥ@IrvmזTUcc}p%MϮ{^cP@jyNR(_x6^?uN\kr_;Wy 99i"$"rPal3 Y)PY@Iɽ)^@,ԏn㌁fЭ%vHBgkڑcUsC l oR<@ V*P-LC>8a@⨡{U1h{L"rY;4f옯i}D4n|KNfO{p\NŚFǰ Sмjf2WYx B2Ķt-vJ8X\Ա}HO&"M&> dct";4^5B:>܆ b. Կ d{Wg2xod jl]⎝*J]0ٻE{!Qv T4bxA{h1..2!r" Ӊ>sWq{Ds^'+,ժd"iwGX=l8`$QsڞY`jLs0(JfL0n'FpqFCY4P%KzPKReɆ:Z=tT%R3aGVнN.{gBŷ#)ݢ@&H5QCrꓨqG 5r4@mDjqLǏŇZ%=qxE ?=#w'CG 32,/pF+xRte8ڽxF)ϼ|jz ېWȣaVX.]7A1 8v}Rgg!C(qlɓu*z{d/KջL-m|p`H!Ép/eNAuJCݚnFx4kUHEkfi)24Efj n45ƈZb]-:HS霾Sjd(.k- >4Xs?y۩8ylLa45Zr!P0wL>@IENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/android-icon-72x72.png000066400000000000000000000052611476164671100257320ustar00rootroot00000000000000PNG  IHDRHHb3CuPLTEW!r+Uq)̩o*Z";'q(r+W"Y;`/ήz:x6¬t.[&єwSͯͪ_zVnF^,Ƽзŵļz]JoC}?iz>{:e2d&xOjFgD_?M3ӽɹ÷ſʹƻǹơDZٺ­xvϦtrih`ĕZRMDg8e8b1i'g'pJI0E.A+ìϿ־̷ϷıྖγzxssnfeDo=x;`$^$[#["}S mHpKoIDATX՘@i6 )ԩ+P ;wwwwwwwwwwwwwwwwawIZH 33w׹$_޿OIC H"ST* J]DfsIJ0$ JJ ("T]%&w (sq|ăBBm~gg& .7@PxmZl;Yiajg izg2r%+53R$Ρ̷p^<tLR.7 gӡX31 TTKyP 7plӲ+HC2!I n3q@ͤ : ╢v]S;Ԙ4J n7}%li CgBo@b ix(m0@cNR%u0 /-FY@#@]a|É@0RX1k9L<4YtQauirg fqck{R^x1F{;gƩ$O5!O944"FןWoq|ޘ+7f : k񻷢=;  50t. 
AzD2k3qo{I +&0X+'<ݓLP@ Ľ/ *2` s=X9xbf# ykZv򆩌$F9wQ|ș4#AOl0گ򢜉{#-"oyLEY(L5Q5}\vD0R)_@o1ۃᨴjRTΎ0iZa$2q|4c 4*a9,װVY*^S#IXEvRkM#g`Z+%_Uj4cq/JoAh()<@'/Q" +ys,cG{CavOq*@CA2)BHƒ2_q(y˷䭕|!Ơߦ) ޔCD.Slhn4*m~k-*i,R4X"I[@ 5oO#if-LvW  ,;wڠVV,gл$a!q ro IZ 9U]aAY:Sw]-lH׳-س瞘pЅ,JpPΆz,k% YSSJG`.~ZĈt+qc|`nQ =6:]&1O1%Ta.d|@rZ]f{N_88ikC_ddN DV@ac8&j,`@ꉖV R6갊~IU7k5-OzT=R6{lr\NzvKUD eGd:zgkLoTV)W$[ ʜGٝ:BO97dbtZ)#ȍ&ϑ~?3܎<6dZΕ}d^E8Qjڒƞ**.ύB6ը9̴FǸenZ-FYav;5mq g/߾oCo3K:Lß>n_X|]#h>i3r<4>) R )|D(C>_*J%4`<PC>R-F˔ßޣ" %p_"qc۶ (>Kꋳ"K=.qg MLz0ّ0i'ZYpD 3hc0CM6)zJ@@ه (+f*0bh6('4FLYmnNQ^QM+ֈ#Ws:Q. 9Ճ[Th{b#%ɐ4>_cyv}_FfSV/2\*Pl&{A<Ma7uvݪ$:bQYZE@ T^+t/ka|dBƑ;oݔ,>̇ 1^{p f l!5O52RY( $ sp#C^.JHiEY1HF--APƬ:G8L^w{=k^ @I'bvD';a"t 2(OBoff@dD,Cf 9B]{k˔V::ϊB9}Q|Lg uP#Mi9 +!\۪iSomhN˄r`8=1'Lv_oO]'` ].2rѺX?z?#}#n(r18GնϻY~s,v@UuЅ?jeo`џ -<jlQu1xCmM.SNeu*4`۩C>9*N\#FCHu4@:C 0' 7a 8@Y5kU<ڹ+ĸ$7ߘH[)k"HLnH_nz%P]_!)E9PR٦H1]; /ra@F6jÉF:ڹs硛 ,dj uAFt^? gRRƖEOi,gXHs=Xs. I\=CL\-# '|9?*fN(vBr~|iat6zq)*ė5"9qD=GVrMV3``5bk DNvP0eo0v,tt@a%;^xnBJDŽmX6|Ll4ȍ0V)E~ST~yAN<&bׇܴ1$Nr"W`8(oRquj./IJ)7Ay_a%E _ZR1f1$að<$izhgͤ%/-g}R|Xmlhj7n/-(PP`l%BFǑ]L"M]ZSVh5.*e3sCYvKmԍFT- NK*}SVǥ='h]N4e+#=pKJ\B(ZA$ RTwŁ^|ТՓsJήUpPq/Qms:`ހmS (hjzh4 uO#lhYpD"L#c[^_IENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/apple-icon-114x114.png000066400000000000000000000104321476164671100255410ustar00rootroot00000000000000PNG  IHDRrruOPLTEW!r+̩U;'r+q(؃V o*["q*Y;p(̨Յ[(ŅZ%ɭv2t/ͮy8Įw4˫̫Ј_.U W:Ϋ`}@c4a0¾{=f7^,bAǿʳî̭wi=|RùƱqf{Y{UwSrIyPmH[<M3ζήqg`WFj(iFźõŭb\QHnEmBm(]#qKӽϻôɾʹȣwskeXVsQPuNL^>?*бͽ¶ȴȱү}urpla[z=g'R7ļݹԴɧ}wЧvljNxG@c%XǟogVk8wOIJ IDAThw|eM8Kr!14hBF6M=)(eAAlDAp {Ry?7>_Q)CJPxCj"R*YܕJB$&&% %!,lſȅbK\!CR@E[Ljve h"SL̠X,zN+V\fB've%])IS˴6l޼YV=&ϻRg?P?qذsڬLGLg;OQ1CmxٽNVB3Yo+9WHr׭ɕZPI}8nX&c\RBJ̝v])I~_2^Ny&P+{ "Oe7fIkK~^)̯&f^#!l'נO#ՙַH^38$H9v݃qk Wl񐗯I _EC/ zkp"-U@j&&)̇ˤqqgRGU_+qԏLʏ4ZbFf:&J":L2㍐Mԡ;W@.\ جX+ߦT]ݷ gxHgv*~'#r[S3}qSKY2a}/| PG cNs\Ǿ"O@X)Pkw=̲z#Ҳ=y~mصjlI3`XX-(l<e݈~=ZffK4m&-ya޶}MNEÜR9d(zyq3~ofRguuuGO2!;9D1Lh]}֯%Hh L3C3]GFb't%ꪁw/ ~EHmawV\ TOr\EIEɉl/ڳƷ>&(s9J:J_-]Do(*dF)sCc H឵bt3 
{6ȢcdIJ5QCDN9>!rg \3V=9N"M]kk_׹L*lEcͣ7=4DԠ;AʙULm*ڕa<(V~^H5XAuyB:R`Q%;Y"liP<$5~EI'."uJ_U_i+m*$5`4[~Qޯ͚"<+(cn&H$ͽ!̲UW%>-$|Go84TZ)j>.^+ 0#nǽ*yP`L1BYKs"M Mn"lыXSp,5 Du_߄s(-z'VNbn?cKKp]๡FTUb'үl;HcUWOdKW!\㘛W2fرDqE/8Y`wN`ϩzm6)Srݡי\Z;='5O;0q3h/JA[wIʅb=_Yg?KZ&eչe6b&)o;1:Ȥ4*UG*53fqe 9Ѐsc^_kGk J̋2z&}\ZW I)Rdfr${eR#&%j@z46Q!To-Y&g^ B#y g2ƉLrzKƙl0G#y$TRH`0h?!"`0I!+LĈCQkD]ވO0] DÀaf;c0!fOHNvٸǏSGsK6־%'ejxVq9g~r2^r%GB=),@}vjX͙jZbv),=8`$^˗v )@Ɔ}y2:Tx7kǮ֕\(ŸKg^d|ܯ{Rx(G0k`ukE8Fv%] X>L3*vr`0/sT$'%w̨MMa g1=V n H25 j/Y5jpF;g\<ȇӑ +R]W4@ZFr.<"/#w~WOp q_C*z?O]|d8 .&3r[Vб ́Kʌ鵵Ϯ34|y_U؁^f2oPBe#n+Fl 3Mɢ uZe!^5?HP)`GsCnVPs;fy& פ]R:;̘IAly)bzt>KH͉yMsu{vs҄|:bJ\#qa stpےAT| wΘW ]{UcP#?KI=Alx1~٤Y+Q>ѧ?!^S{iLWl(49d4gF;ȑi@{{* }g{}WF=ۍmFOؖ̌c~x`k1cBѿae|63}Vш:7N\9U궪:~`PR ; HG,A+`Q[3j?/k^^*)vX,Sd#A)dL,|}[ՇҢӚd $ ~5 d[5vqeM֥Vq1I?f 5Gid b[u1hĸH y  &TT6Nyd&Ƈq:g~ƋCj^e<M((f[TsJl8D驏=ز$N#z57"QVnlPAj_x,Q0,W E*[ZpEG$N o*=p˲!gMYLjT^;E<}0kkVO^*?R_~//p>0TeyHӂϖI3K#R) P@RYh&*wk"M߾zYjF6}ę)]v xC)(<0Q{ñ/:'_i¸yss"%kS@޸]*L0s&0q|.W$TWCq# OŇQeZ̜eu6Ԭil3t&- f$"Ap);jRˁB}-(&|I.Y%M#"_?q IENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/apple-icon-120x120.png000066400000000000000000000111271476164671100255350ustar00rootroot00000000000000PNG  IHDRxxPLTEW!r+̩U;'q(r+V Y:o*["p(ׄY$ȭt/X!Ҭs-['žʪc4̨̩|>;x5U·]+β̯Ϋж`0yPɼP~Bi<ɭ}ZWvOl@v2}S\=Ŀ÷GFe9tMiEƿü̫˧ushyWzC!|0,qxz(ShK~V3BZbOG<ČynٳТzYxj=N%(`n_&uj,ʡֆ&zb9 ې= yYK8i v &\^"u*8CCU29iKh 5K *_/+;P>jZ{OY%tA>WV즲~pכ'@$_ܥ%K0y'Qʮ]u=9X>e $BU!TBREDm$h5RU#V( MH -MvF,P4ѫzd₸Z:Ѽl \6-i3Ddh_W͏W;3x͍-.ngFk=BTlʞ/(p rή{=* --hpF0s9sfqH1el,Bыk2;[0zAO|d:X}T ݻ{ ~?Zza5ɨs 2s :_x6t94TNS-UCv}'ϡk<è>yKMVA 5t W%bΙ0Lq0WYlR_ Mћ_0Mr{bŌEY6T`Hg4(:+|7]YXQ]'ñ tq_j =Er ۷ H?1AR" (hc܁DY^5b>Rz AR3%|{dp yf<^C2.:*Rmoݼg6O vm:F%#zۆ2*\& yL| ѹOv9DaJ?0DW z:I$@"ӻu0,Kg[xC!$ d?&tYS;NYZSbS?0xB [岎O5JFdB|lSF<:1De)\Z$uSyK\#TV&d|"Ǎ%錮RnNgrJ;3pٓyuO1V{Y&݅yQe9uD?t,+._[uZ !/ dn[:ypɎٝ#z:)Voצ!0X[[~|1JeD7k.hT5ӊ2XER'UZF)jxG"FmǺ˧<|GN뗿"7we3O-]BMbi2gOS7b{-)uF2W]VS{'NPWPeiq2qf$Y1{-cMdYkSi(/BYRikj S05hqtY@!d21z(bEe&詄DDeTqj_IJJD˞ 
y!dp졑0B'(IENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/apple-icon-144x144.png000066400000000000000000000132431476164671100255520ustar00rootroot00000000000000PNG  IHDRИPLTEr+W!̩V q)Us+<(Y;o*و["q(p'ւVͨӄX!t/TĿɋe7Y$Ⱥ[(w4;'Ϋy8W:Jb3Ŭ˪ºC_.:&ϼǫ|>T ¼qGh=]+wOqK㶳`X{=]=riaVxUyR~BlAmHfCӿɿþ̱ǯͬrjrMnEi;m(μθ˷ɭœyra|[|Ra@E.@+Ž̾}xa[Qj'b%ǾŻ|nd\[vOMHGe&^#·еǵ˹ɸ±èiieuUTOR7K1ѽôܽɦ}}r͢o>k9e1O4ǣǰְ{m‘WsAĩvĕ]m~^IDATxkQƝi+d;3dĠDAWDЍjDABFǢ.ؕ(ZZ7.\.n DWdq".%;9Κ!hh֐&p1W>C5fddd+)ٴz\7c)/#mgRMë@Ҥpx[P10%\3*R9p H'B$p :$"&H6z< it57UZv:aj3Q$;/!;h4V)@4`a555ZZxЩ)*)M:E+cӔe4ͱ'SScZYӲYXk۔7_1;qЛ-d=%k>S9<㽯 Wtn("yD}CMcj3uSqNhC8 6e!ܮwkD7/d^N:&ؽP#{*~<4lFJ?%v{ct t`*,wzhK:vubx8,.yc탳BֵdОۅPtt`DLh~/@:8A:p`р&rKwA\ @͹ uf^O] :fwhğtFqbs }tSo%C;ܻ`骽ؽ(炖tt 5ՂV.x*}\ZKa'B)ڪ#׺y\6ۏؐ -(t ˰w fe0^ Aכe*یF An(A>< H)a3|ҍV Q)jZ8CRā}0R_!3N tusZ 5Uw;k:}*ZP; CU=+IKuT :يZKρPmш660 QAH3M;28צQLLG D!XiNbp;@>>{loo'3uaH@T OO@06L Bܠ }~)Ķ;V ?5? ٵ-aO/{a332Dᒎܑyr`/ kV!zD6^D;i~DO@@)aT@-y¶MJʬ-o@ J ɿKk < Y{אK+VL<9dL w|\9& ^sW?4<9%([Ш( (HS9{oػ{=]c,cK݅AapuјLL0͛oVfD+ ClBDC QyGh&H:8Dդ!@w4`hȍ<ͷ,C9kSa܂ځWTyQj7D $s&"y ɉƒFK! h d $ȎnX-[/xAuLa *$Re2rSm=oGN>"Ws嚼E6-nP2G46<+˼(.Ep0 mtx&ec{Lnd9p /rz QPJlN:S傁Fcz7$ܥ$ o\cU&@.5߀Y)B@Ѫhh )_'8\x?Ы  WlxA DC<ХK/*J QSI@ wQ$zTήW)T#3~ܝ 4;j.5b}* P8kM5}ZZC5WLFYG-guT=81U+k*!PxFE=T|V~P8kٖ3c~-] "}$Hua/!/KE>Ee8Lv-?/&^zEjGSʧၺN$atQIU'ܢC̙q) h=jg\'Q+JEio_bťGrcv|2G.S} C߇ZSg/FWP[j7\9N(Rgng;}hu#ԩhhvfChEc=4}]hd {VgD>7l_e+ p@[< 1u ۿpql wڕ8/OA*Qُ5nr,t#!b?T Y$(U u@)ŕhճYF=dP$"{ l:2NWi,Ns=  ÇΝ?knbrѵk s? Kk^aa.Q9F,M|F lkm$[@09(Uj_v>>q@40M+R#cǸҮl֬YffuD݉mgh@ WF 72 En>1@ H,y;s<0(Vѕcfb"^R "nU"&DEQӀ.h_?#㗈NsRgKz6qŋ[q, J+[}X 1nn:tdt~-[7}ٽy~[7zMU$n *V=1Qy_ĸ_Re2BmtS-5jVM(ۺ #,C=l.(պ4n%\x"֠g=VȈiZ8pidzB]W{'C,4QQ WJ y,d2fTuN P)<&ΒW{qQy7 ^O-UYPkFӦ=ECʡc+: 01jDaS,%[,QsUˇ)oNYIF.9QɖS EVYIj! 
&$b5d wWπȱ_?J?DUݒykvKhZUF:P.+UtcyH)P+ݳMQ@_4`$ʃm^F|jN3Ž@MM1KpL^y𔲁t>GBycwP) < Ek e[V%Ux&wֽe~ He7[(ƬϵɫͨsHv2̱ˮl]+ͩwrLnEy:_.T ]=:&Ĭ_bAA+;'ȳΰϬd`xWIl@b3|RlGýӻ½ô»ɯ{qTvNa.̩VPExP¿ƽοƺ̢ptMgEϾô߿hfe]}Cm)ûǿnlkhe|`ZNqFy6i'f&oIJ1ɺŸȣʹӱuta}\zSPb%^#R6ɸܸ|[?k7ƵԼdzĥ}xGnIDATxwxSUMp6Ql$M݅nK-Cj%2FADd#  "{&d|ߧ?Jn/{_%t :Aa1soZvLq~o/B,q~8<[7h~))\غƜq$`ӥ 鼴T"8De\X) Hѻ7I4 "suƥqgDÏ?>ܻkI,=-Q8)ڳu;;U0.Gd,gٯg~ǽ;UY ^*xD Ɖ$O^pcCnnÍ Nܣ .NeHt+6prrNF(?CKEPp_ J/SH`p.s=|n䎐ql/ 9 mܧ5*iCWeG'"lbnĦ6"Ґ'w.m] K΂5ݐXr'"Y sQGeh02&[CJn"V%`qͯ d,)'zxpQ[VF2<L ##(h 0i?,ĐOԊtca 7*{i?cǏf.ޡcTtMr1-R5xY@a8 -0PX\n,!ت Y$%MSes OqhtNf,0qn- L.yRRmXrs=(!cK%c{ah_Q6 \nifRC Va{2`]aMt'!VĔLjـTwɻcv&Y.fy!.X3zpA4Ilm#bq7}{̒RQ0we51cD\0WVK0:LiHY$O@c,%ؐ|5_8 pQyQD\`Jl nT4H`FOڨaͪS‰+]h`!'b*,TL x.&揘Yh\Mrns2#xxrX- Y핤fvx`m[x_#r^# Gn< R>*)=#g"0o/C%<30v0ˋxGbdp[ A9rB!=&ҭ rUXiۄg/?LSM8KO,PD,RkIlGO  ma8ےn-,8&9_z-g?nƌGF%L|}:af|_YْRGenGX*#-?|Q݌Z\mVRCX! %mu**V* i 6b|ScXGɵ%.8DSy;؜7p8nn6i-ϙdwe`tb[Udk4 8+*C8k[YiKArR#Lesk{]сSmrqGؒ,PθGPbVy% H~ZCuOlS-Xy515wKNS aA,)9kVSn7>%q~lKbқ+Ӏ=?gp|[}Vhbz[¹[ ^kV mv'M_F3ނ|U,̓-Ic'Ycv[}O?O!TMuCybuB=VMFv)bv!e858žY5dEd UC5+rcq #CRb$o9^0oT#'!Q2/:ZM!2{)lK8k.X6 +jHY:'mFv݁r`-xWt8ELΖx[b>ʫt+6P!EpǬ)6`|&(nq# ̶;̝4c.^;8uI[v2yptiP8P%}j`ɇ2Ntnƿ2u`^ہz R >ō7>h)D^W?%ȒjzT4vg_>yNt2YrݼO'89Kg![j7e=G {]YE^OgYX-A >d_j·(}LkB ľ'.M }|4ǡbm*HL2H2_dIlށ"ndxĺO\04{>`R֖WQ*CdaF\#w#,di4YlB[0\zV$ &&2'Tj φѢAc!䈤ͅ@؂a^(&B3%ҀkrN`2Ȩ;jJpAޞF I"Q_JwBK%Y0_{hH:0!O_@:ѷKE}mQ:/C Z}Qwi<"k.\RIh,3ٌB$! 
ƇssR90P۽.p&֢VkJ WY]6G5h^)0 R tkZ|J 9>(H}Y0Z/͕#9Fxg$FA |Ê; gWps6^'m%QeN烁(ivpoK~ }B4p!X&g[(,333ٜ^7c+Oӡ.4 iǯ(,:絯^{ ۷/eg7d;BJk \.2f̑1f=4 ע9^ԼbIUÙmfZYf1􃰽Cld#eC}HTPwi*܁8}+/W*+ɢ#}Qr83j,ZY{m*K'1] kw,ÒyǼ=XvɹQ}3eh!D&dYnG6 zx`=77h$1W4@0Xv3¨kٙ꟬jU Hc=B= UDdYRskItr72:3䣦,Q"ܯ8ݣi,ƢàBTycTBؒ j]Aczp1 yY$2 i d G+?u ?mʳ4:Rhi"2'Ȗ 8sb4?x9(u ߐ 硷ޚ6G߱x !CA0.;Ӧ=8iH,ɕHN h #`s&]zOSlɆ4koLh;8~郟$ O~=q%S:y#Q4"kof[0 Uab\-K #ˀ<,{Т̡sX Fĉ}sb`/9K$/!S +$ubQb VmosgiKb),Id z%&=f#Cf_1`11>p yz$~9#0FE TYz|lɖ-UW}}}HȒύ_]j-[{&NıVIt1j9j&$(Z`)1yN]SyXsWj[tO =M2Ycǹ\rUSjUy$"qjF9߫tjmR \o$4T}TAg8KY"!v7y# W1@yeIVeHr2:xFBObr"Hp .;|x!6.է Tt,AB`"(,ޘh}.W^ZZ*_]Bt2Hޑ #Fp:Um@MDo|x,`[j gJOľN·apTUJ?:󖛽jR $J:K<7vv ͐-tE^nl:_D\.`&$ 9N\oe[0]IyvoIKu_9b-p+Bg^93nx~MsrRcp'ܪ (<vdxP3[ `b(k¦\ ]J;01?xDT-$R`4ͭB01?wx[ d|sr$Ye}9Žeg9>.S; vܣv/j$ &FMde-JaT92̮vY+n+Ah`bp-WCgWk$e{s%#NSl uUT6Pٿ"Ti\ՅE3' O(>UI4Mn[]^QPի7/Qfd ҚZo+v;'G.ՉU6Yyby VLV]ѯM*>}kַ6 U`-*4֢"),ʊјr;4hR_(o kMXvwkEvw38b6xMHaP` F3HccA]o-ţ3Xӕo)YHjMөT(qzFQeHY`FeP, HFc>L&D&c SC(ưQp!*b6.prg UpB+fb <‹$]XRu>SkvhU99IENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/apple-icon-180x180.png000066400000000000000000000174441476164671100255610ustar00rootroot00000000000000PNG  IHDR PLTEW!r+̩r,V <(W"֭s+X#Uq)Y;فTщ["ͦo*p'̩o%̧ĭs.p'ȿRW:㿿9&ͨŭu2]*q(л̨ͺy8e7c3ŵ̫h;`/ů~B}?Ȫn$μZ%T KnEtMܸuw4̳ʯΫǫ¶zηŲξ|je]{XI[(ӻȤ~_$U8N4F.߶rnebaxWTRqHt,p*ʷеlej?n)k(ǻɶ­{ytn]wOsJǹ޾nQ=ڸӱͩ|zvpLg&c%ǿrǝjÔYe1]$r~^[k9ȲwKNIDATxKhQMҢyLqd(dRFK+D .b"T[uԍ]k"!7Zb"Bb ;]7sjxSlrs5iMkZ,!ϫ[zb`@D Ůn&BV[}z% Șx qB;ڠjlWUĞ+2nw8\ n5X]`ViU{5!s .݆LqHAWp *F"\1v-#bi1`4%p슧6fTTUJ| Yŀak^FcCO916P*˥7cd,p١U :}FCy͙|~tdLVK6֮(l6+P,"5)tjAmyv8y7g :k 32 :vgEfE~8ۜ9<3WNܛ 6]B11Hkh9zT?)9B4!NfGP|O@Jw}%%`58s@/+YLa;l[k: v,_gLaRc[B26hyOqR&&IHxb ]ߛNaj`[s HsڬRiPp:m N@m@x\<j Bg@Uك4jꞮLT"6hG0 z $G\LY6tn\b8Ͼ8;}-9i u5ۗ8P@ @[Pp8uo$ZM7Z~}d/h: bD;or2[M?$*pY_Pp A HuTTTS AuMt[}cդM³9)cKz\& dVcs>'uɁVvc h&EKns;,3|t4J/01NcpB̤+t>t>I ^;LЛn"+>,$l WХ٠ol A$ihK}d~zo #PTd.^=A#CKnmP&q/E^S٠o6+ m <#t8{%򟠻GbIܙM^3!P偦$D0׵9ki@ʬ[* $/t,xТ&b07+d %xe'.z="0IrԒombbGKTti6#'ޞ3.W'kh~#-!c{g>7`iK6TX⽌mI"_r 
6bf]#oj71)ЧOi `)R\RuW~|ё.>IPcuaX|RaNJrCtzV;|&dd}cT @d V4V |WvpЈ"B4^[6; 0v?DMuNp {E.HǘGH5d{YPx@EwƠwn>}2@ Ķ! [h8"agEI>;p7kADbFg&.pqy gXĬoP"p-ء?*Ш.'mX]% qx-8DiK?v'7([N} _=벆,슾(63u.3)dz{̟Fo.37.&Cη^#GR. zءI5J=|<[=jOklv\DW\.FNce^lbsBmK4HSª~"*CyazmT?rUT ehԤ4HqPyJcQtWПqテh@ z]ɣ"[vLaAO]1EARdnfo#&&nfķFЯ49rvw4߾2%rPZ  &eH&_9\q&ĒcUƁ"@tf2a&OG?La ?fL`v NS}1-f|}ͳ 0~3a4KD`V|@̑5, A4kw/0Ζ''f2AFӵnJsZEr yV}g ~߭0*͒¦}0;%1&:VT">/5_L(*N}u;4l&3=U4^^M :vU*hNG i/KN G2=7CblJNnYֳ&6{GI:Z>u9ifl0>m}|%FsȲ-y mhP?z][viNm2ulU#[Z :&95\h9::>4JƟj8@*pkpn0hKFQg=GNw_vvkXG'&үC'vuE C:gBR}-r8љ|䘑)Iv%7ׇs2Ѳ[Vc)hx~8Im}ڒ;zW&v5LB軺|* ݵYbWdC5{({V `φ4؆Dt}wi򛈼,+ rI%7*ڲ?=fA ]w@]N8Obȧgis-s?CE.uh8!(~^% 6h9\BNךbg4@ul I \onma 6N%j*]u5[Qʆ4Y|@?g7u[:\د5a;еqY]bx4N;m@uhщ \uɂM U'^|p.=>=>\ (X3l̀5;jڪ#Rez/mg˥=<ʢNx[H?;WCm|`Y`s{{pq_{ww-a]v]y~&ƌsOg/y3B1='4X h ;5jZ+a+d[/!+@KO¨va%pS1fx YE'7?[fSՐ ㅵCQXTL:n/D3ݬLg !0؟7͎EY~fΉǵN^si\YѱCz},BBe' v${^۶m?C>n 0jо*G㰝z E:]#};)Iҩ?j= Jx',[t۶um*E/;@g6,P5P=W^rnT%>{$CC+ة=RCzި& 1us8 {Bq~_sU~{?p{{{"c @YsNJFBj;ʼ ϼO5q3BKȖ}MpT^PVrV :+BMuf}R\x;뒏<Ν)ucβؓ7댸^BoTFNa\=Ò,AO#7hF,sz0c)tVdՁ!i2^q&¬mNz6 Ç$9~5 . gV;utT6w̸˵-wU=u| R=wfb'-گQ|W# T&4D#SJAYN4V.U[.fJNV$"Ve-" ;+$0#^Йg4er3pW"8 1_@!xnVZgy|fԡͷ܈b%;ӧ.K D̀,>~w5Z[+d z1BS$vnhb.Chs.g%R5)B<41{(֎èW?YF<ۓcb!uAF:iu;#6eXSy`ͩ)OzS~fo pvO.f#c"h-y%ؑFGoR'9ɓ5|nm&' /'XН!gpR>. *kI@w/UpYLYş`2/~siCsOOjzl]k3jUA g7]KؒoEe+9C#ll:eO<ӯn~oyuMґF&j lsg:;;vИ?;yKo5zsmVr! و5 b U>5戈a+ h.q|`7eN; 8>;5 g =S9?r't u! 
AVlzӈ\0@O~ B>=8'g(,p"P9\mflzg7$vq2)\Ǟo,sLYrLAn&5Ev2ʸ-y.?K]{WӼ>ݿՍ"'DE C?RNܭ=-be V3;w]`%k+:s( SxE(|$63 ;IąysG!I9$BQT !s"#Ԣ*"p Bh(F\0/[ DPZ8DlV+Ј!LV'faDLQsSA$JL& Lr"$"1AHvE~ (DN& 8@| X#[(Ġ 3AN~%#^/Ǧoٿmh_IENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/apple-icon-57x57.png000066400000000000000000000042171476164671100254210ustar00rootroot00000000000000PNG  IHDR99sPLTEr+W!Uq(V ̩W!c%p)\(r+s-g;ՃW"=)ݭv2u0Z%X#پͮ~Az;e6}SƾtMFx6b2^,hEbAW:;'¸Ϋ´xZ|@k@w5Ӽηýż̫ͯ˫˶ïxrpkd]YzXJ_,m)k(]#U zQoJ[<ѿĿǼƻųɾǻɩ˹ƵƲ࿙ŬŪ|Ƥzwukhɜe``^}^TvTKpEw3n*e(h'b%vN^>L2G/ľгɲƮ̼Ӭ}|xqqmkgd_][ZXRRxP|Dm@~/&"(#ˣ0ro^wb*-e>^LT~FTy%,FIbz(;g}PKҎF֢ۓ4K6$oě@*,n=;ȡnZlRI[j "&qѹNduZyg@&UrͤIA?L# 6ke!_;pn.yPrm1jS&guE:\򩍉^Diu8֫! Ѧ:kR떀bsP&Iki_n<򁥘+E唙E[ay=NE,&#S*~2HՎE(Iu]}2ҿI$46_ޅ4wB~-&ߵxX8 )_nM e-cr[-Ρ'6 G#1P(Ҡne 3nZ(-_Q{UXά~jrvZ{zk?,9 Yؾ9\hjIQTѼ^Z6-/2FE9\CIENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/apple-icon-60x60.png000066400000000000000000000045021476164671100254020ustar00rootroot00000000000000PNG  IHDR<< ")@PLTEW!r+q)VV o*["̨V r*s+q(<(X!ԭu0Z&̩Eh:̪|>x7b3_.s-]+m(~T Խ¼ͯWKrJAkAl?\=޽ǿƱni}[z;f7w4g3g'zQiEƿ̺òǽ̯Ȯǻʫͪͩĸysroe[{TvTvPoF~D|@d8p*j(uMqKnHeCY;X:?*˿н¿ĸȶƴϱʺɤعɱĨ}{zwoj`]ZyUSNIx<{8c3a%\#Z!`?C,ȾûȺ϶ĝ޼дwsrq͢nnkged]RQMxFpB=a-\#H0_M; IDATH͖epQINrRMZ)-}i{t6owa$IS+2Ͳ4EX/yX2[/XPɤe {խT,Cjz>{:e2d&xOjFgD_?M3ӽɹ÷ſʹƻǹơDZٺ­xvϦtrih`ĕZRMDg8e8b1i'g'pJI0E.A+ìϿ־̷ϷıྖγzxssnfeDo=x;`$^$[#["}S mHpKoIDATX՘@i6 )ԩ+P ;wwwwwwwwwwwwwwwwawIZH 33w׹$_޿OIC H"ST* J]DfsIJ0$ JJ ("T]%&w (sq|ăBBm~gg& .7@PxmZl;Yiajg izg2r%+53R$Ρ̷p^<tLR.7 gӡX31 TTKyP 7plӲ+HC2!I n3q@ͤ : ╢v]S;Ԙ4J n7}%li CgBo@b ix(m0@cNR%u0 /-FY@#@]a|É@0RX1k9L<4YtQauirg fqck{R^x1F{;gƩ$O5!O944"FןWoq|ޘ+7f : k񻷢=;  50t. 
AzD2k3qo{I +&0X+'<ݓLP@ Ľ/ *2` s=X9xbf# ykZv򆩌$F9wQ|ș4#AOl0گ򢜉{#-"oyLEY(L5Q5}\vD0R)_@o1ۃᨴjRTΎ0iZa$2q|4c 4*a9,װVY*^S#IXEvRkM#g`Z+%_Uj4cq/JoAh()<@'/Q" +ys,cG{CavOq*@CA2)BHƒ2_q(y˷䭕|!Ơߦ) ޔCD.Slhn4*m~k-*i,R4X"I[@ 5oO#if-LvW  ,;wڠVV,gл$a!q ro IZ 9U]aAY:Sw]-lH׳-س瞘pЅ,JpPΆz,k% YSSJG`.~ZĈt+qc|`nQ =6:]&1O1%Ta.d|@rZ]f{N_88ikC_ddN DV@ac8&j,`@ꉖV R6갊~IU7k5-OzT=R6{lr\NzvKUD eGd:zgkLoTV)W$[ ʜGٝ:BO97dbtZ)#ȍ&ϑ~?3܎<6dZΕ}d^E8Qjڒƞ**.ύB6ը9̴FǸenZ-FYav;5mq g/߾oCo3w5[(;'¾ϱĺIl@z;_.vOgDƸ̩ͮsme]\rKJf8y7e6b3`/]*U }SrLbA^>ſͰʸzzn_vP}Aa$zQ÷ȬȻ¶òǰֵ~vqmib_^XWyUQ~FnFjAi>`+f'lHF.B,?)Ѿ˼͹ѷ˰ɽ˧ğĪ{sifeaĕ[|XSSMqMrEAm(j(i(d&["P5ӿü͵޻ݻȩ~}̡llhedULoDk9s2QIDATX՘epAKkJ)4uwwwwwwwwwwwwwwww(~t%y>M$ZRաk?|"h-EQZP%r%Q˛vgZ ^óhXyfw6J3̙L:TZ{+D_FKóG ##s4R#N(1ikCF$͵~P%UBhjT;\mFGYFJ|:,E2_XmFJYd=n߃6`,A[Аir̪EYh5Q V}~`YkTJc}#YaR]"lc!6]Sp0mvHF0rX)N> dEd ޴iOA =f-%2`ZT2S݅ ʬKJy:1j'3Ue Wgj8.cHq2ƪ0~V#USh L|yB-*=[;fNt.Ʌ}9nܻ8Thv,ݹ _TvA%V\$?,kTVt1U .5PvΥ4`at{$;3( sŠRA,StI as{ӤXU#Tf1r #k\l_śщQ0FNkLu(X !O5%1Xj[ ,Riy:MZ=ލTbuG a;k\&̕PTufF* Nt3Ѓmj%Fmd:XEPcB9ĺCjd2ȳP :-g%zؚHv^%}I%Gva~SFԃiJ4-QθF \N=5@ιNXTTsc|vy1yGkve玨RMV*Tn/glپn(&-s ބJZ*-as'+ JJ|>yM)IJ,} SذGhmg 2X*AYHIa5JA T';/jᒦ,#vf7%СTvg"EV?h2Lg)ba!M.o¶ӧo3Od`aS;UfHпpB]u4!Tg P&,%ٌДUAJMaAЄfn/v`іXqw SuvBM~K<Xݭq< V6lX/`N`lNẊ!5wr$c]rX1H`V 2h&p:9%aUMV 2ٲŅ\/64 빿rvdžX\)b#rȑt}Pƕ-=Szտp~Q=6II ;Ls.Q gPBS֩Yt-XB;%l82 R?IiXX) ʚ{0`՜0,v*ØS2Fe1a]Aau&pjΟeV cVL @\(a)-;}o9R$)){]13+x\ÎYV y ^O(7S[Xdzdre~Pig^XTzʣD7i c22dt3g,hhgmaPXdy٠6;γp,E O~Xhx`7_~IENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/apple-icon-precomposed.png000066400000000000000000000200741476164671100271410ustar00rootroot00000000000000PNG  IHDRe5PLTEr+W!̩s,<(X"V чZ"ԁRY$ރX"Y;r+ܨp*ւṰt.ɬq)n$ͫm"̦PQo&l žͨ>)˨t,p&V9Z;øo$~Oֻ˽ôǹh;y8e7v3_-ΨS7ѽѯb16$ Ŀk}Dk@T^>:&Ǻɴ̫wӺͶ¹ɬ|?ҷϻȾaIB\)uMbAʰ¯phEűϭ;|n_yWqI䵷}`ZStQ{:}S¾whNMv,k(ҼۺohRnFɾroCp*\#xPoIķԱձ{wng[e&ȹYTK2ǡŠdzícvOb%C-ʢɧ}ɜfy=ѿ{KMNt=IDATxVKhQ5Q6D!qZјq!( ( U(QB  P¶B]*AU7nD qgu}I٭B_8%u@,cbq{/`8x]y8?1?tJ:d/GN:A aCXrʥ,;yÙC,{q(vX : 9w.`o/wtc!$`DM[-$e(P% `WڼݔJٞ!ͣc2j>7?)<9? 
%(@fQp$͢4璳?2 DlHh6x#A7_OAÆZz __Az|;E8K濾ܹa$i,dvqRVna!C]H,cghB џT#n+N߇ Bhl~aᄏo]'F9V0?G`+HMm$ah:R0#oBȌˉua/D'H,0;FHh2 !wP7Y$ܵuad Ja (O 5'ŠNfX.R~WE R=Oz-A &;ȏ 0#頂\#* 0`lzN|=i|4}w h9J1kW(pyƁAwُ-!ш&()0dzS '#5dD(^6! sdeW/B!:B41|L^h\D(6#>%XG;6T*UA 1KlG<fC wؼB_7S`(فoL(G; ЮhцVRXKsy+7w7Ƕ Ru 9\6uW;e}Y+pisT_|}qj@A;޽k˿i&(oJVc̈iڄmր4MI\hQ7" hLݘon][jPgx+y;3s}pnZd!@-yn= =:)tVj'ٲ$ōF!< ,`Kduv0˥kCU'i"b!nNﹴ1Ip9Qw[/}]c #.IxRb!bk-`r6 cbȴރu'Mv•d1֙ :lĮ$0e18޲SЋ\ kJ 1%,f~ĠބR uY?&8ĎbI1iUD Rm !0 lG`D1H:7Y;$gyF҆;U¾Ukw藬{raW(7%NcUȀOm b/E:Ġet8U^]I >̽1P_y؏sfe0D 7[7H%>h+5% lDv xH7HB>… 5[ATlj&41t<`q2Vx-wQV0PKTJFnb(v|C+\F!k2p8O̧Fȯ5!;/x#fB7Arnr3S#|V/QE&L pбBC_ +.W+K\MPjÿ/Ob<8 ;)zЃΌb:V-P6w ["3q H:W0;"W?m}'6m<}w>m1o`vt-\qVCy! `C֤Ǻq&%DRaHw|rk=qL.]ɲVSw`m)x{gcI@{W10d#믑h,(#9?ߥT@珯ԯuMlhImUS:S/D鸁}:mt_!EAډU Z+a1Y2x.p,YAO ztU/=a^7p%Y-S5mc:r!'HeAK@%ǫm*V'}b+.FB`*sw@S7g9*C^W<!q ʴ0^9 +h @hLrdFRG *B"5PQ4>,-hۍ"@F6[us:,dg(+ՙp9;[,绝)c/E ({%T l Ì,Ñդ%`9P|Ab9:x onl-qԌwycJ݁8爞jsPAp?%$fv\nD6qGŋ*-0ϤW /7[ Hc4i;-:fNQD; -V+l0PuXBNZAfdh,~.& Q^Gc'Ǫ;u `x,h-_V+;(!k >bt!zD"V'*_9! v;1 3e7T-Nӻd;!subA  {tZ*uhs쬃M@ECrR`EWd Uk2M'BzDEڍTemΓ6iTZvF09ev¢S[ =joBWjFvڨ" E ''$_͛!J9]$82^zS]LI,߱n/-4MӐCh SETa'Y(5>=rvjh.W+ E\E[.MFǓi:PCz-``BPfӖD˼zEPިFkd @Tm ܖ.')x[R͹^BIH}.c  Lbx!GS+RSO0nz*vU$8Lj4EEH OFr -ӧLv"5`JZY0U~s׋#DI&-{Hc*_p4Jȑ]S Yji%p%Lw7dPi(ÃuTQtץKko\(?'$2hwN0@GhZB( 51F4&1? 
#`hPL"*K*tH)B-UV`ZBĝ"t%y>1)3!P<Λ{s/'i/ 5Ӡ/WLG:kP4?}򵝶m跓X CfE͠8Oå6|2*rc8*iƀذzl|ódJ/ !n{NOD `Jz4p%aZt @&1@Xٍy}pxesIr#jM܆C>\x˕Hivlp8ĕBP- lPђZЋP6/?*+Ç i_c7Z@(_pvC:v퓹LʯZVN$\>Ƃ I>zmϔ!WS`Ce`试^H7Jx7BG|7Z( C,4W?Bv} 䈈o5W/|޹\;79EDru36A48q"dr׼"ELN+'߱(#V28`z]M.k:`p,-d\h?'6 ,F_ar ?Ǐ0́tMX#ex&wEa Dvm],`W墤hv?-T-.,FVɮB V_Kb3fr=]a]J/9:4v,"+ӕ^TSf-jS UAW<9: mJMz WhjjsA ʡx k_r8V?ҿQ 2l) z'qhz>Y=V*i8 1VfcS x,,*=(|m]d~p8JvoUd9{!ITiM\GU"<]qiO(jھjivwKù#Y$?M_J# J.SIp(/>ypԾP(V;{8igEq[{^TΕJh˰HYx|Vx3 V99/_˞-^{|a4W9udٻ_NR{G>H /JYm-q9Rdv-Z\\,4#::itFHN'K>|d7G9HɎT<Dm$e%2Zvn|}|&"-:JN4c܍LE׾gSb~a`C7X aI_MNHju& /Q+zӳ/^H%RAleAf5 e ʃ׫A ɽs$xy/Q}$cCSW+ -z}Uy2a?u//yNR~IENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/apple-icon.png000066400000000000000000000200741476164671100246230ustar00rootroot00000000000000PNG  IHDRe5PLTEr+W!̩s,<(X"V чZ"ԁRY$ރX"Y;r+ܨp*ւṰt.ɬq)n$ͫm"̦PQo&l žͨ>)˨t,p&V9Z;øo$~Oֻ˽ôǹh;y8e7v3_-ΨS7ѽѯb16$ Ŀk}Dk@T^>:&Ǻɴ̫wӺͶ¹ɬ|?ҷϻȾaIB\)uMbAʰ¯phEűϭ;|n_yWqI䵷}`ZStQ{:}S¾whNMv,k(ҼۺohRnFɾroCp*\#xPoIķԱձ{wng[e&ȹYTK2ǡŠdzícvOb%C-ʢɧ}ɜfy=ѿ{KMNt=IDATxVKhQ5Q6D!qZјq!( ( U(QB  P¶B]*AU7nD qgu}I٭B_8%u@,cbq{/`8x]y8?1?tJ:d/GN:A aCXrʥ,;yÙC,{q(vX : 9w.`o/wtc!$`DM[-$e(P% `WڼݔJٞ!ͣc2j>7?)<9? %(@fQp$͢4璳?2 DlHh6x#A7_OAÆZz __Az|;E8K濾ܹa$i,dvqRVna!C]H,cghB џT#n+N߇ Bhl~aᄏo]'F9V0?G`+HMm$ah:R0#oBȌˉua/D'H,0;FHh2 !wP7Y$ܵuad Ja (O 5'ŠNfX.R~WE R=Oz-A &;ȏ 0#頂\#* 0`lzN|=i|4}w h9J1kW(pyƁAwُ-!ш&()0dzS '#5dD(^6! sdeW/B!:B41|L^h\D(6#>%XG;6T*UA 1KlG<fC wؼB_7S`(فoL(G; ЮhцVRXKsy+7w7Ƕ Ru 9\6uW;e}Y+pisT_|}qj@A;޽k˿i&(oJVc̈iڄmր4MI\hQ7" hLݘon][jPgx+y;3s}pnZd!@-yn= =:)tVj'ٲ$ōF!< ,`Kduv0˥kCU'i"b!nNﹴ1Ip9Qw[/}]c #.IxRb!bk-`r6 cbȴރu'Mv•d1֙ :lĮ$0e18޲SЋ\ kJ 1%,f~ĠބR uY?&8ĎbI1iUD Rm !0 lG`D1H:7Y;$gyF҆;U¾Ukw藬{raW(7%NcUȀOm b/E:Ġet8U^]I >̽1P_y؏sfe0D 7[7H%>h+5% lDv xH7HB>… 5[ATlj&41t<`q2Vx-wQV0PKTJFnb(v|C+\F!k2p8O̧Fȯ5!;/x#fB7Arnr3S#|V/QE&L pбBC_ +.W+K\MPjÿ/Ob<8 ;)zЃΌb:V-P6w ["3q H:W0;"W?m}'6m<}w>m1o`vt-\qVCy! 
`C֤Ǻq&%DRaHw|rk=qL.]ɲVSw`m)x{gcI@{W10d#믑h,(#9?ߥT@珯ԯuMlhImUS:S/D鸁}:mt_!EAډU Z+a1Y2x.p,YAO ztU/=a^7p%Y-S5mc:r!'HeAK@%ǫm*V'}b+.FB`*sw@S7g9*C^W<!q ʴ0^9 +h @hLrdFRG *B"5PQ4>,-hۍ"@F6[us:,dg(+ՙp9;[,绝)c/E ({%T l Ì,Ñդ%`9P|Ab9:x onl-qԌwycJ݁8爞jsPAp?%$fv\nD6qGŋ*-0ϤW /7[ Hc4i;-:fNQD; -V+l0PuXBNZAfdh,~.& Q^Gc'Ǫ;u `x,h-_V+;(!k >bt!zD"V'*_9! v;1 3e7T-Nӻd;!subA  {tZ*uhs쬃M@ECrR`EWd Uk2M'BzDEڍTemΓ6iTZvF09ev¢S[ =joBWjFvڨ" E ''$_͛!J9]$82^zS]LI,߱n/-4MӐCh SETa'Y(5>=rvjh.W+ E\E[.MFǓi:PCz-``BPfӖD˼zEPިFkd @Tm ܖ.')x[R͹^BIH}.c  Lbx!GS+RSO0nz*vU$8Lj4EEH OFr -ӧLv"5`JZY0U~s׋#DI&-{Hc*_p4Jȑ]S Yji%p%Lw7dPi(ÃuTQtץKko\(?'$2hwN0@GhZB( 51F4&1? #`hPL"*K*tH)B-UV`ZBĝ"t%y>1)3!P<Λ{s/'i/ 5Ӡ/WLG:kP4?}򵝶m跓X CfE͠8Oå6|2*rc8*iƀذzl|ódJ/ !n{NOD `Jz4p%aZt @&1@Xٍy}pxesIr#jM܆C>\x˕Hivlp8ĕBP- lPђZЋP6/?*+Ç i_c7Z@(_pvC:v퓹LʯZVN$\>Ƃ I>zmϔ!WS`Ce`试^H7Jx7BG|7Z( C,4W?Bv} 䈈o5W/|޹\;79EDru36A48q"dr׼"ELN+'߱(#V28`z]M.k:`p,-d\h?'6 ,F_ar ?Ǐ0́tMX#ex&wEa Dvm],`W墤hv?-T-.,FVɮB V_Kb3fr=]a]J/9:4v,"+ӕ^TSf-jS UAW<9: mJMz WhjjsA ʡx k_r8V?ҿQ 2l) z'qhz>Y=V*i8 1VfcS x,,*=(|m]d~p8JvoUd9{!ITiM\GU"<]qiO(jھjivwKù#Y$?M_J# J.SIp(/>ypԾP(V;{8igEq[{^TΕJh˰HYx|Vx3 V99/_˞-^{|a4W9udٻ_NR{G>H /JYm-q9Rdv-Z\\,4#::itFHN'K>|d7G9HɎT<Dm$e%2Zvn|}|&"-:JN4c܍LE׾gSb~a`C7X aI_MNHju& /Q+zӳ/^H%RAleAf5 e ʃ׫A ɽs$xy/Q}$cCSW+ -z}Uy2a?u//yNR~IENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/browserconfig.xml000066400000000000000000000004311476164671100254540ustar00rootroot00000000000000 #ffffffpython-aio-pika-9.5.5/docs/source/_static/favicon/favicon-16x16.png000066400000000000000000000010211476164671100247730ustar00rootroot00000000000000PNG  IHDR(-SPLTEr*V p(Uv3n(j=̻ɭ~m`}?e5Z!vNµƧávzSG]*[)o&c&T S˿ƺǵɰŬȻһŮzzkhfcb_~]NGtG|;s2_,`+l)^#X!iFY;g<IDAT50DbMK Us_Bqgvg|>qj 1ƽJ L,f<0Vs*f7UU1"qp1FX6q3%nXS^Sbf@)CKw clu~##1K4-Qn DKMf'/`^ȋIENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/favicon-32x32.png000066400000000000000000000021541476164671100247770ustar00rootroot00000000000000PNG  IHDR DPLTEW!r+Uq)V 
o)p'Z"s,ӭv2Y#ƹ])ͬտUz:_.Dz{_uNEoD}?h;x6e5d5s.r,tMS_?Խɼĺʲ²αż˧õѼìyufe`ZwSKIj>p*](e&a%yQTnIlGcAÿлƻ͸õç˸ȶƭڷаƩĨƧƢwwtqnb|WsNM}FqFsBz=[&`$V9S79&7%GfGIDAT8ˍUs@(v1ci6IVҺcٓߜݪ j;] f ĕՅ:GN@i'Z+ Bj+* N5eK.9>lYٮTY:! qWqxT  sN||(Y ڴ,.e(bt--^bKR၃K:Lß>n_X|]#h>i3r<4>) R )|D(C>_*J%4`<PC>R-F˔ßޣ" %p_"qc۶ (>Kꋳ"K=.qg MLz0ّ0i'ZYpD 3hc0CM6)zJ@@ه (+f*0bh6('4FLYmnNQ^QM+ֈ#Ws:Q. 9Ճ[Th{b#%ɐ4>_cyv}_FfSV/2\*Pl&{A<Ma7uvݪ$:bQYZE@ T^+t/ka|dBƑ;oݔ,>̇ 1^{p f l!5O52RY( $ sp#C^.JHiEY1HF--APƬ:G8L^w{=k^ @I'bvD';a"t 2(OBoff@dD,Cf 9B]{k˔V::ϊB9}Q|Lg uP#Mi9 +!\۪iSomhN˄r`8=1'Lv_oO]'` ].2rѺX?z?#}#n(r18GնϻY~s,v@UuЅ?jeo`џ -<jlQu1xCmM.SNeu*4`۩C>9*N\#FCHu4@:C 0' 7a 8@Y5kU<ڹ+ĸ$7ߘH[)k"HLnH_nz%P]_!)E9PR٦H1]; /ra@F6jÉF:ڹs硛 ,dj uAFt^? gRRƖEOi,gXHs=Xs. I\=CL\-# '|9?*fN(vBr~|iat6zq)*ė5"9qD=GVrMV3``5bk DNvP0eo0v,tt@a%;^xnBJDŽmX6|Ll4ȍ0V)E~ST~yAN<&bׇܴ1$Nr"W`8(oRquj./IJ)7Ay_a%E _ZR1f1$að<$izhgͤ%/-g}R|Xmlhj7n/-(PP`l%BFǑ]L"M]ZSVh5.*e3sCYvKmԍFT- NK*}SVǥ='h]N4e+#=pKJ\B(ZA$ RTwŁ^|ТՓsJήUpPq/Qms:`ހmS (hjzh4 uO#lhYpD"L#c[^_IENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/favicon.ico000066400000000000000000000021761476164671100242120ustar00rootroot00000000000000 h(    f`{n!kj mSL{O}ctf.t/u&_Mr$Z$YJzh0u+r,s-w=[4N"Z"X!W&[+q-t)l$`+s"ZMt T#Y VB!l,n%a+q"YMu#VO~:k:z(q,v*p!Y!XV/`%l5w*p!X+]R~LkC}9fg?k+p4u)["V&mo+p"WiT{S%n%o2t+]RSGplcgSPx Hx Fw`tpwpython-aio-pika-9.5.5/docs/source/_static/favicon/manifest.json000066400000000000000000000013201476164671100245600ustar00rootroot00000000000000{ "name": "App", "icons": [ { "src": "\/android-icon-36x36.png", "sizes": "36x36", "type": "image\/png", "density": "0.75" }, { "src": "\/android-icon-48x48.png", "sizes": "48x48", "type": "image\/png", "density": "1.0" }, { "src": "\/android-icon-72x72.png", "sizes": "72x72", "type": "image\/png", "density": "1.5" }, { "src": "\/android-icon-96x96.png", "sizes": "96x96", "type": "image\/png", "density": "2.0" }, { "src": "\/android-icon-144x144.png", "sizes": "144x144", "type": "image\/png", "density": "3.0" }, { "src": "\/android-icon-192x192.png", 
"sizes": "192x192", "type": "image\/png", "density": "4.0" } ] }python-aio-pika-9.5.5/docs/source/_static/favicon/ms-icon-144x144.png000066400000000000000000000132431476164671100250700ustar00rootroot00000000000000PNG  IHDRИPLTEr+W!̩V q)Us+<(Y;o*و["q(p'ւVͨӄX!t/TĿɋe7Y$Ⱥ[(w4;'Ϋy8W:Jb3Ŭ˪ºC_.:&ϼǫ|>T ¼qGh=]+wOqK㶳`X{=]=riaVxUyR~BlAmHfCӿɿþ̱ǯͬrjrMnEi;m(μθ˷ɭœyra|[|Ra@E.@+Ž̾}xa[Qj'b%ǾŻ|nd\[vOMHGe&^#·еǵ˹ɸ±èiieuUTOR7K1ѽôܽɦ}}r͢o>k9e1O4ǣǰְ{m‘WsAĩvĕ]m~^IDATxkQƝi+d;3dĠDAWDЍjDABFǢ.ؕ(ZZ7.\.n DWdq".%;9Κ!hh֐&p1W>C5fddd+)ٴz\7c)/#mgRMë@Ҥpx[P10%\3*R9p H'B$p :$"&H6z< it57UZv:aj3Q$;/!;h4V)@4`a555ZZxЩ)*)M:E+cӔe4ͱ'SScZYӲYXk۔7_1;qЛ-d=%k>S9<㽯 Wtn("yD}CMcj3uSqNhC8 6e!ܮwkD7/d^N:&ؽP#{*~<4lFJ?%v{ct t`*,wzhK:vubx8,.yc탳BֵdОۅPtt`DLh~/@:8A:p`р&rKwA\ @͹ uf^O] :fwhğtFqbs }tSo%C;ܻ`骽ؽ(炖tt 5ՂV.x*}\ZKa'B)ڪ#׺y\6ۏؐ -(t ˰w fe0^ Aכe*یF An(A>< H)a3|ҍV Q)jZ8CRā}0R_!3N tusZ 5Uw;k:}*ZP; CU=+IKuT :يZKρPmш660 QAH3M;28צQLLG D!XiNbp;@>>{loo'3uaH@T OO@06L Bܠ }~)Ķ;V ?5? ٵ-aO/{a332Dᒎܑyr`/ kV!zD6^D;i~DO@@)aT@-y¶MJʬ-o@ J ɿKk < Y{אK+VL<9dL w|\9& ^sW?4<9%([Ш( (HS9{oػ{=]c,cK݅AapuјLL0͛oVfD+ ClBDC QyGh&H:8Dդ!@w4`hȍ<ͷ,C9kSa܂ځWTyQj7D $s&"y ɉƒFK! h d $ȎnX-[/xAuLa *$Re2rSm=oGN>"Ws嚼E6-nP2G46<+˼(.Ep0 mtx&ec{Lnd9p /rz QPJlN:S傁Fcz7$ܥ$ o\cU&@.5߀Y)B@Ѫhh )_'8\x?Ы  WlxA DC<ХK/*J QSI@ wQ$zTήW)T#3~ܝ 4;j.5b}* P8kM5}ZZC5WLFYG-guT=81U+k*!PxFE=T|V~P8kٖ3c~-] "}$Hua/!/KE>Ee8Lv-?/&^zEjGSʧၺN$atQIU'ܢC̙q) h=jg\'Q+JEio_bťGrcv|2G.S} C߇ZSg/FWP[j7\9N(Rgng;}hu#ԩhhvfChEc=4}]hd {VgD>7l_e+ p@[< 1u ۿpql wڕ8/OA*Qُ5nr,t#!b?T Y$(U u@)ŕhճYF=dP$"{ l:2NWi,Ns=  ÇΝ?knbrѵk s? Kk^aa.Q9F,M|F lkm$[@09(Uj_v>>q@40M+R#cǸҮl֬YffuD݉mgh@ WF 72 En>1@ H,y;s<0(Vѕcfb"^R "nU"&DEQӀ.h_?#㗈NsRgKz6qŋ[q, J+[}X 1nn:tdt~-[7}ٽy~[7zMU$n *V=1Qy_ĸ_Re2BmtS-5jVM(ۺ #,C=l.(պ4n%\x"֠g=VȈiZ8pidzB]W{'C,4QQ WJ y,d2fTuN P)<&ΒW{qQy7 ^O-UYPkFӦ=ECʡc+: 01jDaS,%[,QsUˇ)oNYIF.9QɖS EVYIj! 
&$b5d wWπȱ_?J?DUݒykvKhZUF:P.+UtcyH)P+ݳMQ@_4`$ʃm^F|jN3Ž@MM1KpL^y𔲁t>GBycwP) < Ek e[V%Ux&wֽe~ H/k z-ylї֍q9Ƃ5ŷyF me=)vXwWSv!?Db+_)ô̮-Vlt"2Qъ{CNQr=ЭXdhArm+E2\\\t(#,C<FJtB91lE//fJRja411zr*sK2GPV=/?Xg" .ϨxŪlkc3Afa&vuSB\ۼ KN csefw<8YlTcHb]Uဘas 7`̞;R XȎ{i-;'X)2sb>x!x#%f"όp\jt򴎾(P0]X߬OQ# ƽ6u+hk-]өm $B-A\nnDqAML\11Ԉ`\Nݻo iw2߾}~˛ScEy$ѡz\,[Z8W݌>WkBB[Kpj Wfmƙ!z@"h4"9%kHSJ"*t~M`.4=M35%z2>i.76orGߥF%۷V"!YHizoCB$A!Ȼ(Tg"?},G< dgZU/C<$Q/ZBfh~!'yVZh(bψbidr GxU>kdki+pRRjIjz^ Exke: _PK%Ҕ #/ ]*uNփς&B1C (B@J㡗9p:C e5EpՕ UZģ]WctًڌԬcC]k/hPC %ś3mϾT R4=6#m҃G#PVdꤲP'OeGщ%]-kfiUR;ۢ<kdZ^! I6]؆$故钅*3"嗅T],eT\x"Y5"% d'XqEBF4wnhY:üBGQ/*h9%-Uw yxqBi_%+W%& M.ʼnV޷5Eտr3ȥ(:AFEI^Wldz[u(:s_Bxa8|1;=yjtP 9F"k!m S @,Hl9_E`K1UG fz@%Krվ aEղWD$Hzb|b/,T(_B ' ±cy xC p,5O DZP(@BlEcN/"(,* *^8URo4ΑV>˦X,:܅cu/kiT*Yr+GKt4W9XJd3R}Mad4791&X`ݣd2P`LJXxֆy٢Egg} }\k*,0شS2k{˧b(]P`9تs%:dVG`w1r*,b#_ ,##ȥRp`]#̞Sau-(=87RYQG8U KF:2 NcS!夊:#9ʳd=%Q$K85z,,t:+丸(Xi5jfѶ cvmrXC1gwi&y_65}O{"cI2XˠOQпl׶|;:$i*gV\V{6M6g-zlbd5e4wn Alıgu.jLlێּ&"֐?B#9]g8tʚrDEpYzԀU'XԷ-Әp!.ȱF_jPs/bEELƋ!IѤ/c=7z %Vq%kΐ6š/絨Zgg}SFBKJHN%+AÒ,NY3yK uA"HŰjE{,ӆ IV%I[ c cId^fsC|ۧ?ɺM.ӦkYYb+G5L8ۃZ1.2+@L&sΤ:*VêVIpxQEFD.v/1aBJĉStXpHεnDSSS'N2AϏ덿jaVb$ _ox1bޖ ٓ+kz b{-^e30 g5O= bG8nx.ouCp,\$g!dڵ|b-55Y$,y֓xjC"VZ:/u}$ ˌ&pp`5w֋?L$q-"RƧX`e}#TR&ovF跮.JwX<Qy~#F;mj%:}~P1kTIJVcxWCaa\5Z>1 n,ekM%uVb}G[Z\|8\oaTA&OV1ĥqc1rSW;=x)F.ZT 8V`'KU>|[d~17ҘS>` uklPL\ 4V>vu" /6ٮ}RK5 ymvWU.B57w*ځTΦokbP3ﴎvv*i6'iqɓk JHaP f~tdDžzy}ǛilVaf>d|O(b]X'`g<^XMD|$K٣RBoUw aIr|,>`)O[2a-Zi;3[S_-6l}@VDh;IhE ^`c(S5}Y{pҚ.FÒ6L*5IIcP C`6QXS2+dH(s 8|f AB"uVv;Ԩ[:gLJH+u2ǼVcvV1yJJ^,80T mg8aOj1wV>L9E+f6 1h 6hP%i PA`&qB '#QI .k-9`^ؤH6/E &+,Y4: pr1~8LEH,tP,$sϊZ9kZ5lQ HDh*ñ52.Jsb&PѰp V +CL T 0ge&AlajH<H8p8bQ8 = IENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/ms-icon-310x310.png000066400000000000000000000422531476164671100250610ustar00rootroot00000000000000PNG  IHDR66bNPLTEr+W!̩V s,<(X!q)Y;s+UX#ެp'ϫo%̪Ѣl(āSˍ^$Ͻ̧ͨǁTȫn#ĭt/W:v3Q˨['̹=)îx79&ʱRg:Ƿ_-ކ]*߉b2ޓtOt,yPά\=ՔxUҾ混ĻⴊXe6}Az-WP58N!2 
fֈ@CO$q/g}3BGաZ2<ϓ/pb,,pX0+ouGX~'f`ҋI]4I@4=eDS6n04a$%ώK'F:9'"pU.V*䖬JIfFF*B.uoH0#3S/ Ӷq6ͅ,:'u#B`$% 6VlL-f]o$Ѿ=_{Q4MZ?3CiM$A0$ .rI.zHC:tJ,#tphAP 1C dkN^H5Z{R)|vpL@tswѩ9bLبX#iOp )wZ|Ri+b1N'8NО0Z|.8hks֌gU>6q3oc}XzN?~~r t*d}*ސ.jple7֛"p\om;4jp67ްXxhgj~ҩmsd8l(K8<-[N8uW oFYCG}_3LӴ,OcF"G78"p7=k[F=.1]n墶 *GRU-ځ^ω !2:#m1&D ab Dat<8_ގɮK-[pvStL(kРHVZi2q+_JCO% ٙi;&d S "ۨhtMtT)]xmT]I 5 dIUYD%NJo,(zFE͌ K *azRQ)QߏAtJ4yy{G(NprL 242̈́Be2t47 YhWsaEQZUۭ%˺oW}j.ƧŸ4Lݩ?{sT˚ SitBhlPO:q;mr;CNtZǂF?Eb-p007sl4P Bp;%ˑW`n^ϒDa^h#fɸAR1TKպ_A4x(c-d&E7t47CKd @pjuk!ng>@G7W[)dsHټa Lb R3b.CSm-5KH!T;B%k]!XzmxDnS KUUr;f^⥫_yBacL篓Fhl]r<0tٕp$TKFdS,ԐRp>Ìldyؒ!zHɒ8qAGhå'[*PIۨeQ+/Y=61ůL+F*< ~ 7BϜR}0Dl|_͌:65cnЦsf0Jo|ĭmo[pI#!$%8[Fd?n=Ա: "D近$J):Ԏw Y?9& k(ԖڦKJh MI tR( -,FAE QDŅh DxЋ'=9_~]d޼̛<_͛ԣ w4b1_uߓuW]FE>1-k54k`@ i([P}zO d8*$QLC \]^G2xuw?zUĆ$=l5*RaYuUYBt.WLz?&b؂R)Xr)S#91!i~YY}`㭵E/$P ]UGRؒǞ<3?%H{SQaSΓ FLwENn(=)1[]> YnDAl`qGOt(6Һ[T  bCuSj}`D |hWl bWŰ<]nYLK։E 3QvCPlG k\Q,nQ7eLy&7W׃C4]wF~VŢ=|N'˒WobjG8<0mt .`_6т9*TmLd n:o{^3H|׍q0XQD"(` B@ຘ*iWyw !6&/6]4hRHOZ` ~Puuy]ViXF%bW$pE35Ķ:< &w>O$a+<}[LH-P@Dl$@k߹Z^~:JSbÎE6WEf̙hh9Y\`-;(ؖReO$Վj;+^R?&WrÌ'SW[LP6ĨK)TeD C.MiݣnqJ~ጓСHLoAltk [3"Dg? ʆ;u֪xF,NqϓOhsk‘J/5±]jt'}hPij,Cjȏ ,fԺ̼&%RĩJissrynvv-(~kظlP,AQ ކ7)|xVxay'cOw'h*^]Jܒ ] .bæcvfyBfn6plsUk]L xJŢw^ I"akofJ2]_9A؜0Q2 ߨ Z;(_4j2BʓGֺ-LʎW BʨťJԶƦ<6 ` ʼntO `cW "M )4DG&uJ7=Ȋ+مuqc[]+hɌQ1G&: bˢb[`Ղ^Xhm{c?l+v7]shza.yonn-nu#j*52MgXP`) >c؞ lÅNE6ΰU Ffy$c s)M[G,+jPؕm+ {+_k\]v]E`;i,~@_P굾mZb1<qfqϥJU]4}k.h}ڧ<&4'c؄e6NI`sF4ojstνW`c~06F#1"0sȆ69 \YA*'*|k f*A ܀mxd #.C'6D/׬ {c;°m^_J. ]wCi^ڭvʽ^H~@om#ŐGϴftKxY̯OL6F1T&0l@TNbc}&VjN1`]tYnzc(y oU +wQ-?(a8q)Ugr'6 4dՎVaShY+2)B_ ʼnV1=?4ٮ^A$Wך=8y_[T%a, k]`?UFd B Jq>(=)QC' fD+)6Yx1J+?n|V W^nRRl9a::&dki'ְeu4<6\ˤTJpLm*JEtD햱C)gT4%[Yj3|=Is THyM|flYNv(d! 
)'j6O`_:Dl7$Tq+<0 Gkuk7Wr(?N`]9ULxNךfۦfM6 g:ĆHe_'qJ`:jaeBGf;rpQBK7Uu3v`ŏhމ+(XGLd<LdZL~N8x E?Ɠjµ5==leނHC`k{eqFFϥmnU=Rh@w,h`FGj9ʰ,xpfv]Z3J7rGR<*Q+XYch49 qD,M9Vl&'OF3"Ls|R+*:&,8r&ljb 4)1ܶS lVX=붟XXEӆ8t7K,NK[au28 R,?Rc}C@6Z_寄zMz[(S,Bfs6xJ(le}yY8F U{$da7weu)mJ&dfᩥ)*BGU׺":H!Pfopzo,AmFL)N3\jI}}6([X/N!(dBvE"Rg^j!,z( oAAwjO`K9. 43%n5I'|L/xh6գ nuu0Fi'AWJFu`@13wMV>f9r]PKJLPLu/ l^x)TXC%m3.aCcu_04wT DI [-r灁]xd+n-dT?VNKFqW^JsƚzYExtv56הWJLZcхv|5-Y3Ta[#6I K{j <Za R^oY4ͨeex= `24Vi+U{q_lƫs2ܿ޲Zz[lmgslf+ cw iKɦ5[D]#,ڎ,0[o9B[~B/{y[t9_,b[چw>i7zq$}F豲rU,M8SL >kM e ij6_QZ.x[ëˬgSE.ܿ6dşf>ZKJ1o{ilr5B.o#XAut- 񆜘J6`| 瞞o.o͑Xıq0JFbG')+,{fGT]ژ&?Gvln>>f+~&pmsMGj=)V?Hvb?};߮ ۻ9xGTOiU1=yCRk o ?ؒ6`=^!j5M崣hJ:U-#zdn33Bkֺi;q(NKя|3ylˡ9(-r [x駧J5 H9kicBwnuEC\@džmzdjO-Q&6nX%&VfLSo( qDklqVVAEM\8ZpXnqoq8b{Q{"c#kۧ=)G̅{߽zF;WR`px[?e+3 C2˰Uk԰~t'fH>r;+fӮ f8xj{֫-HڒJQ zg6J lb֠Ymґi(Mn\nCo[G* [|{CxmM?5zpR3#=1㟸ʩѩ8"\2~Ŧm2x1P.`Jpna2N-5)In bP1EEu )DLb*Ĺq/~?gۤB}xN OlΝvm#^6Kj+i8@ZBk o5'.|[ٜUr4N82rS\~و$tx4'ِf5Q:Jrn>ٯ@B֤1M U5Br9| [YckLS̍kS!)m0647W_.W[XPƈ`qn Ôk)D3c8:"Z+kDkuCmcovq+'"W92 n\k4.F-D@6gȤ  6if #& s/=k j~}i4>J#oY5xm}/( tDx> ny>+8㖨Q ^O\~=oBc UlE2{~x8p+??LDf@F)3ST΍aS6`jpϑGɿ4#+eGs1ƂMEBTe H?OJYmӣ])&Q΂@zEd&?`.)X8 9>6rr gљXGsEҮtN:A+ZV27%Q8iOS yͷa]g-R_tk$V8)d>x~xU!7)N#923+š_Q YX9u](MpyhI~I/(YuQ)Q::=S35#z$ >o$((7=9h|\\3^͵X4>ɯ4a-/))Ij%pPAe5kZvpմdlexP*>Qvf)_=Lk7ʍ3Fu3p:(?Y`.hoF?ۀtnS36j.- 8UB[~r&.Цۅ}&i|AX5qx*vP֞3@ys~9dGGbw}akI'Xkhd r G4R 6y8g$| 6X͋'&W۫EES!ۅva55jh,^?adܵ & rJHc+6fvkKefh{ϒ?ZTL Qys_>cl ׀AygZԪ1nfxٺbÆ>_57k&ڒR25'o6HIE~{j ̬R`JƊ ғs,/V4bM'*5jr 6 9+=lE<Ԥ.|C-6ϬQ.>=]j[Ⱋ7JʿOʍ4c5R|LLb崛7*'ĨÆ,x,^ө[Lg!5sUkKۊl|+Lܮ,}!9sjVMKN笖h4N9ljֻ םk@^+S_ xj#ɸql/QlkT-mGlqXvz-F=mS?YprDJKC1K96kR)&b*Ws-ހYjcb;8BGNښ{c׀y獄Ikdb;R% iE:洨qWsl/x1}۠8{c7eش+1;ZUy!{R1=KxހZ OYUv2Ho%׊ k>g /+_Fcr2%cӔalF(~ږSޢn} ly]S_|Qlb nA%+l:g8X o>V-١iгEr:JZ֡UlTwos1<_nf7aVe7~/;d>h UkurR 'hbbA*H56By[SKpD r?85NXJ6;v5d I-*T0'x7Gp7<[/11Cr\f&Eqa`bm-=kPMxбh+UVSR_Bi¸'ͅ^TKK&Zj5Ɩ*½I)*pZtny"MpE* b:"6%14cQc8B%| 1aւ*T" GRRrEsӼvۦM@_;Kؐף+a%B^m۽rԑc7\ajX&nfF06S&멉CAѫJVV\SGה&nwNT[On\\1hm 
MP>.kDЋ;Oq+N6s譻f& -O!Q6hq3ÍW&dZ1|-c~7J7iM?DI-~`zW=15. $ =U `&܄˕ifX32PhPjImΘ =blhUN(^#\ MO-π|;uٱcI}ҫym71Z3B(3)Gǎ9t:QZ7`Բ [{mXbbuM7r#l0.8u}%V.S.1kN:EvСY]y3$6~uʬYN ,p 2\,_ V_r-**)BZ.77zׇ1|#徼z}Y5Hf7Rl#sX^IG M{]t5i~aY>9_t)4Z߂ ]Mn8ry^H<|S]:@YHgC2U`cq#mAv5 C ?~oIRcN1>F(ƙŻe۸*lB*-ɗӬΚyq>S; ^ۥrvڄkoWh)/ۭ% *O}^OL#p#,ODM[ގ+X VOKT I{^FG\qJw(\}x{^/RHͧ:bS&,OZ6BlTBxר-<2&+IKvz5Ekn:UݞAMɅWxԈvVBlWGx~q]2:Gfzg[:*QU[jK]Df2RwAe ^R_O}XLh8T 4 HxƖ,PA ̂[3/T  yBxަ@4@FRRPi.C2JqxiLQj6.%8$|*YZ!\Ub_z-I(-D$4(ﳖVH34 '#T_~ ]zE6)GʓKݑ\_0|^Vאei_lVĭa7dW]R󣖮 Mlٺ~u:24 P4Q㪰mGĞRS_3άq#A褙b67@|\]<'_S,!J*_.g{UӦmѼy''$Ə4 8 )ßQRjQF!Р^OH:ѦUӦ;8cN*dy  0'u,kG*UrQ]I .cUʗIHοWe*_p4ޤS*^,K{PFnRuvzT޼˗.gwFUB [FQ2O>'ܯ%ź?x5qxleSsR9-Cmy h2NҳUqUh|1u \it#cxuU^p#iZNW#\f> F gI@P,*8z3w7%۸M~\19q+Ki:+r5ԏF۰( "bIЌek߄D %iT^M 2 Ӆ)Z|8aC\fD͍J`9| Hgt#XVp9(=}eS-0q62g+~Jժ.Y>1oе9ЈںO2-0z#szDQ>!˕2^<kXI%Um7YHܬj.0 ]UmY梈ʴ \r^EZ},׏ T\ד .mdGt/bp:xR*>ۚb^l*#݉;6P+Zp"hߑtNEWrc,dTzoF6@u4{>", I!*w-ݢ?Ү؂zd'P 0Kf<.yMBs o`B>Ź!3ccfff HwhŞf%fH1(vai:n41KR-w-kOm~f 7梶xe]w˖o`)y2sNcLMdBރ5FNwc`ՉʖCR?{_7k~5iQV]3(oR|SofT۱eLx`], ĹB gуTtev;&(D05u!(hpljH2vaR.P/ȋR(鼰ț~`tu <ζwi3v̫gԐ8tPt@%o+>3;j%x؞|dS)bHn~X}((&\9Jo>#/X >t腍9mծvunV,hJ:mWڠhKMDAP1T5=]3[6SnFvzM[79 pk&fٴԨ|!U-v,=jwe4zЍᣆf|Ow|EMl&h>Ӊ̰rsch)Z„\l6[&S-X|ǻa$̓ti[ ꅗfcr)ܶ0~lm jb4x$S)i8zuxىWlx960f{Y%4@>l^Svr@ `8_j8ᵑ "@D 7*xM-%55lq͖c*/><>jGtz`3YM=[~XO}Cw9SHج\`obK?cU 5sO AGuk8冫H@ϰl&r[gj?PلS-m15[)qgyGMs-пfl=HM @x?k$CxgFzŷ5:Dg]\VOblUTd5:o|\ ?/pԠ=4eǗ}n;6*X܈ЛQARF^3ιpi Gѵm&* s7=4ۗ*Xن 'fz'Drӎxh/lMkkNXT6W׿TKP8RyQ u>N:Zv0ywLVRV@W">jǩW@n޲wgt̡v2'r!*kىչieظ(݆O5k? 
T %\I1:fGXױHdJ\O/lmq iJD'ݳa8i#8:uO vɒPd4_rl·@g2P:M PޮfRރجFkF&>e蚢^P)hO34(4#gr\5JI^BWt᷾pnn8۬p|0\XZ.׋يZd0íe"Cm pqT\qe%>$-VVfffY?pM^9x]:" ɭy2>~*=NA5_+ 8HVUfZH^@{k D^^&jG`:2gLJӎX7n:q'9v_P Q * W]-<&[ ZA5l4@3 tcrl9Zy4X#xD  X:pX2a3,al ̷T[ɭ@GxTZBrH`0abXl { ]ڠI^ AUwtlKeRm'Na@TXj}7|eՒBU G~^>$%K%#KgV˔4u "_ Lh2NZ&̎kz>S$+[Y:#YcUNI:9R#3iat&!q1$)2J•~3: JS-0C=&|-_E7{]h9v(v` ]NGXZZZ`؍Kf{؄J34w"v&uCPYIENDB`python-aio-pika-9.5.5/docs/source/_static/favicon/ms-icon-70x70.png000066400000000000000000000052371476164671100247300ustar00rootroot00000000000000PNG  IHDRFFFPLTEr+W!q(V̩Uo*Z";'s-s+Y$X:ۄW!̨ͪf7w5u1ȪoF~A`/^+\(ˮ¹̬˪S|>y8['U ~T νdbZuMm(nIɺw{XTLrIFj>g:d4v3g&zQ[<M3ûϳDZɾͬȶıڹunmb`]xSyQKIq,kGbAF.ĹҸη»ȻͺǯҰtrjjc~a^z\{XV}S}ElB{:^#qKgD?)νĽͮõòÜī}Ť|yxrooʞif^]p?>i6b.^)j(a%tMsLV9jV]IDATX͘e@ X)rڞuw%4i?u>˱73aBްD^3i Ba Ea1֣HSbjܸO" [ZcÆaQ4!{uݷU̘k9=V(v崅3gN3rM`}S rhw"8\˕j!FU2ncH K{GQDCv;(!N GPBJ%sAƷ  sRγ,\ -!(, N]+p :wbb8K7N1nM&9ߔ[ FS;,̓:`Hc9<ڧ?a"1=w 2cҊbVFx ߧjIM%A @\1B&1)= 1'){_ɉ5#b;ΔȠD JMYкbQTӹ48AixQEL0\WGQ)NnY)|*7w(ң*QL=!ZĨ-[V yÙѨ H2MY cʃ5O&Y'vÃ5ԇ넩FYO= F78F!w viTLR@M̓Ov=NY r~mjLY( >lhbDsHa7dWJȉ1)aX'Dxrѝ18q1 u.i θɠ`lb2]j7Յ"h4u%1viYSr 46WW~P9d/|~\1z"LRlʕO[՗yh蔺ENl҆kM bEթLʻїQ!)qWPPH,Pe{58LytQO`C K\Z1 }-6# Ӹ )2ʹ )tGJOδ"p3kZ$i4Z)K}^{N+03a4Zlh׮kU#WH&zjDO#vuUJ8oş1UlnPWkgtOI" y_םyްWցSO=ͣTX)کqjj*ڨro02W Ε:/"鴬L߀!E@D8OԀty?~s\} nd uZMYݞb)yid]p*m)~W'e&a%[#cA[=O5K2H0ΰŦ¢}xoBkBe;~Y+o)k(Y'`?F// àӷ{t`ZvZYsLzKvI`6f5_4]-zU(zS#S7ٽ^|Th:f9c8`3\I1d&uP$^#J9"A/Ǥܺӵвnn̢mĕ[}iPmN~iNEd@lW=c9fR9|Z1UA*cI)Q6z`f-tRNS2 ų) &N.஬j=`NE90IDATX[@m}$ R +2`8+0wwqw.{g%YKQxY=pV]`-)Yƥsr{1Z#A O,PQ;zH{% ]’bn"du @UHƇ!/]\;  ,cX@LHb&f̀!m}|XGЂ*# H{T k@ʀZ}X5р*i@@6cla[\ 0"P+kH_< *Aڃi.q>q($ `}wT 9(I GLd@lч#R@e| {IwbA00Ϣj; BLELНp D 42ی=tF{ǬX_5|aB7oFUB0?7M[ K[uH[ #JhD5~&B<*3}&H{K tVTdvjΏt2]Y~l္$IY[#wd!'MHmJ""&W=6ILyjours,y*55{[Jﶢp.5j#ՃKglC/}֊Csne}1!cgTkꭧbΉ*.7톝/ꚬHW}CG5*|F]TEpoX2U_KalNr`XϞkkj+N=Vfաׯ?, lʮiG^=jh.?v2{i#mGkn2])A#aEqrnU$0n$R$Ŧ39} iY;ͣ]4k sؔr 
SFǵf/[bؾw-i4i87AZ[BQ|IENDB`python-aio-pika-9.5.5/docs/source/_static/logo.png000066400000000000000000000233771476164671100221200ustar00rootroot00000000000000PNG  IHDR,,N~GPLTEJ0, - eCbA& _?1! . bAdB- + $ !Q6pJ8%I0X:\=I0W9( [<P5D-<'M3B+5#H1;'5$6$5$5%<(W!r+̩9&V!4# 8%6$ 2! 7$U 3" 0 {R/ T . }S~T , yPY;jFvNtMa@oJ]>sLnIk)xOdB_?wOU8qKlH) * kGfDhE\=rKW:cBbA̭Z<azUھN4ʧpqgEϰL2S7׹Ӵ\u`H?*' =)ϱ¤Q6A+R6ǤD-àĩp*gDȦo)[#J1۾Ӷƨm)Y!F/ quU.r+f&ŢfW]#ټRgBd5b%lL%H0{mG{[5\+i'd%ƧvPj;_$̯bpJuGpZAiU;|W*qP(W%ɫ`|UwY4h'ۼvjVmTxJr^Go@c=}`&pfpS%$~l]ނWڤJ"k4):UJ hB-g-:Y8EMS:%[7Sg)@[%prvf~Dyr]̪#LɩLzI-KJۀ9Wvwv˹ٔfѢ- eٷ^O~y4+$c.v^:ƌX ;n>Հ&i]eŋ+$WTenpM,HZ/ Zu\z< gVڢ/@^+l:BuOVNR4`h:ϟ''A*/-Z]JBVX0-hVzBj|,ZDXWI(N-b8T)uV`hjmhJT}a ZbsrtBsôj",pz:f O޵uX>dKjo:nQ-V>-Ԃ-Liݹ|HuޯҺ҂ʦuÉ J`MPZ]Z-Ou2zV"JaKi~w5 )@l?p7Pe:Ho,G6a'MQ!i+_Ldh`]c7,Z-'$4kGk' ֌BӮ_ a} '!,UMa OasXu)DspzNiZ`FD1 XڰLT5 VN4-i kތ,8ߩf,XmȰ2`pX##e:B%XzBLý"dXzT`P5/CUO&* ކH5#:<8 TX,5,D5!`XZ Y&:C)q",~F 娰D橬 ySަZAx':k=Y#K7vK;!TX3;VcP'50gԝ[V$`y~V!J` P˾GKQ{E]NfM{5X‡ΤThY7-$Yvhz S+x%X7OI }{0b*[rnnnttn~ ݜ|(btl߅AV˩kircߓy1fpaD`9߇,ϱ1˲F_]1kpO4}W}%aс} b /Z@,LJVK4c˲6`<`M뜕BG]F a=Y\ )?'#Y"`sC8,23ХPL8L hv& kYv\ß5`Qz0BJ>D$T2l]*YH # K74mX\† KA#Oa++ځTzo5CË,xLCf X&Ⱦ i^Bz D`av #Cxǰ^?V].(.X'ᅉ* }%X6z0x )9,xf`n2s8Mυp]x**taͤLTUF`| Jw-|RNdJfdB0, jM{4 X݀ȂN챪X8,4ao lX?~yd9F* L.$ҨVZ>IZ 4Ǵҍ7ݳirVfv3/]k 6=u?/, kV+.&;{qiQ^mFPw 'QaA<QDTATQ73 ~3ΕBG V/Gf;6n^Ȟ`!X3NyzfV`e)vEaμbf?,=;~^oE%G1qP(-*u֊ډ,8\F8V.Ϲ"6.=?J*MZlg]gRܦ:B;ZSG%:sN XkơylۍVGخ 9UZ3o;IU`FYf 02Zs0$&f;~rjv\(NTZ,]fjrڝN-ON0<ةTg-HdR,qò锚:t@ieyn kYN%lVPV)afeefU|H )mV&V[{FXȢd`]3X v3RB-4*Ƅ[-6zk<Xpa !`Ak<㹔c3"U`5b  Y-pPx(vbOGJZ2k*/r,P C`~_c Y8Zx\Ayh47X|/  w<"Īi  hZx@( dR}9ia[ۏ  9n[ H!;ia1ϖXP=?BiAK%.Aaq.N*FqbS, %pG %X%6=ƨxie X(,a7t\N%2,tY(פ *1/tBahBnT,`!c^hюR+扶+K*,H<V|,TGYdKrXĪ*[:͟\ߤHVNJa-鑴6q*\*>RVWyHCCKi飪9Ԫ|n 뵌UUȨ ։ ![?׳ŹfAX0J]<Ba)_)KI\% y<ӃVz<+!wȰJi6EpBBa5=2X.޳ɬԯ\=%6kdz$/D`U 0t݉ђ;95Η>v]o`o:hHeVAXu/TAaJMV`Z6{?UV-KkC]k`}6;Ҽ𢮽Vhm6c¼\m7jUd5XS77מ _2:VV`;D"S +h.MY]˙`C R ^0BW:@Eyw={.UXԕZyAK X#Rtq;4CE(0:Uu{[$M*Ƨ u7=%MRQS!1X?gt膱'\R֑([Z: +,&[\3Fr;UͰo^*'zPba 
Z\$^Ҷ/XEXyX;ZMJH+!,: +rY k_FSIdlfXWUp"K,RZh 1n.XU5v8ϙD Xe`?~K?h3cwߟUcKpI$Zrq98? VN84-X gh|X\5jDJQ%DB`bNkF*U$QM ˊֳ?_7F+]ꉬVwp|Jĉv5X'b CpmW'ITIԮs-Ehe{F*՗5-OU;|d[C}ˬPNHQ!ߘ>SiؑV[J#JJ_iVn 1Qt{⃂kw] kCYuZ!cV1#XXݖ52T] }yzX#s \khX `E;]ʹcSL8Օ*sW#k:>ֲZh lsi6* Lʵb5N}V9k^zX**uuFJX=.炱̴f/ ,fCL .{tc-_V9jnZ8DQG inKy$ TCF ) "yE1_yp!W42,x#|b[pAnEk J *R}>EECgRيb/Zp,TQ-+Uqjq俰X^w0˜V`y^aH,޴ȩ!tY%$+Q̉Aj*ETp҃?xi7D(GJREl pNe:1);ӊۭh-P^TB.QaAdb`lFiJ}76*e.0Y*CH RR)E\K!Y`PTTJa!`ddI"xÂH/#1$[$!dQjP$bǺr^Ph%53-ʖLSs҃]KWFMyC>5߅vA;e6_0q9c;]>AbP t&ƅߞD9C2BDq^t؈ڒ_\tfpwO@ux.֍KbUif3o&aĹ y*ci816XLgs]CVjDun-KXV16ƒgase֟4C}{g3%2j_{(73%9Sbo.>ĚkQPX<vwe@Q XCCbJjP mVVDl"bA"zrf^/iLA\dg_}vfwҞؓ,j1HSo f7yʁ7"<3>Ցdlr%z )xn =&葮sמn0Iw F҈aWGsX' F{  ~smYd+w*XK{ .JmDkҳJe(%3?<,3b^ݜW':| _- ԫpr Zg:K:\jE;_+')=F־;[+ Bt4,0n_Y}uݹ{j:po{s.NhH;q;|+Hr=ZYݩd^{hLf5=#*&Td`B2N:~HI:ǫgήEX2VòC7(˒b!*$ zKKuò,-&@¢R]n t8<Q/PGa)V ƢK7Edr-0aQK.SUQ*gnQz XLyc,i.L{ȊGD!,~B!W7⺦ja+RkMoP05Z%`aيXJx붙2:\pbsu1Om LĽ!l;PICS66F+aP, 1X xbUildg aVHB}:)5PS΃0ΒTq;5#~$,KPd޲`%'s}jBwX"&(X14mbZmJW KT4a7 IZ*(zЂI J#! (AA'gmv,^wיWF51ϣYPHHl([O_&nOIZtn&H'I\B!xVl8VÝ |:'3%N-zizՏTV=,-Y]Rz62SOο{l]DLʳ(.DܐM?'ޫ1ttv$W=&r=[LVU \'n}KRleG('߮(Ne"nƆzjrF^ (RQJ4s­#VRqjfŲktc+x~8WY9Eepv@=$TBE`a"+ZaYtlriyPKEB(|m&)P C7ɠ.)H`"D,,L:5;(NFwb:} jYC&  KCr0! AHN b GR:M_&&Y'S ho\S~djRL)ʥ%W.B&z:,w)A EpQPHς29h "/|ΩhSH*"lLa~ʅa X6}^H~h.(dd DŽe_(Jk?ʒ:BID#,o|ic)~(1< ȕ2ͥ -}U\ ЏϳQ&Y:T, a 5N)9R-(M߮ {޼S5ޙP',%azPQ䫟B TLuа uVsO_PCĪ>:Jpb WM2'-vm 1>n"WSS:`-`rüз \T'mZ+V5ښ Ƃ_(5ʰ\ժhXuܲ`QaÉn ,f}r.l`1ۚL=LXrZm4߻ᄰ۰wca59musEll]e9L];u[;q۵ڌT抻"ѵnl;kK7 @D "y b+5bri_ 9^%NT;Wp]DV D|ݥ^zY/Sژ ̼Hr WRe2ekJʹ`R" lìIENDB`python-aio-pika-9.5.5/docs/source/_static/logo2x.png000066400000000000000000000530731476164671100223660ustar00rootroot00000000000000PNG  IHDRXXhPLTEI1E.?)E.  7$ C-5$ + $,   @+:&T8=(0 ^>tN/ 6$ K2S99&jF<(W!r+̩:':&4# 8%V 0 6$ 7$U 3" . ~T 1 }S, {R2! 
Y;rKzQuNwOgEyPjGmIoJ_?* ) ]>a@tMqJlHbAeCdCcBiFd%[<' \=Z< ȤfɯɦW:շQ5U9>)A+p*C-e6ҳF.lI0{]8˧pŢi}YN4K2ܾ׺Z"S7o)ģ{g&\#Ҷn)׹yl(qi'_$˫M3׺nlٻϱɩg7k=a:a$̭ŧC3s_uP|U%gAcF!`~RmGe&ǩoP*& ϴqyU`3nL"iI!oigauGgTdQG^~lçޠuڜ;SRIDATxA @@2m APZ -3h/0*z /h"ZlZڐu[E0?UJPE]UTP`K*B2Tsv#?ù]vM]ǿrX+n5]~/DUTP1VjKPҦL4Tj,mҰIJH =_Lpi ó{|Cc#˗,ڒY- ݽ)] \1j%ZVϣp1NOx6gb 2w|&P`ř^u\  UΣrMu(,jLWbC@~<$8&-thN. xΖwM"|ę^]pl\g`6[uN=AJWɵ2s-1$Q7$,{k .u=>ShH{fj&C9FZ-<[:M 鷾 ^='Pt)Ц4r]&Yѱ; [e#9*:{ns$6g;F-"Ġms4lGgT_-*??53ɢZڤJ׿_~T*Y^W0g nVn֕,X;[ţh(K5K.[|fio|Y))w[9d0+w!wf"rVt X6*>XY/8m<jQϹt4%Yё !| _+{v +fr"^Y pE3gW(Peg9Q0 y*:tӭB䘱 UY' U ۪~oZayU+)nX5>/KWg8AڞꎝbH,g ՌٖRwPXS+nQun2[WZYqMn,fo Uʂza9{Ev^hԚ`(g() +*:dWk5իW[3Jo,&1+~ *XqX6]}_*2=g3J4O0Aƕg=#<<W`V/ *Y++tì,b?!B+* ,bBR xfexoe1څp_{c**٬^pҭB9c4fiOnoY* N ?Pif81ТK^P'S'w!# 6( W(#^fªL+Y곉Hsyp_ιI ʂYa*,:/FȂ+LxLwp0a1k&xb!Vpx+*!w!eVYa7D^Y̎,.ԾʆF)xҋ0>,ݽ1 [SYsYq";(Fq'"r9hiq3x݅D ""{_'Sk{EUL]qNL7zذzb|1pas2$FR}S3Ui"4zYuhwKfR X9/_IjJW1'RH>I5`Z6[snn+WCGBf! 뒲;V]?̿kP6=Yb6WX(嘾鬇,[0b{uMXջD٬eNZ{Aؼ-(߁l>Nb$l]6ӐbU:W~,sMDD) ^nbMλ(Peӟ], aE|D;[D./ϔbH,we;BgYKn IXJ{P>Eb һX DbOA,\ _ F(VgOrNN7O`D~tb]oΣ<H=b]S:+b{!tD|O)Z:<>Npok=dQ,O ~zXiWtu d WVzJa xJX( $OȚe0Nd"EjQ6W3r WXiW,9ʾ67Ka=*tlY[6Ś<S n 0%XP,.Bf)xĪXwkoZGȌ,|^|HR϶TB'RU!Je18fŜʈ qLQ/!G99o@bɳ'V>+T ۸LXˀRP#f@,|qJX5z*Yw8dۚJX1:·4 4nE(em^l|G ;:U)Z nI8f3O;݋E-hԓS6TB*#YXx4q=!VzAPop5QVZ,;axR,.b*b)j!WsELO[xIaZC")z6'Lp[,AiL[3ԦJX=X^ ƻG 1GlΊ;"HYw2˴K/Mm.ZYBXq0ky$7VU*mUH[YX#᎙RBN✲V%U¢X8] qZ'fH)֞q0kQpXJYT g17KVǸN]})W0戚] blqx^Jn*Zmlq^,-p^z0廻SK@/qZ,Eʺ m I3hϳcߜ8P0 U3{!|}Zx*wrsWn6hs kc9-,ih-2k6+W`9KgwFXwωElXe\Pdi ;H,z| snM8QhX]ޱXkxsSKA-}X?3lTƑo6a:bug6LGq A10XuN)q RԣՇJ%:?5rfzQH>SsXfc4kI&i0XZSޭG/nd ~\WC,{vwk5>JqE+81Rum!n (VM)lLf'J8abەˌচTI"_@C*|ZqcK::{/ /oXz7n 9U@,X^+Wk[ nL G$usmƪ"4i @Coܭ%{t 6%;0VJl3`gXd;XX8ޝ> Dq֦iLj(.DDQpC'AO#ÇX*oPթ%)D6,ȢCXG ^' CGY גaY b=(eaHVIZ v;EuJZ6xOhZʔ4J{e}:^D,e)xUGTET`JV~U`ԁ֯`)+edS*E ,3٫OXMU.X5!,'e)k/Uͱ[rs,uXՀJ,f,U`<V{n2c3Xei{>`dT5/ ,m`ոò؋]jcX꽔%K/XuA:9P! kU:YbeQEʕwv +Xaؾ]XhCɊ&eCX΂`S4RURYڏK9MjWO^(,;,XfHҰjˆ]ul{f,2LY%aX0\ઞror5C=>%uaxWEaB. 
KeEEXwJRrIj#^_W)%aL2d cX\ (j"Bh!S(,%qRaTEVU#]5GYXd#'K|!đ+KUX4/b9W?NՅE6ۜ,2`e +_ԹK_°Ƞ_+KUXH\9VlФ8,dEA1Jje +t$Z+NtlTEG bG],]a€VPbZ ` b6PpWX)AaTVÒ}G':xˏ1Yz‚F\5‚ `+թH _ 7CaΤ`-ێlur "VdQu1+ȏHUSXyW`W5f`՗&k`1W`q:bSMs 3d0v +._A bCF_@fVHr>n,`q29tl"҂4v_JɚXPPZYl5#:I1%KwXAF VD˶ Z(o߳pВ#,aiBX9ՏXa%K_X a2Q-Xf8aL`UdU`U,<`ƿ< `eonVXf{ΰ`>Ѣ!X9*P*XB(Xx>™%X\^*X! ޷pf cpU`}3Sț`힤`{'(Yڷp^% E^.a"W cA e֔WowYVv/I]eٺKd8Vs ΖdpDX+"1:ŖY#YhrifJ\fa>;ǰt#lɊe)-݇|z`Ů <+(X‚Ғt_  :wc}zemSL' .I #-cJKXlrPiYҕk+R"-{rt]龍Q !UWYyɱJAJ׌ZNJ qyH,$r3aC6~HN-.t! kWbV m9YB ey-ͮ8 PXADZ+(WJ'+ hɜnrYϰ6⬖9# X|6VF:#Y}:՛ZDeΣ E Ft?dARyY;5ֺ鯱șJ!Mdy5*/ٻޤb0_}62P 1 15Y$&z 11JZ?i4F9-cD[H˭8W*dyFd܇ɸhmYPcw`ᒅϐ󶐖SxW U1V}WVu"bR>[$+$BTLU*Eû+-'[;~X>*bJ `D:~h1[ˣՓ,էk #2֫k ܹpտŜB nN;lbOXD,UB֭o`DzkWFZ;0YnGWWV'tTr)`8|pU뗏캯'[%­6̪D֍2Uו` `UzX!wΤs܃v˜"0^ Kx)X,CWɤgGuvBз7DI/Ϥ5#XAs[+D{ Ts7`_+`źLoaQnjmvG +DKTka܄` iOuv5 ˸?ꬵ4ݣ:{ C#2]e,˸3d.Sm?Ea7dtթE d5ȔU~L ˑRdM떾1$9UX['+4n/$F/p`wpL:MeXޥl}6c^ ӵJubE)o tLaWWT_4YX{e}zU]mNִ=o.v:֭+4-Y,fK]4U6ob-Ѻ jqڀBS2`0X,F}yҏAW7±>VCsO'8ZhdmTe(PmubP {HdkLˣjkDFW+NTϔq2]苣jYE+T)5/^d!-_|T8BzC^UKH/ 2yf"٪p(Q|._nKY۸%bWtjm1-*Y v:nՉ£"hqf}Xղf+ }d/+Km⥕#U_tREa{YN?yb.̏k (̬Vάut5`{bX?$Vd`'̕W=+'yuW1kEW8cZX{%2]Y@1`-㛪k w^xZl7=UHKޭGR{%$BY1.e 8vxOA -,5א%PXl\E*&BYlOlm+B XZLV̓uo ¼7( |=8W:@a1+QF":VѲb cKh.XZz&e} nBWY@჉7K]_BY˜u&d|+ a Ik9֌]kXVڀVs1Ȉ>Ղq֩d+;aq|abrXEʌX vхLϕ!D|FjXe\Y ˓%Y{5+̰_ +ka |l k9+İNfЕop`8D ֩=LʈaᘅR@ZhaL{ a'a:*?#+}$rHW6"KoP׫YojoXa ~Wa'aEZ BXŔ5NV|u4;pCW& XvXd/V`-WvXce7W`weMve=QzrhlWÒɊrEk1x b A3]9K& VeOtW.eգu| zցzMt,^8h[X` Xx r, ek%6h+G` ?\ X Yrm4W%>/]V%+]9K5h-amrw2Z*s5oM@/MErl<-}2tAFĜAQ`cQ֞˛,mXiK\P*f+%带'Kڿ{_`l4Wr "o ]qq. 
XY LFf{jdÚBFMX+I!t*Vvb"XslKےÚFaݚigA!"Xmbacº.Vց#S kI.KvZڌ` ^S"+f %aօ]6TB>}ϟ_6aZ*^">,vtJ*U5aM-T2`1FLc7XJwkl-(yd[f۱nfJnN?bo% ðz*멆Ku:㚭اNǟQJvvN6aˀ#eza;+Dj6[V+dNr{\#oB3օK%| J+RqTm֭bbƊ?Zp-|:!,\]+tGޙVEaDDDpBQUQ_$/>RB&Ij&BԶr؆+ mA[Ko;:E,8<胠^{IzҦbz^۬^g+JeՓk,Ue)MM[C\,€)CVǐ #X.9BWccÁ-0 jrc/)ZHe}?+=z 0-zKvLBlNP;=#o36L10(a,_:3Aph${8=K +6,H-3csCλ<O8HG[^4V_alwy`;`QtvwFU=.Qr&Bby^s5sCjF|N_gd/`!ӭeI=-P9lp.D_rh1KEmT 7dzĒ6:`1K]'5`ĕӮ_1:9;|`pGwL4?|* Ͼ6>ՈX!#WWpz5MD'K˜J}v⸰aDv}s|?1 X&F ؤλp^Gz\닖9u{{`1WS?ہSm uƷ=wy`` )MnyT'NڹџprVxb2LB{guӦdXQ9 ,m50 L,K'Όpn/&?7tZYl=)mE/(+RFݐXܺ[\@Y7zܞ=99ɾ{׻Cu1KOcN}X,-,rjƤC[^Co[֖:Wh֞輘ـW}%FFםKŚ 2wi99Jr/é$} ±B0b~dp1zZ*Y&N0X>^6`)>Gl6KX1O1f3VB$Lk Z3XZM] ,.6P).c?Y>J* ,{ߵt!.HV_R^ NVL)M w\ctOrJ>9-yY6+ ;26,@ VC\̂k# XC$uTs%Iͅ.`£Jd,I"2Vt뙩~WJI:bq9ŎN Xb Γg姿\+O)v+hSbLEߴXjK:|Csu,mJ]IoLx%)BM'Nְ# Tgٽ)iൌsc`}E`L&e/|$˕T9 oSQ7icn9+t|%3LUX:H[JKxk; 6:"pP܌Kq^& rCjK{@q+5U ,1e){݆qjXoug1`;yK V'9ߊ(UF4lX;@Y;ɾ\![U+,?>K^ o37Dd*nn|~VvAKQƽX_9C2WRƕn3XHEy{M¤X M ExrJL~Kf`d~UMk^h whWm`:+EPVO`|gkL~ [vJ,$HkEZ@5XB^BE=-3ЗqiB)O,^<^m+BƊ̎VGXvA2`p)Ke&1KZ{Zl.G\V9 ݩ:3)K$2F-$nyZ IG~I MjQy2-(f`m!#:&>XX>3nK ]޷D]X܄Ft$f z}7}N B6#`Y*@s`(IK;'X{ u/)DXm7W "-ZwXUѢQtx[,N/RZ`dn 1IQMFk0:Y}2_ X@ cgv;&LgtD]В߄~CZ,E'1P5`hs,U@6,sbM] ,Уʸn鼏v+z:NM!omZ#-XX-X"cy Z!a&D kAtXx8'=iUdxdM, Qs;߃9pHk +XXׯkFDORQ֞ <ۼrJx;۲R{g$W|Kڽg(2V BWn?dDt/# .'ckJ X 6&`uy4+s/GL>]I 3kj [fS:xÖ0ٟ% #\a_擯lkDVО#() ^K`p`]um5G\}܄^/`9B!y/>ɀ0RW\!h."P|+a1C>d6&y@(@ J{9G%)Xxt霎~Wk᤭MfLۜb.di%t9~D `8Osr3QU mY/C06qfWsn֛o'mܱ>Ch-7b$i-lA3y W9{X47,~uQȇ9`%@s5ۭ`R=⛆7uuq7^yUX]z! n!&`w%o \*nKA@j/0rYTWԹLɹgxSbpaZDBϯC"{\.:`M`Я郍/o|ʐm`Ei{4*$Tַ X\{{ʖ|hyKL>I럵l/N,Dt= X]}mUxӥGYw>::dkQXtPYA} d\U֋oUp_:e_Ɗx9-Y^87}' viaj[GVMwN.mغQ) d-wu/6EQ烩|$/ H>x֘q כ)8Q)QRJC>QyXkuuuζ {﬽{y❪* S'u'BlUɺ*H$!ʪAYUYM{u8%,ϫ6Vwld ,+]LU\n˭:mB:BtKXg~mͭ, H|(5poċnmL20WM\Ѣp- YbYd@'͂c%O Y {I,+芏gEuu{?aa0y7qX ,}I.=:N[hht?~AcL L!VE!eEС zIL$m$ԌzhĊWMV *q|xP. 
`/5}BYw ܢ/ 5VvYᄥA_ Ut̼֟/5?'t/ywbby ܂'8DYf"P0/,-,k'S=P%+S`Z>;_k˫qsPօO C>*uZec›(㳗ws͇tD[tJPfT>?ݻ~ڍ}U(y*3tʼnE/k7o|l߃EJ G]9Kgr,dSV]}]s][XSl]!0H5a}(LrΕSbnV?eFQ0C RnR4K=˅ulqh%1p'Xl8K'y0DX岊N} udԳP%Q5"L {.6d])ND ̺))l˓|1LD#׺OeBW(YYc$MƒPˆ#>ƪsI"a=uR8F(ZeR d,U*Sph0O_E<Z s>pvJB=릏F ]jo3y;l_)+C.CMeq38GH_.Ó1s;vLPvX [s7Rb==.]th#aZDUHXl)ғ oe hq8{,5+u>ǠJcۛҹn&ԣ AEszxEbBPrN}^#Qpaeѫ]Vttʁ}Cuq!{,hޑa[5!ԱQ vԙpVs$_ei L2ϹƝhYe.,rx3s?׺ kߎD iX ډB4\VzʜRAWEbdY .=(LEtل"MQ<. Pː:$;#Y=j=r܎K%WAIYu>j:LЙ©.ރ//C\C Y i) b0,.L{+GVGn3 @Fzq@jMaEgGe"Ю6a p:D=n>C{?fU4t;"|exsm . 2x֑U@P|,@J6a aĂeM~#4 !,DH}͙ b"uX\r([a8[X"8.kd[J!`rn>uK.5fڔ˺QKFX]PCkN'K°FY&n(  kwt%PՎ4Ҧ;kƵLvHX|}i\'Uam]FiӒ0Â˚5vpA>>dg¥Kh"=Âˢ_ i24LAXU-1dC?AN@M[@ 5c㈄MBӨP#y UHIEi%vށ3]s7c2Y'vf2Q8RNseh4%]eZnNV 7I9?"~Y^ZX}Z<[̰EP'dfӛ୴:\,VeL*ɐ'^ U+}ezC*ocW/#X&ee6J=?Ѫq5^eWr*y;xZ64Pi*cUb\zE#t%dӝ>T+[~6Ya+] Y`hVX;M9ȯ8 VWTKoU nJiF1u*a焩*lWqp%dnai;nXdп n;:PPf9+mWcJ!CeEzu~&ie2U*Su׫,>Ї4kd+Ha41H1gl`ZPBV֖NcKC'Z 4qHqa LJl+j`xh1Z9-2E,3aPbRB]EX4FczIm(?`lPĄdLUH xQ B_^s?{Ua8eVUZS[W}Q3GUMc(m{\+!"QEUɞY*ʴ9w.zTUCmq]p7*7QZ?h+uuW*8TFUU^V59Pqj cjk\ݑB]Ff-J{-J-FhTO?eD?mIENDB`python-aio-pika-9.5.5/docs/source/_static/tutorial/000077500000000000000000000000001476164671100223015ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/_static/tutorial/bindings.svg000066400000000000000000000140071476164671100246210ustar00rootroot00000000000000
P
P
X
X
binding
binding
binding
binding
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_static/tutorial/consumer.svg000066400000000000000000000030501476164671100246530ustar00rootroot00000000000000
C
C
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_static/tutorial/direct-exchange-multiple.svg000066400000000000000000000257341476164671100277200ustar00rootroot00000000000000
P
P
X
X
black
black
type=direct
type=direct
C1
C1
C2
C2
Q1
Q1
Q2
Q2
black
black
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_static/tutorial/direct-exchange.svg000066400000000000000000000301111476164671100260500ustar00rootroot00000000000000
P
P
X
X
orange
orange
type=direct
type=direct
C1
C1
C2
C2
Q1
Q1
Q2
Q2
black
black
green
green
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_static/tutorial/exchanges.svg000066400000000000000000000135711476164671100247760ustar00rootroot00000000000000
P
P
X
X
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_static/tutorial/prefetch-count.svg000066400000000000000000000166211476164671100257560ustar00rootroot00000000000000
P
P
C1
C1
C2
C2
queue_name=hello
queue_name=hello
prefetch=1
prefetch=1
prefetch=1
prefetch=1
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_static/tutorial/producer.svg000066400000000000000000000030461476164671100246500ustar00rootroot00000000000000
P
P
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_static/tutorial/python-five.svg000066400000000000000000000302031476164671100252700ustar00rootroot00000000000000
P
P
X
X
*.orange.*
*.orange.*
lazy.#
lazy.#
type=topic
type=topic
C1
C1
C2
C2
Q1
Q1
Q2
Q2
*.*.rabbit
*.*.rabbit
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_static/tutorial/python-four.svg000066400000000000000000000323671476164671100253270ustar00rootroot00000000000000
P
P
X
X
error
error
type=direct
type=direct
C1
C1
C2
C2
amqp.gen-S9b...
amqp.gen-S9b...
amqp.gen-Ag1...
amqp.gen-Ag1...
info
info
error
error
warning
warning
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_static/tutorial/python-one-overall.svg000066400000000000000000000106501476164671100265660ustar00rootroot00000000000000
P
P
C
C
hello
hello
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_static/tutorial/python-six.svg000066400000000000000000000256301476164671100251520ustar00rootroot00000000000000
Request
reply_to=amqp.gen-Xa2...
correlation_id=abc
Request...
Reply
correlation_id=abc
Reply...
C
C
S
S
Server
Server
Client
Client
rpc_queue
rpc_queue
amqp.gen-Xa2...
amqp.gen-Xa2...
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_static/tutorial/python-three-overall.svg000066400000000000000000000210161476164671100271120ustar00rootroot00000000000000
P
P
amq.gen-Rq6...
amq.gen-Rq6...
amq.gen-As8...
amq.gen-As8...
X
X
C1
C1
C2
C2
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_static/tutorial/python-two.svg000066400000000000000000000115101476164671100251500ustar00rootroot00000000000000
P
P
C1
C1
C2
C2
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_static/tutorial/queue.svg000066400000000000000000000040731476164671100241520ustar00rootroot00000000000000
queue_name
queue_name
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_static/tutorial/receiving.svg000066400000000000000000000063651476164671100250070ustar00rootroot00000000000000
C
C
hello
hello
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_static/tutorial/sending.svg000066400000000000000000000063601476164671100244560ustar00rootroot00000000000000
P
P
hello
hello
Text is not SVG - cannot display
python-aio-pika-9.5.5/docs/source/_templates/000077500000000000000000000000001476164671100211455ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/_templates/base.html000066400000000000000000000030661476164671100227520ustar00rootroot00000000000000{% extends "!base.html" %} {% block extrahead %} {{ super() }} {% endblock %} python-aio-pika-9.5.5/docs/source/apidoc.rst000066400000000000000000000004331476164671100210010ustar00rootroot00000000000000API Reference ============= .. automodule:: aio_pika :members: .. autoclass:: aio_pika.patterns.base :members: .. autoclass:: aio_pika.patterns.Master :members: .. autoclass:: aio_pika.patterns.Worker :members: .. autoclass:: aio_pika.patterns.RPC :members: python-aio-pika-9.5.5/docs/source/conf.py000066400000000000000000000164111476164671100203120ustar00rootroot00000000000000#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # aio-pika documentation build configuration file, created by # sphinx-quickstart on Fri Mar 31 17:03:20 2017. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import datetime import os import sys # noinspection PyUnresolvedReferences from importlib.metadata import Distribution __version__ = Distribution.from_name("aio-pika").version sys.path.insert(0, os.path.abspath(os.path.dirname("__file__"))) autoclass_content = "both" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. 
# # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ "sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.coverage", "sphinx.ext.viewcode", "sphinxcontrib.googleanalytics", ] googleanalytics_id = "G-VNYV7TYPS6" googleanalytics_enabled = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = ".rst" # The master toctree document. master_doc = "index" # General information about the project. project = "aio-pika" copyright = "{}, Dmitry Orlov".format(datetime.datetime.now().year) author = "Dmitry Orlov" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = ".".join(map(str, __version__.split(".")[:-1])) # The full version, including alpha/beta/rc tags. release = ".".join(map(str, __version__.split(".")[-1])) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = [] # type: ignore # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" # If true, `todo` and `todoList` produce output, else they produce nothing. 
todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = "furo" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} html_theme_options = { "sidebar_hide_name": True, "source_repository": "https://github.com/mosquito/aio-pika/", "source_branch": "master", "source_directory": "docs/source", "footer_icons": [ { "name": "GitHub", "url": "https://github.com/mosquito/aio-pika", "html": ( """""" """""" ), "class": "", }, ], } html_title = "Wrapper for the aiormq for asyncio and humans" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. htmlhelp_basename = "aio-pikadoc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { # type: ignore # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'a4paper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '12pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ( master_doc, "aio-pika.tex", "aio-pika Documentation", "Dmitry Orlov", "manual", ), ] # -- Options for manual page output --------------------------------------- # One entry per manual page. 
List of tuples # (source start file, name, description, authors, manual section). man_pages = [(master_doc, "aio-pika", "aio-pika Documentation", [author], 1)] # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( master_doc, "aio-pika", "aio-pika Documentation", author, "aio-pika", "One line description of project.", "Miscellaneous", ), ] # -- Options for Epub output ---------------------------------------------- # Bibliographic Dublin Core info. epub_title = project epub_author = author epub_publisher = author epub_copyright = copyright # The unique identifier of the text. This can be a ISBN number # or the project homepage. # # epub_identifier = '' # A unique identification for the text. # # epub_uid = '' # A list of files that should not be packed into the epub file. epub_exclude_files = ["search.html"] html_logo = "_static/logo2x.png" html_favicon = "_static/icon.png" # html_sidebars = {"**": ["about.html", "navigation.html", "searchbox.html"]} python-aio-pika-9.5.5/docs/source/examples/000077500000000000000000000000001476164671100206265ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/examples/benchmark.py000066400000000000000000000045241476164671100231370ustar00rootroot00000000000000import asyncio import os import time from contextlib import contextmanager from typing import Generator, Any import aio_pika from aio_pika import connect_robust @contextmanager def timeit(message: str, iterations: int) -> Generator[Any, Any, Any]: delay = -time.perf_counter() print(f"{message} started") try: yield finally: delay += time.perf_counter() print( f"{message} completed in {delay:.6f} seconds, " f"{iterations} iterations {delay / iterations:.6f} seconds " f"per iteration" ) async def main() -> None: connect = await connect_robust( os.getenv("AMQP_URL", 
"amqp://guest:guest@localhost") ) iterations = 100_000 async with connect: message = aio_pika.Message(b"test") incoming_message: aio_pika.abc.AbstractIncomingMessage async with connect.channel() as channel: queue = await channel.declare_queue(auto_delete=True) with timeit( "Sequential publisher confirms", iterations=iterations ): for _ in range(iterations): await channel.default_exchange.publish( message, routing_key=queue.name ) with timeit("Iterator consume no_ack=False", iterations=iterations): counter = 0 async for incoming_message in queue.iterator(no_ack=False): await incoming_message.ack() counter += 1 if counter >= iterations: break async with connect.channel(publisher_confirms=False) as channel: queue = await channel.declare_queue(auto_delete=True) with timeit( "Sequential no publisher confirms", iterations=iterations ): for _ in range(iterations): await channel.default_exchange.publish( message, routing_key=queue.name ) with timeit("Iterator consume no_ack=True", iterations=iterations): counter = 0 async for _ in queue.iterator(no_ack=True): counter += 1 if counter >= iterations: break if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/examples/extend-patterns.py000066400000000000000000000010701476164671100243230ustar00rootroot00000000000000from typing import Any import msgpack # type: ignore from aio_pika.patterns import RPC, Master class MsgpackRPC(RPC): CONTENT_TYPE = "application/msgpack" def serialize(self, data: Any) -> bytes: return msgpack.dumps(data) def deserialize(self, data: bytes) -> bytes: return msgpack.loads(data) class MsgpackMaster(Master): CONTENT_TYPE = "application/msgpack" def serialize(self, data: Any) -> bytes: return msgpack.dumps(data) def deserialize(self, data: bytes) -> bytes: return msgpack.loads(data) python-aio-pika-9.5.5/docs/source/examples/external-credentials.py000066400000000000000000000014661476164671100253240ustar00rootroot00000000000000import asyncio import ssl import aio_pika from 
aio_pika.abc import SSLOptions async def main() -> None: connection = await aio_pika.connect_robust( host="127.0.0.1", login="", ssl=True, ssl_options=SSLOptions( cafile="cacert.pem", certfile="cert.pem", keyfile="key.pem", no_verify_ssl=ssl.CERT_REQUIRED, ), client_properties={"connection_name": "aio-pika external credentials"}, ) async with connection: routing_key = "test_queue" channel = await connection.channel() await channel.default_exchange.publish( aio_pika.Message(body="Hello {}".format(routing_key).encode()), routing_key=routing_key, ) if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/examples/log-level-set.py000066400000000000000000000001151476164671100236540ustar00rootroot00000000000000import logging from aio_pika import logger logger.setLevel(logging.ERROR) python-aio-pika-9.5.5/docs/source/examples/main.py000066400000000000000000000023761476164671100221340ustar00rootroot00000000000000import asyncio from typing import Optional from aio_pika import Message, connect_robust from aio_pika.abc import AbstractIncomingMessage async def main() -> None: connection = await connect_robust( "amqp://guest:guest@127.0.0.1/?name=aio-pika%20example", ) queue_name = "test_queue" routing_key = "test_queue" # Creating channel channel = await connection.channel() # Declaring exchange exchange = await channel.declare_exchange("direct", auto_delete=True) # Declaring queue queue = await channel.declare_queue(queue_name, auto_delete=True) # Binding queue await queue.bind(exchange, routing_key) await exchange.publish( Message( bytes("Hello", "utf-8"), content_type="text/plain", headers={"foo": "bar"}, ), routing_key, ) # Receiving one message incoming_message: Optional[AbstractIncomingMessage] = await queue.get( timeout=5, fail=False ) if incoming_message: # Confirm message await incoming_message.ack() else: print("Queue empty") await queue.unbind(exchange, routing_key) await queue.delete() await connection.close() if __name__ == "__main__": 
asyncio.run(main()) python-aio-pika-9.5.5/docs/source/examples/master.py000066400000000000000000000013441476164671100224750ustar00rootroot00000000000000import asyncio from aio_pika import connect_robust from aio_pika.patterns import Master async def main() -> None: connection = await connect_robust( "amqp://guest:guest@127.0.0.1/?name=aio-pika%20master", ) async with connection: # Creating channel channel = await connection.channel() master = Master(channel) # Creates tasks by proxy object for task_id in range(1000): await master.proxy.my_task_name(task_id=task_id) # Or using create_task method for task_id in range(1000): await master.create_task( "my_task_name", kwargs=dict(task_id=task_id), ) if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/examples/pooling.py000066400000000000000000000030351476164671100226500ustar00rootroot00000000000000import asyncio import aio_pika from aio_pika.abc import AbstractRobustConnection from aio_pika.pool import Pool async def main() -> None: async def get_connection() -> AbstractRobustConnection: return await aio_pika.connect_robust("amqp://guest:guest@localhost/") connection_pool: Pool = Pool(get_connection, max_size=2) async def get_channel() -> aio_pika.Channel: async with connection_pool.acquire() as connection: return await connection.channel() channel_pool: Pool = Pool(get_channel, max_size=10) queue_name = "pool_queue" async def consume() -> None: async with channel_pool.acquire() as channel: # type: aio_pika.Channel await channel.set_qos(10) queue = await channel.declare_queue( queue_name, durable=False, auto_delete=False, ) async with queue.iterator() as queue_iter: async for message in queue_iter: print(message) await message.ack() async def publish() -> None: async with channel_pool.acquire() as channel: # type: aio_pika.Channel await channel.default_exchange.publish( aio_pika.Message(("Channel: %r" % channel).encode()), queue_name, ) async with connection_pool, channel_pool: task = 
asyncio.create_task(consume()) await asyncio.wait([asyncio.create_task(publish()) for _ in range(50)]) await task if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/examples/rpc-callee.py000066400000000000000000000011631476164671100232100ustar00rootroot00000000000000import asyncio from aio_pika import connect_robust from aio_pika.patterns import RPC async def multiply(*, x: int, y: int) -> int: return x * y async def main() -> None: connection = await connect_robust( "amqp://guest:guest@127.0.0.1/", client_properties={"connection_name": "callee"}, ) # Creating channel channel = await connection.channel() rpc = await RPC.create(channel) await rpc.register("multiply", multiply, auto_delete=True) try: await asyncio.Future() finally: await connection.close() if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/examples/rpc-caller.py000066400000000000000000000013161476164671100232250ustar00rootroot00000000000000import asyncio from aio_pika import connect_robust from aio_pika.patterns import RPC async def main() -> None: connection = await connect_robust( "amqp://guest:guest@127.0.0.1/", client_properties={"connection_name": "caller"}, ) async with connection: # Creating channel channel = await connection.channel() rpc = await RPC.create(channel) # Creates tasks by proxy object for i in range(1000): print(await rpc.proxy.multiply(x=100, y=i)) # Or using create_task method for i in range(1000): print(await rpc.call("multiply", kwargs=dict(x=100, y=i))) if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/examples/simple_async_consumer.py000066400000000000000000000015331476164671100256030ustar00rootroot00000000000000import asyncio import aio_pika async def process_message( message: aio_pika.abc.AbstractIncomingMessage, ) -> None: async with message.process(): print(message.body) await asyncio.sleep(1) async def main() -> None: connection = await aio_pika.connect_robust( 
"amqp://guest:guest@127.0.0.1/", ) queue_name = "test_queue" # Creating channel channel = await connection.channel() # Maximum message count which will be processing at the same time. await channel.set_qos(prefetch_count=100) # Declaring queue queue = await channel.declare_queue(queue_name, auto_delete=True) await queue.consume(process_message) try: # Wait until terminate await asyncio.Future() finally: await connection.close() if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/examples/simple_consumer.py000066400000000000000000000015601476164671100244060ustar00rootroot00000000000000import asyncio import logging import aio_pika async def main() -> None: logging.basicConfig(level=logging.DEBUG) connection = await aio_pika.connect_robust( "amqp://guest:guest@127.0.0.1/", ) queue_name = "test_queue" async with connection: # Creating channel channel = await connection.channel() # Will take no more than 10 messages in advance await channel.set_qos(prefetch_count=10) # Declaring queue queue = await channel.declare_queue(queue_name, auto_delete=True) async with queue.iterator() as queue_iter: async for message in queue_iter: async with message.process(): print(message.body) if queue.name in message.body.decode(): break if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/examples/simple_publisher.py000066400000000000000000000007411476164671100245500ustar00rootroot00000000000000import asyncio import aio_pika async def main() -> None: connection = await aio_pika.connect_robust( "amqp://guest:guest@127.0.0.1/", ) async with connection: routing_key = "test_queue" channel = await connection.channel() await channel.default_exchange.publish( aio_pika.Message(body=f"Hello {routing_key}".encode()), routing_key=routing_key, ) if __name__ == "__main__": asyncio.run(main()) 
python-aio-pika-9.5.5/docs/source/examples/simple_publisher_transactions.py000066400000000000000000000027551476164671100273470ustar00rootroot00000000000000import asyncio import aio_pika async def main() -> None: connection = await aio_pika.connect_robust( "amqp://guest:guest@127.0.0.1/", ) async with connection: routing_key = "test_queue" # Transactions conflicts with `publisher_confirms` channel = await connection.channel(publisher_confirms=False) # Use transactions with async context manager async with channel.transaction(): # Publishing messages but delivery will not be done # before committing this transaction for i in range(10): message = aio_pika.Message(body="Hello #{}".format(i).encode()) await channel.default_exchange.publish( message, routing_key=routing_key, ) # Using transactions manually tx = channel.transaction() # start transaction manually await tx.select() await channel.default_exchange.publish( aio_pika.Message(body="Hello {}".format(routing_key).encode()), routing_key=routing_key, ) await tx.commit() # Using transactions manually tx = channel.transaction() # start transaction manually await tx.select() await channel.default_exchange.publish( aio_pika.Message(body="Should be rejected".encode()), routing_key=routing_key, ) await tx.rollback() if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/examples/tornado-pubsub.py000066400000000000000000000025241476164671100241470ustar00rootroot00000000000000import asyncio import tornado.ioloop import tornado.web from aio_pika import Message, connect_robust class Base: QUEUE: asyncio.Queue class SubscriberHandler(tornado.web.RequestHandler, Base): async def get(self) -> None: message = await self.QUEUE.get() await self.finish(message.body) class PublisherHandler(tornado.web.RequestHandler): async def post(self) -> None: connection = self.application.settings["amqp_connection"] channel = await connection.channel() try: await channel.default_exchange.publish( 
Message(body=self.request.body), routing_key="test", ) finally: await channel.close() await self.finish("OK") async def make_app() -> tornado.web.Application: amqp_connection = await connect_robust() channel = await amqp_connection.channel() queue = await channel.declare_queue("test", auto_delete=True) Base.QUEUE = asyncio.Queue() await queue.consume(Base.QUEUE.put, no_ack=True) return tornado.web.Application( [(r"/publish", PublisherHandler), (r"/subscribe", SubscriberHandler)], amqp_connection=amqp_connection, ) async def main() -> None: app = await make_app() app.listen(8888) await asyncio.Future() if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/examples/worker.py000066400000000000000000000015731476164671100225170ustar00rootroot00000000000000import asyncio from aio_pika import connect_robust from aio_pika.patterns import Master, NackMessage, RejectMessage async def worker(*, task_id: int) -> None: # If you want to reject message or send # nack you might raise special exception if task_id % 2 == 0: raise RejectMessage(requeue=False) if task_id % 2 == 1: raise NackMessage(requeue=False) print(task_id) async def main() -> None: connection = await connect_robust( "amqp://guest:guest@127.0.0.1/?name=aio-pika%20worker", ) # Creating channel channel = await connection.channel() # Initializing Master with channel master = Master(channel) await master.create_worker("my_task_name", worker, auto_delete=True) try: await asyncio.Future() finally: await connection.close() if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/index.rst000066400000000000000000000420041476164671100206510ustar00rootroot00000000000000.. aio-pika documentation master file, created by sphinx-quickstart on Fri Mar 31 17:03:20 2017. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. .. _aio-pika: https://github.com/mosquito/aio-pika .. 
_asyncio: https://docs.python.org/3/library/asyncio.html .. _aiormq: http://github.com/mosquito/aiormq/ Welcome to aio-pika's documentation! ==================================== .. image:: https://coveralls.io/repos/github/mosquito/aio-pika/badge.svg?branch=master :target: https://coveralls.io/github/mosquito/aio-pika :alt: Coveralls .. image:: https://github.com/mosquito/aio-pika/workflows/tests/badge.svg :target: https://github.com/mosquito/aio-pika/actions?query=workflow%3Atests :alt: Github Actions .. image:: https://img.shields.io/pypi/v/aio-pika.svg :target: https://pypi.python.org/pypi/aio-pika/ :alt: Latest Version .. image:: https://img.shields.io/pypi/wheel/aio-pika.svg :target: https://pypi.python.org/pypi/aio-pika/ .. image:: https://img.shields.io/pypi/pyversions/aio-pika.svg :target: https://pypi.python.org/pypi/aio-pika/ .. image:: https://img.shields.io/pypi/l/aio-pika.svg :target: https://pypi.python.org/pypi/aio-pika/ `aio-pika`_ is a wrapper for the `aiormq`_ for `asyncio`_ and humans. Features ++++++++ * Completely asynchronous API. * Object oriented API. * Transparent auto-reconnects with complete state recovery with `connect_robust` (e.g. declared queues or exchanges, consuming state and bindings). * Python 3.6+ compatible. * For python 3.5 users available `aio-pika<7` * Transparent `publisher confirms`_ support * `Transactions`_ support * Completely type-hints coverage. .. _publisher confirms: https://www.rabbitmq.com/confirms.html .. _Transactions: https://www.rabbitmq.com/semantics.html#tx AMQP URL parameters +++++++++++++++++++ URL is the supported way to configure connection. For customisation of connection behaviour you might pass the parameters in URL query-string like format. This article describes a description for these parameters. ``aiormq`` specific ~~~~~~~~~~~~~~~~~~~ * ``name`` (``str`` url encoded) - A string that will be visible in the RabbitMQ management console and in the server logs, convenient for diagnostics. 
* ``cafile`` (``str``) - Path to Certificate Authority file * ``capath`` (``str``) - Path to Certificate Authority directory * ``cadata`` (``str`` url encoded) - URL encoded CA certificate content * ``keyfile`` (``str``) - Path to client ssl private key file * ``certfile`` (``str``) - Path to client ssl certificate file * ``no_verify_ssl`` - No verify server SSL certificates. ``0`` by default and means ``False`` other value means ``True``. * ``heartbeat`` (``int``-like) - interval in seconds between AMQP heartbeat packets. ``0`` disables this feature. ``aio_pika.connect`` function and ``aio_pika.Connection`` class specific ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * ``interleave`` (``int``-like) - controls address reordering when a host name resolves to multiple IP addresses. If 0 or unspecified, no reordering is done, and addresses are tried in the order returned by ``getaddrinfo()``. If a positive integer is specified, the addresses are interleaved by address family, and the given integer is interpreted as "First Address Family Count" as defined in `RFC 8305`_. The default is ``0`` if ``happy_eyeballs_delay`` is not specified, and ``1`` if it is. .. note:: Really useful for RabbitMQ clusters with one DNS name with many ``A``/``AAAA`` records. .. warning:: This option is supported by ``asyncio.DefaultEventLoopPolicy`` and available since python 3.8. * ``happy_eyeballs_delay`` (``float``-like) - if given, enables Happy Eyeballs for this connection. It should be a floating-point number representing the amount of time in seconds to wait for a connection attempt to complete, before starting the next attempt in parallel. This is the "Connection Attempt Delay" as defined in `RFC 8305`_. A sensible default value recommended by the RFC is ``0.25`` (250 milliseconds). .. note:: Really useful for RabbitMQ clusters with one DNS name with many ``A``/``AAAA`` records. .. 
warning:: This option is supported by ``asyncio.DefaultEventLoopPolicy`` and available since python 3.8. ``aio_pika.connect_robust`` function and ``aio_pika.RobustConnection`` class specific ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ For ``aio_pika.RobustConnection`` class is applicable all ``aio_pika.Connection`` related parameters like, ``name``/``interleave``/``happy_eyeballs_delay`` and some specific: * ``reconnect_interval`` (``float``-like) - is the period in seconds, not more often than the attempts to re-establish the connection will take place. * ``fail_fast`` (``true``/``yes``/``y``/``enable``/``on``/``enabled``/``1`` means ``True``, otherwise ``False``) - special behavior for the start connection attempt, if it fails, all other attempts stops and an exception will be thrown at the connection stage. Enabled by default, if you are sure you need to disable this feature, be ensures for the passed URL is really working. Otherwise, your program will go into endless reconnection attempts that can not be successed. .. _RFC 8305: https://datatracker.ietf.org/doc/html/rfc8305.html URL examples ~~~~~~~~~~~~ * ``amqp://username:password@hostname/vhost?name=connection%20name&heartbeat=60&happy_eyeballs_delay=0.25`` * ``amqps://username:password@hostname/vhost?reconnect_interval=5&fail_fast=1`` * ``amqps://username:password@hostname/vhost?cafile=/path/to/ca.pem`` * ``amqps://username:password@hostname/vhost?cafile=/path/to/ca.pem&keyfile=/path/to/key.pem&certfile=/path/to/sert.pem`` Installation ++++++++++++ Installation with pip: .. code-block:: shell pip install aio-pika Installation from git: .. code-block:: shell # via pip pip install https://github.com/mosquito/aio-pika/archive/master.zip # manually git clone https://github.com/mosquito/aio-pika.git cd aio-pika python setup.py install Development +++++++++++ Clone the project: .. 
code-block:: shell git clone https://github.com/mosquito/aio-pika.git cd aio-pika Create a new virtualenv for `aio-pika`_: .. code-block:: shell virtualenv -p python3.5 env Install all requirements for `aio-pika`_: .. code-block:: shell env/bin/pip install -e '.[develop]' Table Of Contents +++++++++++++++++ .. toctree:: :glob: :maxdepth: 3 quick-start patterns rabbitmq-tutorial/index apidoc Thanks for contributing +++++++++++++++++++++++ * `@mosquito`_ (author) * `@decaz`_ (steel persuasiveness while code review) * `@heckad`_ (bug fixes) * `@smagafurov`_ (bug fixes) * `@hellysmile`_ (bug fixes and ideas) * `@altvod`_ (bug fixes) * `@alternativehood`_ (bugfixes) * `@cprieto`_ (bug fixes) * `@akhoronko`_ (bug fixes) * `@iselind`_ (bug fixes) * `@DXist`_ (bug fixes) * `@blazewicz`_ (bug fixes) * `@chibby0ne`_ (bug fixes) * `@jmccarrell`_ (bug fixes) * `@taybin`_ (bug fixes) * `@ollamh`_ (bug fixes) * `@DriverX`_ (bug fixes) * `@brianmedigate`_ (bug fixes) * `@dan-stone`_ (bug fixes) * `@Kludex`_ (bug fixes) * `@bmario`_ (bug fixes) * `@tzoiker`_ (bug fixes) * `@Pehat`_ (bug fixes) * `@WindowGenerator`_ (bug fixes) * `@dhontecillas`_ (bug fixes) * `@tilsche`_ (bug fixes) * `@leenr`_ (bug fixes) * `@la0rg`_ (bug fixes) * `@SolovyovAlexander`_ (bug fixes) * `@kremius`_ (bug fixes) * `@zyp`_ (bug fixes) * `@kajetanj`_ (bug fixes) * `@Alviner`_ (moral support, debug sessions and good mood) * `@Pavkazzz`_ (composure, and patience while debug sessions) * `@bbrodriges`_ (supplying grammar while writing documentation) * `@dizballanze`_ (review, grammar) .. _@mosquito: https://github.com/mosquito .. _@decaz: https://github.com/decaz .. _@heckad: https://github.com/heckad .. _@smagafurov: https://github.com/smagafurov .. _@hellysmile: https://github.com/hellysmile .. _@altvod: https://github.com/altvod .. _@alternativehood: https://github.com/alternativehood .. _@cprieto: https://github.com/cprieto .. _@akhoronko: https://github.com/akhoronko .. 
_@iselind: https://github.com/iselind .. _@DXist: https://github.com/DXist .. _@blazewicz: https://github.com/blazewicz .. _@chibby0ne: https://github.com/chibby0ne .. _@jmccarrell: https://github.com/jmccarrell .. _@taybin: https://github.com/taybin .. _@ollamh: https://github.com/ollamh .. _@DriverX: https://github.com/DriverX .. _@brianmedigate: https://github.com/brianmedigate .. _@dan-stone: https://github.com/dan-stone .. _@Kludex: https://github.com/Kludex .. _@bmario: https://github.com/bmario .. _@tzoiker: https://github.com/tzoiker .. _@Pehat: https://github.com/Pehat .. _@WindowGenerator: https://github.com/WindowGenerator .. _@dhontecillas: https://github.com/dhontecillas .. _@tilsche: https://github.com/tilsche .. _@leenr: https://github.com/leenr .. _@la0rg: https://github.com/la0rg .. _@SolovyovAlexander: https://github.com/SolovyovAlexander .. _@kremius: https://github.com/kremius .. _@zyp: https://github.com/zyp .. _@kajetanj: https://github.com/kajetanj .. _@Alviner: https://github.com/Alviner .. _@Pavkazzz: https://github.com/Pavkazzz .. _@bbrodriges: https://github.com/bbrodriges .. _@dizballanze: https://github.com/dizballanze See also ++++++++ `aiormq`_ ~~~~~~~~~ `aiormq` is a pure python AMQP client library. It is under the hood of **aio-pika** and might to be used when you really loving works with the protocol low level. Following examples demonstrates the user API. Simple consumer: .. code-block:: python import asyncio import aiormq async def on_message(message): """ on_message doesn't necessarily have to be defined as async. Here it is to show that it's possible. 
""" print(f" [x] Received message {message!r}") print(f"Message body is: {message.body!r}") print("Before sleep!") await asyncio.sleep(5) # Represents async I/O operations print("After sleep!") async def main(): # Perform connection connection = await aiormq.connect("amqp://guest:guest@localhost/") # Creating a channel channel = await connection.channel() # Declaring queue declare_ok = await channel.queue_declare('helo') consume_ok = await channel.basic_consume( declare_ok.queue, on_message, no_ack=True ) loop = asyncio.get_event_loop() loop.run_until_complete(main()) loop.run_forever() Simple publisher: .. code-block:: python import asyncio from typing import Optional import aiormq from aiormq.abc import DeliveredMessage MESSAGE: Optional[DeliveredMessage] = None async def main(): global MESSAGE body = b'Hello World!' # Perform connection connection = await aiormq.connect("amqp://guest:guest@localhost//") # Creating a channel channel = await connection.channel() declare_ok = await channel.queue_declare("hello", auto_delete=True) # Sending the message await channel.basic_publish(body, routing_key='hello') print(f" [x] Sent {body}") MESSAGE = await channel.basic_get(declare_ok.queue) print(f" [x] Received message from {declare_ok.queue!r}") loop = asyncio.get_event_loop() loop.run_until_complete(main()) assert MESSAGE is not None assert MESSAGE.routing_key == "hello" assert MESSAGE.body == b'Hello World!' The `patio`_ and the `patio-rabbitmq`_ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ **PATIO** is an acronym for Python Asynchronous Tasks for AsyncIO - an easily extensible library, for distributed task execution, like celery, only targeting asyncio as the main design approach. **patio-rabbitmq** provides you with the ability to use *RPC over RabbitMQ* services with extremely simple implementation: .. 
code-block:: python from patio import Registry, ThreadPoolExecutor from patio_rabbitmq import RabbitMQBroker rpc = Registry(project="patio-rabbitmq", auto_naming=False) @rpc("sum") def sum(*args): return sum(args) async def main(): async with ThreadPoolExecutor(rpc, max_workers=16) as executor: async with RabbitMQBroker( executor, amqp_url="amqp://guest:guest@localhost/", ) as broker: await broker.join() And the caller side might be written like this: .. code-block:: python import asyncio from patio import NullExecutor, Registry from patio_rabbitmq import RabbitMQBroker async def main(): async with NullExecutor(Registry(project="patio-rabbitmq")) as executor: async with RabbitMQBroker( executor, amqp_url="amqp://guest:guest@localhost/", ) as broker: print(await asyncio.gather( *[ broker.call("mul", i, i, timeout=1) for i in range(10) ] )) `FastStream`_ ~~~~~~~~~~~~~ **FastStream** is a powerful and easy-to-use Python library for building asynchronous services that interact with event streams.. If you need no deep dive into **RabbitMQ** details, you can use more high-level **FastStream** interfaces: .. code-block:: python from faststream import FastStream from faststream.rabbit import RabbitBroker broker = RabbitBroker("amqp://guest:guest@localhost:5672/") app = FastStream(broker) @broker.subscriber("user") async def user_created(user_id: int): assert isinstance(user_id, int) return f"user-{user_id}: created" @app.after_startup async def pub_smth(): assert ( await broker.publish(1, "user", rpc=True) ) == "user-1: created" Also, **FastStream** validates messages by **pydantic**, generates your project **AsyncAPI** spec, supports In-Memory testing, RPC calls, and more. In fact, it is a high-level wrapper on top of **aio-pika**, so you can use both of these libraries' advantages at the same time. 
`python-socketio`_ ~~~~~~~~~~~~~~~~~~ `Socket.IO`_ is a transport protocol that enables real-time bidirectional event-based communication between clients (typically, though not always, web browsers) and a server. This package provides Python implementations of both, each with standard and asyncio variants. Also this package is suitable for building messaging services over **RabbitMQ** via **aio-pika** adapter: .. code-block:: python import socketio from aiohttp import web sio = socketio.AsyncServer(client_manager=socketio.AsyncAioPikaManager()) app = web.Application() sio.attach(app) @sio.event async def chat_message(sid, data): print("message ", data) if __name__ == '__main__': web.run_app(app) And a client is able to call `chat_message` the following way: .. code-block:: python import asyncio import socketio sio = socketio.AsyncClient() async def main(): await sio.connect('http://localhost:8080') await sio.emit('chat_message', {'response': 'my response'}) if __name__ == '__main__': asyncio.run(main()) The `taskiq`_ and the `taskiq-aio-pika`_ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ **Taskiq** is an asynchronous distributed task queue for python. The project takes inspiration from big projects such as Celery and Dramatiq. But taskiq can send and run both the sync and async functions. The library provides you with **aio-pika** broker for running tasks too. .. code-block:: python from taskiq_aio_pika import AioPikaBroker broker = AioPikaBroker() @broker.task async def test() -> None: print("nothing") async def main(): await broker.startup() await test.kiq() `Rasa`_ ~~~~~~~ With over 25 million downloads, Rasa Open Source is the most popular open source framework for building chat and voice-based AI assistants. 
With **Rasa**, you can build contextual assistants on: * Facebook Messenger * Slack * Google Hangouts * Webex Teams * Microsoft Bot Framework * Rocket.Chat * Mattermost * Telegram * Twilio Your own custom conversational channels or voice assistants as: * Alexa Skills * Google Home Actions **Rasa** helps you build contextual assistants capable of having layered conversations with lots of back-and-forth. In order for a human to have a meaningful exchange with a contextual assistant, the assistant needs to be able to use context to build on things that were previously discussed – **Rasa** enables you to build assistants that can do this in a scalable way. And it also uses **aio-pika** to interact with **RabbitMQ** deep inside! Versioning ========== This software follows `Semantic Versioning`_ .. _Semantic Versioning: http://semver.org/ .. _faststream: https://github.com/airtai/faststream .. _patio: https://github.com/patio-python/patio .. _patio-rabbitmq: https://github.com/patio-python/patio-rabbitmq .. _Socket.IO: https://socket.io/ .. _python-socketio: https://python-socketio.readthedocs.io/en/latest/intro.html .. _taskiq: https://github.com/taskiq-python/taskiq .. _taskiq-aio-pika: https://github.com/taskiq-python/taskiq-aio-pika .. _Rasa: https://rasa.com/docs/rasa/ python-aio-pika-9.5.5/docs/source/patterns.rst000066400000000000000000000023711476164671100214050ustar00rootroot00000000000000.. _aio-pika: https://github.com/mosquito/aio-pika Patterns and helpers ++++++++++++++++++++ .. note:: Available since `aio-pika>=1.7.0` `aio-pika`_ includes some useful patterns for creating distributed systems. .. _patterns-worker: Master/Worker ~~~~~~~~~~~~~ Helper which implements Master/Worker pattern. This applicable for balancing tasks between multiple workers. The master creates tasks: .. literalinclude:: examples/master.py :language: python Worker code: .. literalinclude:: examples/worker.py :language: python The one or multiple workers executes tasks. .. 
_patterns-rpc: RPC ~~~ Helper which implements Remote Procedure Call pattern. This applicable for balancing tasks between multiple workers. The caller creates tasks and awaiting results: .. literalinclude:: examples/rpc-caller.py :language: python One or multiple callees executing tasks: .. literalinclude:: examples/rpc-callee.py :language: python Extending ~~~~~~~~~ Both patterns serialization behaviour might be changed by inheritance and redefinition of methods :func:`aio_pika.patterns.base.serialize` and :func:`aio_pika.patterns.base.deserialize`. Following examples demonstrates it: .. literalinclude:: examples/extend-patterns.py :language: python python-aio-pika-9.5.5/docs/source/quick-start.rst000066400000000000000000000025261476164671100220160ustar00rootroot00000000000000Quick start +++++++++++ Some useful examples. Simple consumer ~~~~~~~~~~~~~~~ .. literalinclude:: examples/simple_consumer.py :language: python Simple publisher ~~~~~~~~~~~~~~~~ .. literalinclude:: examples/simple_publisher.py :language: python Asynchronous message processing ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. literalinclude:: examples/simple_async_consumer.py :language: python Working with RabbitMQ transactions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. literalinclude:: examples/simple_publisher_transactions.py :language: python Get single message example ~~~~~~~~~~~~~~~~~~~~~~~~~~ .. literalinclude:: examples/main.py :language: python Set logging level ~~~~~~~~~~~~~~~~~ Sometimes you want to see only your debug logs, but when you just call `logging.basicConfig(logging.DEBUG)` you set the debug log level for all loggers, includes all aio_pika's modules. If you want to set logging level independently see following example: .. literalinclude:: examples/log-level-set.py :language: python Tornado example ~~~~~~~~~~~~~~~ .. literalinclude:: examples/tornado-pubsub.py :language: python External credentials example ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. 
literalinclude:: examples/external-credentials.py :language: python Connection pooling ~~~~~~~~~~~~~~~~~~ .. literalinclude:: examples/pooling.py :language: python python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/000077500000000000000000000000001476164671100224525ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/1-introduction.rst000066400000000000000000000240661476164671100260730ustar00rootroot00000000000000.. _issue: https://github.com/mosquito/aio-pika/issues .. _pull request: https://github.com/mosquito/aio-pika/compare .. _aio-pika: https://github.com/mosquito/aio-pika .. _official tutorial: https://www.rabbitmq.com/tutorials/tutorial-one-python.html .. _asyncio: https://docs.python.org/3/library/asyncio.html .. _asyncio tutorial: https://pymotw.com/3/asyncio/coroutines.html .. _introduction: Introduction ============ .. warning:: This is a beta version of the port from `official tutorial`_. Please when you found an error create `issue`_ or `pull request`_ for me. It is expected that you are familiar with the basics of `asyncio`_. Anyway following examples work as written. You feel free to download them and test it as is without any changes (in case your RabbitMQ installation allows access for user "guest"). Otherwise we recommend to read `asyncio tutorial`_. .. note:: **Prerequisites** This tutorial assumes RabbitMQ is installed_ and running on localhost on standard port (`5672`). In case you use a different host, port or credentials, connections settings would require adjusting. .. _installed: https://www.rabbitmq.com/download.html **Where to get help** If you're having trouble going through this tutorial you can `contact us`_ through the mailing list. .. _contact us: https://groups.google.com/forum/#!forum/rabbitmq-users RabbitMQ is a message broker. The principal idea is pretty simple: it accepts and forwards messages. You can think about it as a post office: when you send mail to the post box you're pretty sure that Mr. 
Postman will eventually deliver the mail to your recipient. Using this metaphor RabbitMQ is a post box, a post office and a postman. The major difference between RabbitMQ and the post office is the fact that it doesn't deal with paper, instead it accepts, stores and forwards binary blobs of data ‒ messages. RabbitMQ, and messaging in general, uses some jargon. * Producing means nothing more than sending. A program that sends messages is a producer. We'll draw it like that, with "P": .. image:: /_static/tutorial/producer.svg :align: center * A queue is the name for a mailbox. It lives inside RabbitMQ. Although messages flow through RabbitMQ and your applications, they can be stored only inside a queue. A queue is not bound by any limits, it can store as many messages as you like ‒ it's essentially an infinite buffer. Many producers can send messages that go to one queue, many consumers can try to receive data from one queue. A queue will be drawn as like that, with its name above it: .. image:: /_static/tutorial/queue.svg :align: center * Consuming has a similar meaning to receiving. A consumer is a program that mostly waits to receive messages. On our drawings it's shown with "C": .. image:: /_static/tutorial/consumer.svg :align: center .. note:: Note that the producer, consumer, and broker do not have to reside on the same machine; indeed in most applications they don't. Hello World! ++++++++++++ .. note:: Using the `aio-pika`_ async Python client Our "Hello world" won't be too complex ‒ let's send a message, receive it and print it on the screen. To do so we need two programs: one that sends a message and one that receives and prints it. Our overall design will look like: .. image:: /_static/tutorial/python-one-overall.svg :align: center Producer sends messages to the "hello" queue. The consumer receives messages from that queue. .. note:: **RabbitMQ libraries** RabbitMQ speaks AMQP 0.9.1, which is an open, general-purpose protocol for messaging. 
There are a number of clients for RabbitMQ in `many different languages`_. In this tutorial series we're going to use `aio-pika`_. To install it you can use the `pip`_ package management tool. .. _many different languages: https://www.rabbitmq.com/devtools.html .. _pip: https://pip.pypa.io/en/stable/quickstart/ Sending +++++++ .. image:: /_static/tutorial/sending.svg :align: center Our first program *send.py* will send a single message to the queue. The first thing we need to do is to establish a connection with RabbitMQ server. .. literalinclude:: examples/1-introduction/send.py :language: python :lines: 5-12 We're connected now, to a broker on the local machine - hence the localhost. If we wanted to connect to a broker on a different machine we'd simply specify its name or IP address here. Next, before sending we need to make sure the recipient queue exists. If we send a message to non-existing location, RabbitMQ will just trash the message. Let's create a queue to which the message will be delivered, let's name it *hello*: .. literalinclude:: examples/1-introduction/send.py :language: python :lines: 14-15 At that point we're ready to send a message. Our first message will just contain a string Hello World! and we want to send it to our hello queue. In RabbitMQ a message can never be sent directly to the queue, it always needs to go through an exchange. But let's not get dragged down by the details ‒ you can read more about exchanges in the :ref:`third part of this tutorial `. All we need to know now is how to use a default exchange identified by an empty string. This exchange is special ‒ it allows us to specify exactly to which queue the message should go. The queue name needs to be specified in the *routing_key* parameter: .. literalinclude:: examples/1-introduction/send.py :language: python :lines: 17-21 Before exiting the program we need to make sure the network buffers were flushed and our message was actually delivered to RabbitMQ. 
We can do it by gently closing the connection. In this example async context manager has been used. .. literalinclude:: examples/1-introduction/send.py :language: python :lines: 10-12 .. note:: *Sending doesn't work!* If this is your first time using RabbitMQ and you don't see the "Sent" message then you may be left scratching your head wondering what could be wrong. Maybe the broker was started without enough free disk space (by default it needs at least 1Gb free) and is therefore refusing to accept messages. Check the broker logfile to confirm and reduce the limit if necessary. The `configuration file documentation`_ will show you how to set *disk_free_limit*. .. _configuration file documentation: http://www.rabbitmq.com/configure.html#config-items Receiving +++++++++ .. image:: /_static/tutorial/receiving.svg :align: center Our second program *receive.py* will receive messages from the queue and print them on the screen. Again, first we need to connect to RabbitMQ server. The code responsible for connecting to Rabbit is the same as previously. The next step, just like before, is to make sure that the queue exists. Creating a queue using *queue_declare* is idempotent ‒ we can run the command as many times as we like, and only one will be created. .. literalinclude:: examples/1-introduction/receive.py :language: python :lines: 22-28 You may ask why we declare the queue again ‒ we have already declared it in our previous code. We could avoid that if we were sure that the queue already exists. For example if *send.py* program was run before. But we're not yet sure which program to run first. In such cases it's a good practice to repeat declaring the queue in both programs. .. note:: **Listing queues** You may wish to see what queues RabbitMQ has and how many messages are in them. You can do it (as a privileged user) using the rabbitmqctl tool: :: $ sudo rabbitmqctl list_queues Listing queues ... hello 0 ...done. 
(omit sudo on Windows) Receiving messages from the queue is simple. It works by subscribing a `callback function` to a queue or using `simple get`. Whenever we receive a message, this callback function is called by the `aio-pika`_ library. In our case this function will print on the screen the contents of the message. .. literalinclude:: examples/1-introduction/receive.py :language: python :pyobject: on_message Next, we need to tell RabbitMQ that this particular callback function should receive messages from our hello queue: .. literalinclude:: examples/1-introduction/receive.py :language: python :pyobject: main The *no_ack* parameter will be described :ref:`later on `. Putting it all together +++++++++++++++++++++++ Full code for :download:`send.py `: .. literalinclude:: examples/1-introduction/send.py :language: python Full :download:`receive.py ` code: .. literalinclude:: examples/1-introduction/receive.py :language: python Now we can try out our programs in a terminal. First, let's send a message using our send.py program:: $ python send.py [x] Sent 'Hello World!' The producer program send.py will stop after every run. Let's receive it:: $ python receive.py [x] Received message IncomingMessage:{ "app_id": null, "body_size": 12, "cluster_id": null, "consumer_tag": "ctag1.11fa33f5f4fa41f6a6488648181656e0", "content_encoding": null, "content_type": null, "correlation_id": "b'None'", "delivery_mode": 1, "delivery_tag": 1, "exchange": "", "expiration": null, "headers": null, "message_id": null, "priority": null, "redelivered": false, "reply_to": null, "routing_key": "hello", "synchronous": false, "timestamp": null, "type": "None", "user_id": null } Message body is: b'Hello World!' Hurray! We were able to send our first message through RabbitMQ. As you might have noticed, the *receive.py* program doesn't exit. It will stay ready to receive further messages, and may be interrupted with **Ctrl-C**. Try to run *send.py* again in a new terminal. 
We've learned how to send and receive a message from a named queue. It's time to move on to :ref:`part 2 ` and build a simple work queue. .. note:: This material was adopted from `official tutorial`_ on **rabbitmq.org**. python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/2-work-queues.rst000066400000000000000000000303711476164671100256360ustar00rootroot00000000000000.. _issue: https://github.com/mosquito/aio-pika/issues .. _pull request: https://github.com/mosquito/aio-pika/compare .. _aio-pika: https://github.com/mosquito/aio-pika .. _official tutorial: https://www.rabbitmq.com/tutorials/tutorial-two-python.html .. _work-queues: Work Queues =========== .. warning:: This is a beta version of the port from `official tutorial`_. Please when you found an error create `issue`_ or `pull request`_ for me. This implementation is a part of official tutorial. Since version 1.7.0 `aio-pika`_ has :ref:`patterns submodule `. You might use :class:`aio_pika.patterns.Master` for real projects. .. note:: Using the `aio-pika`_ async Python client .. note:: **Prerequisites** This tutorial assumes RabbitMQ is installed_ and running on localhost on standard port (`5672`). In case you use a different host, port or credentials, connections settings would require adjusting. .. _installed: https://www.rabbitmq.com/download.html **Where to get help** If you're having trouble going through this tutorial you can `contact us`_ through the mailing list. .. _contact us: https://groups.google.com/forum/#!forum/rabbitmq-users .. image:: /_static/tutorial/python-two.svg :align: center In the :ref:`first tutorial ` we wrote programs to send and receive messages from a named queue. In this one we'll create a Work Queue that will be used to distribute time-consuming tasks among multiple workers. The main idea behind Work Queues (aka: *Task Queues*) is to avoid doing a resource-intensive task immediately and having to wait for it to complete. Instead we schedule the task to be done later. 
We encapsulate a task as a message and send it to the queue. A worker process running in the background will pop the tasks and eventually execute the job. When you run many workers the tasks will be shared between them. This concept is especially useful in web applications where it's impossible to handle a complex task during a short HTTP request window. Preparation +++++++++++ In the previous part of this tutorial we sent a message containing `"Hello World!"`. Now we'll be sending strings that stand for complex tasks. We don't have a real-world task, like images to be resized or pdf files to be rendered, so let's fake it by just pretending we're busy - by using the `asyncio.sleep()` function. We'll take the number of dots in the string as its complexity; every dot will account for one second of "work". For example, a fake task described by Hello... will take three seconds. We will slightly modify the send.py code from our previous example, to allow arbitrary messages to be sent from the command line. This program will schedule tasks to our work queue, so let's name it *new_task.py*: .. literalinclude:: examples/2-work-queues/new_task_initial.py :language: python :pyobject: main Our old receive.py script also requires some changes: it needs to fake a second of work for every dot in the message body. It will pop messages from the queue and perform the task, so let's call it *tasks_worker.py*: .. code-block:: python async def on_message(message: IncomingMessage): print(" [x] Received %r" % message.body) await asyncio.sleep(message.body.count(b'.')) print(" [x] Done") Round-robin dispatching +++++++++++++++++++++++ One of the advantages of using a Task Queue is the ability to easily parallelise work. If we are building up a backlog of work, we can just add more workers and that way, scale easily. First, let's try to run two *tasks_worker.py* scripts at the same time. They will both get messages from the queue, but how exactly? Let's see. You need three consoles open. 
Two will run the ``tasks_worker.py`` script. These consoles will be our two consumers - C1 and C2. :: shell1$ python tasks_worker.py [*] Waiting for messages. To exit press CTRL+C :: shell2$ python tasks_worker.py [*] Waiting for messages. To exit press CTRL+C In the third one we'll publish new tasks. Once you've started the consumers you can publish a few messages:: shell3$ python new_task.py First message. shell3$ python new_task.py Second message.. shell3$ python new_task.py Third message... shell3$ python new_task.py Fourth message.... shell3$ python new_task.py Fifth message..... Let's see what is delivered to our workers:: shell1$ python tasks_worker.py [*] Waiting for messages. To exit press CTRL+C [x] Received 'First message.' [x] Received 'Third message...' [x] Received 'Fifth message.....' :: shell2$ python tasks_worker.py [*] Waiting for messages. To exit press CTRL+C [x] Received 'Second message..' [x] Received 'Fourth message....' By default, RabbitMQ will send each message to the next consumer, in sequence. On average every consumer will get the same number of messages. This way of distributing messages is called round-robin. Try this out with three or more workers. Message acknowledgment ++++++++++++++++++++++ Doing a task can take a few seconds. You may wonder what happens if one of the consumers starts a long task and dies with it only partly done. With our current code once RabbitMQ delivers message to the customer it immediately removes it from memory. In this case, if you kill a worker we will lose the message it was just processing. We'll also lose all the messages that were dispatched to this particular worker but were not yet handled. But we don't want to lose any tasks. If a worker dies, we'd like the task to be delivered to another worker. In order to make sure a message is never lost, RabbitMQ supports message acknowledgments. 
An ack(nowledgement) is sent back from the consumer to tell RabbitMQ that a particular message had been received, processed and that RabbitMQ is free to delete it. If a consumer dies (its channel is closed, connection is closed, or TCP connection is lost) without sending an ack, RabbitMQ will understand that a message wasn't processed fully and will re-queue it. If there are other consumers online at the same time, it will then quickly redeliver it to another consumer. That way you can be sure that no message is lost, even if the workers occasionally die. There aren't any message timeouts; RabbitMQ will redeliver the message when the consumer dies. It's fine even if processing a message takes a very, very long time. Message acknowledgments are turned on by default. In previous examples we explicitly turned them off via the `no_ack=True` flag. It's time to remove this flag and send a proper acknowledgment from the worker, once we're done with a task. .. code-block:: python async def on_message(message: IncomingMessage): print(" [x] Received %r" % message.body) await asyncio.sleep(message.body.count(b'.')) print(" [x] Done") await message.ack() .. code-block:: python # Declaring queue queue = await channel.declare_queue("hello") # Start listening the queue with name 'hello' await queue.consume(on_message) or using special context processor: .. literalinclude:: examples/2-work-queues/tasks_worker.py :language: python :lines: 7-11 If context processor will catch an exception, the message will be returned to the queue. Using this code we can be sure that even if you kill a worker using CTRL+C while it was processing a message, nothing will be lost. Soon after the worker dies all unacknowledged messages will be redelivered. .. note:: **Forgotten acknowledgment** It's a common mistake to miss the ack. It's an easy error, but the consequences are serious. 
Messages will be redelivered when your client quits (which may look like random redelivery), but RabbitMQ will eat more and more memory as it won't be able to release any unacked messages. In order to debug this kind of mistake you can use rabbitmqctl to print the messages_unacknowledged field:: $ sudo rabbitmqctl list_queues name messages_ready messages_unacknowledged Listing queues ... hello 0 0 ...done. Message durability ++++++++++++++++++ We have learned how to make sure that even if the consumer dies, the task isn't lost. But our tasks will still be lost if RabbitMQ server stops. When RabbitMQ quits or crashes it will forget the queues and messages unless you tell it not to. Two things are required to make sure that messages aren't lost: we need to mark both the queue and messages as durable. First, we need to make sure that RabbitMQ will never lose our queue. In order to do so, we need to declare it as *durable*: .. code-block:: python queue = await channel.declare_queue("hello", durable=True) Although this command is correct by itself, it won't work in our setup. That's because we've already defined a queue called hello which is not durable. RabbitMQ doesn't allow you to redefine an existing queue with different parameters and will return an error to any program that tries to do that. But there is a quick workaround - let's declare a queue with different name, for example task_queue: .. literalinclude:: examples/2-work-queues/tasks_worker.py :language: python :lines: 23-27 This queue_declare change needs to be applied to both the producer and consumer code. At that point we're sure that the task_queue queue won't be lost even if RabbitMQ restarts. Now we need to mark our messages as persistent - by supplying a delivery_mode property with a value `PERSISTENT` (see enum :class:`aio_pika.DeliveryMode`). .. literalinclude:: examples/2-work-queues/new_task.py :language: python :pyobject: main .. 
note:: **Note on message persistence** Marking messages as persistent doesn't fully guarantee that a message won't be lost. Although it tells RabbitMQ to save the message to disk, there is still a short time window when RabbitMQ has accepted a message and hasn't saved it yet. Also, RabbitMQ doesn't do fsync(2) for every message -- it may be just saved to cache and not really written to the disk. The persistence guarantees aren't strong, but it's more than enough for our simple task queue. If you need a stronger guarantee then you can use `publisher confirms`_. `aio-pika`_ supports `publisher confirms`_ out of the box. .. _publisher confirms: https://www.rabbitmq.com/confirms.html Fair dispatch +++++++++++++ You might have noticed that the dispatching still doesn't work exactly as we want. For example in a situation with two workers, when all odd messages are heavy and even messages are light, one worker will be constantly busy and the other one will do hardly any work. Well, RabbitMQ doesn't know anything about that and will still dispatch messages evenly. This happens because RabbitMQ just dispatches a message when the message enters the queue. It doesn't look at the number of unacknowledged messages for a consumer. It just blindly dispatches every n-th message to the n-th consumer. .. image:: /_static/tutorial/prefetch-count.svg :align: center In order to defeat that we can use the basic.qos method with the `prefetch_count=1` setting. This tells RabbitMQ not to give more than one message to a worker at a time. Or, in other words, don't dispatch a new message to a worker until it has processed and acknowledged the previous one. Instead, it will dispatch it to the next worker that is not still busy. .. literalinclude:: examples/2-work-queues/tasks_worker.py :language: python :lines: 18-21 .. note:: **Note about queue size** If all the workers are busy, your queue can fill up. 
You will want to keep an eye on that, and maybe add more workers, or have some other strategy. Putting it all together +++++++++++++++++++++++ Final code of our :download:`new_task.py ` script: .. literalinclude:: examples/2-work-queues/new_task.py :language: python And our :download:`tasks_worker.py `: .. literalinclude:: examples/2-work-queues/tasks_worker.py :language: python Using message acknowledgments and prefetch_count you can set up a work queue. The durability options let the tasks survive even if RabbitMQ is restarted. Now we can move on to :ref:`tutorial 3 ` and learn how to deliver the same message to many consumers. .. note:: This material was adopted from `official tutorial`_ on **rabbitmq.org**. python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/3-publish-subscribe.rst000066400000000000000000000210431476164671100267710ustar00rootroot00000000000000.. _issue: https://github.com/mosquito/aio-pika/issues .. _pull request: https://github.com/mosquito/aio-pika/compare .. _aio-pika: https://github.com/mosquito/aio-pika .. _official tutorial: https://www.rabbitmq.com/tutorials/tutorial-three-python.html .. _publish-subscribe: Publish/Subscribe ================= .. warning:: This is a beta version of the port from `official tutorial`_. Please when you found an error create `issue`_ or `pull request`_ for me. .. note:: Using the `aio-pika`_ async Python client .. note:: **Prerequisites** This tutorial assumes RabbitMQ is installed_ and running on localhost on standard port (`5672`). In case you use a different host, port or credentials, connections settings would require adjusting. .. _installed: https://www.rabbitmq.com/download.html **Where to get help** If you're having trouble going through this tutorial you can `contact us`_ through the mailing list. .. _contact us: https://groups.google.com/forum/#!forum/rabbitmq-users In the :ref:`previous tutorial ` we created a work queue. 
The assumption behind a work queue is that each task is delivered to exactly one worker. In this part we'll do something completely different — we'll deliver a message to multiple consumers. This pattern is known as "publish/subscribe". To illustrate the pattern, we're going to build a simple logging system. It will consist of two programs — the first will emit log messages and the second will receive and print them. In our logging system every running copy of the receiver program will get the messages. That way we'll be able to run one receiver and direct the logs to disk; and at the same time we'll be able to run another receiver and see the logs on the screen. Essentially, published log messages are going to be broadcast to all the receivers. Exchanges +++++++++ In previous parts of the tutorial we sent and received messages to and from a queue. Now it's time to introduce the full messaging model in Rabbit. Let's quickly go over what we covered in the previous tutorials: * A producer is a user application that sends messages. * A queue is a buffer that stores messages. * A consumer is a user application that receives messages. The core idea in the messaging model in RabbitMQ is that the producer never sends any messages directly to a queue. Actually, quite often the producer doesn't even know if a message will be delivered to any queue at all. Instead, the producer can only send messages to an exchange. An exchange is a very simple thing. On one side it receives messages from producers and the other side it pushes them to queues. The exchange must know exactly what to do with a message it receives. Should it be appended to a particular queue? Should it be appended to many queues? Or should it get discarded. The rules for that are defined by the exchange type. .. image:: /_static/tutorial/exchanges.svg :align: center There are a few exchange types available: `DIRECT`, `TOPIC`, `HEADERS` and `FANOUT` (see :class:`aio_pika.ExchangeType`). 
We'll focus on the last one — the fanout. Let's create an exchange of that type, and call it `logs`: .. literalinclude:: examples/3-publish-subscribe/emit_log.py :language: python :lines: 15-17 The fanout exchange is very simple. As you can probably guess from the name, it just broadcasts all the messages it receives to all the queues it knows. And that's exactly what we need for our logger. .. note:: **Listing exchanges** To list the exchanges on the server you can run the ever useful rabbitmqctl:: $ sudo rabbitmqctl list_exchanges Listing exchanges ... logs fanout amq.direct direct amq.topic topic amq.fanout fanout amq.headers headers ...done. In this list there are some `amq.*` exchanges and the default (unnamed) exchange. These are created by default, but it is unlikely you'll need to use them at the moment. **Nameless exchange** In previous parts of the tutorial we knew nothing about exchanges, but still were able to send messages to queues. That was possible because we were using a default exchange, which we identify by the empty string (""). Recall how we published a message before: .. code-block:: python await channel.default_exchange.publish( Message(message_body), routing_key='hello', ) The exchange parameter is the name of the exchange. The empty string denotes the default or nameless exchange: messages are routed to the queue with the name specified by routing_key, if it exists. Now, we can publish to our named exchange instead: .. literalinclude:: examples/3-publish-subscribe/emit_log.py :language: python :lines: 19-29 Temporary queues ++++++++++++++++ As you may remember previously we were using queues which had a specified name (remember `hello` and `task_queue`?). Being able to name a queue was crucial for us — we needed to point the workers to the same queue. Giving a queue a name is important when you want to share the queue between producers and consumers. But that's not the case for our logger. 
We want to hear about all log messages, not just a subset of them. We're also interested only in currently flowing messages not in the old ones. To solve that we need two things. Firstly, whenever we connect to Rabbit we need a fresh, empty queue. To do it we could create a queue with a random name, or, even better - let the server choose a random queue name for us. We can do this by not supplying the queue parameter to `declare_queue`: .. code-block:: python queue = await channel.declare_queue() Secondly, once we disconnect the consumer the queue should be deleted. There's an exclusive flag for that: .. literalinclude:: examples/3-publish-subscribe/receive_logs.py :language: python :lines: 26 Bindings ++++++++ .. image:: /_static/tutorial/bindings.svg :align: center We've already created a fanout exchange and a queue. Now we need to tell the exchange to send messages to our queue. That relationship between exchange and a queue is called a binding. .. literalinclude:: examples/3-publish-subscribe/receive_logs.py :language: python :lines: 21-29 From now on the logs exchange will append messages to our queue. .. note:: **Listing bindings** You can list existing bindings using, you guessed it, `rabbitmqctl list_bindings`. Putting it all together +++++++++++++++++++++++ .. image:: /_static/tutorial/python-three-overall.svg :align: center The producer program, which emits log messages, doesn't look much different from the previous tutorial. The most important change is that we now want to publish messages to our logs exchange instead of the nameless one. We need to supply a routing_key when sending, but its value is ignored for fanout exchanges. Here goes the code for :download:`emit_log.py ` script: .. literalinclude:: examples/3-publish-subscribe/emit_log.py :language: python As you see, after establishing the connection we declared the exchange. This step is necessary as publishing to a non-existing exchange is forbidden. 
The messages will be lost if no queue is bound to the exchange yet, but that's okay for us; if no consumer is listening yet we can safely discard the message. The code for :download:`receive_logs.py ` script: .. literalinclude:: examples/3-publish-subscribe/receive_logs.py :language: python We're done. If you want to save logs to a file, just open a console and type:: $ python receive_logs.py > logs_from_rabbit.log If you wish to see the logs on your screen, spawn a new terminal and run:: $ python receive_logs.py And of course, to emit logs type:: $ python emit_log.py Using *rabbitmqctl list_bindings* you can verify that the code actually creates bindings and queues as we want. With two *receive_logs.py* programs running you should see something like:: $ sudo rabbitmqctl list_bindings Listing bindings ... logs exchange amq.gen-JzTY20BRgKO-HjmUJj0wLg queue [] logs exchange amq.gen-vso0PVvyiRIL2WoV3i48Yg queue [] ...done. The interpretation of the result is straightforward: data from exchange logs goes to two queues with server-assigned names. And that's exactly what we intended. To find out how to listen for a subset of messages, let's move on to :ref:`tutorial 4 ` .. note:: This material was adopted from `official tutorial`_ on **rabbitmq.org**. python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/4-routing.rst000066400000000000000000000155001476164671100250350ustar00rootroot00000000000000.. _issue: https://github.com/mosquito/aio-pika/issues .. _pull request: https://github.com/mosquito/aio-pika/compare .. _aio-pika: https://github.com/mosquito/aio-pika .. _official tutorial: https://www.rabbitmq.com/tutorials/tutorial-four-python.html .. _routing: Routing ======= .. warning:: This is a beta version of the port from `official tutorial`_. Please when you found an error create `issue`_ or `pull request`_ for me. .. note:: Using the `aio-pika`_ async Python client .. 
note:: **Prerequisites** This tutorial assumes RabbitMQ is installed_ and running on localhost on standard port (`5672`). In case you use a different host, port or credentials, connections settings would require adjusting. .. _installed: https://www.rabbitmq.com/download.html **Where to get help** If you're having trouble going through this tutorial you can `contact us`_ through the mailing list. .. _contact us: https://groups.google.com/forum/#!forum/rabbitmq-users In the :ref:`previous tutorial ` we built a simple logging system. We were able to broadcast log messages to many receivers. In this tutorial we're going to add a feature to it — we're going to make it possible to subscribe only to a subset of the messages. For example, we will be able to direct only critical error messages to the log file (to save disk space), while still being able to print all of the log messages on the console. Bindings ++++++++ In previous examples we were already creating bindings. You may recall code like: .. code-block:: python async def main(): ... # Binding the queue to the exchange await queue.bind(logs_exchange) ... A binding is a relationship between an exchange and a queue. This can be simply read as: the queue is interested in messages from this exchange. Bindings can take an extra *routing_key* parameter. To avoid the confusion with a *basic_publish* parameter we're going to call it a *binding key*. This is how we could create a binding with a key: .. code-block:: python async def main(): ... # Binding the queue to the exchange await queue.bind(logs_exchange, routing_key="black") ... The meaning of a binding key depends on the exchange type. The *fanout* exchanges, which we used previously, simply ignored its value. Direct exchange +++++++++++++++ Our logging system from the previous tutorial broadcasts all messages to all consumers. We want to extend that to allow filtering messages based on their severity. 
For example we may want the script which is writing log messages to the disk to only receive critical errors, and not waste disk space on warning or info log messages. We were using a fanout exchange, which doesn't give us too much flexibility — it's only capable of mindless broadcasting. We will use a direct exchange instead. The routing algorithm behind a direct exchange is simple — a message goes to the queues whose binding key exactly matches the routing key of the message. To illustrate that, consider the following setup: .. image:: /_static/tutorial/direct-exchange.svg :align: center In this setup, we can see the *direct* exchange X with two queues bound to it. The first queue is bound with binding key *orange*, and the second has two bindings, one with binding key *black* and the other one with *green*. In such a setup a message published to the exchange with a routing key *orange* will be routed to queue *Q1*. Messages with a routing key of *black* or *green* will go to *Q2*. All other messages will be discarded. Multiple bindings +++++++++++++++++ .. image:: /_static/tutorial/direct-exchange-multiple.svg :align: center It is perfectly legal to bind multiple queues with the same binding key. In our example we could add a binding between *X* and *Q1* with binding key *black*. In that case, the *direct* exchange will behave like fanout and will broadcast the message to all the matching queues. A message with routing key black will be delivered to both *Q1* and *Q2*. Emitting logs +++++++++++++ We'll use this model for our logging system. Instead of *fanout* we'll send messages to a *direct* exchange. We will supply the log severity as a *routing key*. That way the receiving script will be able to select the severity it wants to receive. Let's focus on emitting logs first. Like always we need to create an exchange first: .. code-block:: python from aio_pika import ExchangeType async def main(): ... 
direct_logs_exchange = await channel.declare_exchange( 'logs', ExchangeType.DIRECT ) And we're ready to send a message: .. code-block:: python async def main(): ... await direct_logs_exchange.publish( Message(message_body), routing_key=severity, ) To simplify things we will assume that `'severity'` can be one of `'info'`, `'warning'`, `'error'`. Subscribing +++++++++++ Receiving messages will work just like in the previous tutorial, with one exception - we're going to create a new binding for each severity we're interested in. .. code-block:: python async def main(): ... # Declaring queue queue = await channel.declare_queue(exclusive=True) # Binding the queue to the exchange await queue.bind(direct_logs_exchange, routing_key=severity) ... Putting it all together +++++++++++++++++++++++ .. image:: /_static/tutorial/python-four.svg :align: center The simplified code for :download:`receive_logs_direct_simple.py `: .. literalinclude:: examples/4-routing/receive_logs_direct_simple.py :language: python The code for :download:`emit_log_direct.py `: .. literalinclude:: examples/4-routing/emit_log_direct.py :language: python .. note:: The callback-based code for :download:`receive_logs_direct.py `: .. literalinclude:: examples/4-routing/receive_logs_direct.py :language: python If you want to save only *'warning'* and *'error'* (and not *'info'*) log messages to a file, just open a console and type:: $ python receive_logs_direct_simple.py warning error > logs_from_rabbit.log If you'd like to see all the log messages on your screen, open a new terminal and do:: $ python receive_logs_direct.py info warning error [*] Waiting for logs. To exit press CTRL+C And, for example, to emit an error log message just type:: $ python emit_log_direct.py error "Run. Run. Or it will explode." [x] Sent 'error':'Run. Run. Or it will explode.' Move on to :ref:`tutorial 5 ` to find out how to listen for messages based on a pattern. .. 
note:: This material was adopted from `official tutorial`_ on **rabbitmq.org**. python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/5-topics.rst000066400000000000000000000137651476164671100246630ustar00rootroot00000000000000.. _issue: https://github.com/mosquito/aio-pika/issues .. _pull request: https://github.com/mosquito/aio-pika/compare .. _aio-pika: https://github.com/mosquito/aio-pika .. _syslog: http://en.wikipedia.org/wiki/Syslog .. _official tutorial: https://www.rabbitmq.com/tutorials/tutorial-five-python.html .. _topics: Topics ====== .. warning:: This is a beta version of the port from `official tutorial`_. Please when you found an error create `issue`_ or `pull request`_ for me. .. note:: Using the `aio-pika`_ async Python client .. note:: **Prerequisites** This tutorial assumes RabbitMQ is installed_ and running on localhost on standard port (`5672`). In case you use a different host, port or credentials, connections settings would require adjusting. .. _installed: https://www.rabbitmq.com/download.html **Where to get help** If you're having trouble going through this tutorial you can `contact us`_ through the mailing list. .. _contact us: https://groups.google.com/forum/#!forum/rabbitmq-users In the :ref:`previous tutorial ` we improved our logging system. Instead of using a fanout exchange only capable of dummy broadcasting, we used a direct one, and gained a possibility of selectively receiving the logs. Although using the direct exchange improved our system, it still has limitations — it can't do routing based on multiple criteria. In our logging system we might want to subscribe to not only logs based on severity, but also based on the source which emitted the log. You might know this concept from the syslog_ unix tool, which routes logs based on both severity (`info`/`warn`/`crit`...) and facility (`auth`/`cron`/`kern`...). 
That would give us a lot of flexibility - we may want to listen to just critical errors coming from 'cron' but also all logs from 'kern'. To implement that in our logging system we need to learn about a more complex topic exchange. Topic exchange ++++++++++++++ Messages sent to a topic exchange can't have an arbitrary *routing_key* - it must be a list of words, delimited by dots. The words can be anything, but usually they specify some features connected to the message. A few valid routing key examples: `"stock.usd.nyse"`, `"nyse.vmw"`, `"quick.orange.rabbit"`. There can be as many words in the routing key as you like, up to the limit of 255 bytes. The binding key must also be in the same form. The logic behind the topic exchange is similar to a direct one - a message sent with a particular routing key will be delivered to all the queues that are bound with a matching binding key. However there are two important special cases for binding keys: * `*` (star) can substitute for exactly one word. * `#` (hash) can substitute for zero or more words. It's easiest to explain this in an example: .. image:: /_static/tutorial/python-five.svg :align: center In this example, we're going to send messages which all describe animals. The messages will be sent with a routing key that consists of three words (two dots). The first word in the routing key will describe a celerity, second a colour and third a species: `".."`. We created three bindings: *Q1* is bound with binding key `"*.orange.*"` and Q2 with `"*.*.rabbit"` and `"lazy.#"`. These bindings can be summarised as: * Q1 is interested in all the orange animals. * Q2 wants to hear everything about rabbits, and everything about lazy animals. * A message with a routing key set to `"quick.orange.rabbit"` will be delivered to both queues. Message `"lazy.orange.elephant"` also will go to both of them. On the other hand `"quick.orange.fox"` will only go to the first queue, and `"lazy.brown.fox"` only to the second. 
`"lazy.pink.rabbit"` will be delivered to the second queue only once, even though it matches two bindings. "quick.brown.fox" doesn't match any binding so it will be discarded. What happens if we break our contract and send a message with one or four words, like `"orange"` or `"quick.orange.male.rabbit"`? Well, these messages won't match any bindings and will be lost. On the other hand `"lazy.orange.male.rabbit"`, even though it has four words, will match the last binding and will be delivered to the second queue. .. note:: **Topic exchange** Topic exchange is powerful and can behave like other exchanges. When a queue is bound with `"#"` (hash) binding key - it will receive all the messages, regardless of the routing key - like in fanout exchange. When special characters `"*"` (star) and `"#"` (hash) aren't used in bindings, the topic exchange will behave just like a direct one. Putting it all together +++++++++++++++++++++++ We're going to use a topic exchange in our logging system. We'll start off with a working assumption that the routing keys of logs will have two words: `"."`. The code is almost the same as in the :ref:`previous tutorial `. The code for :download:`emit_log_topic.py `: .. literalinclude:: examples/5-topics/emit_log_topic.py :language: python The code for :download:`receive_logs_topic.py `: .. literalinclude:: examples/5-topics/receive_logs_topic.py :language: python To receive all the logs run:: python receive_logs_topic.py "#" To receive all logs from the facility `"kern"`:: python receive_logs_topic.py "kern.*" Or if you want to hear only about `"critical"` logs:: python receive_logs_topic.py "*.critical" You can create multiple bindings:: python receive_logs_topic.py "kern.*" "*.critical" And to emit a log with a routing key `"kern.critical"` type:: python emit_log_topic.py "kern.critical" "A critical kernel error" Have fun playing with these programs. 
Note that the code doesn't make any assumption about the routing or binding keys, you may want to play with more than two routing key parameters. Move on to :ref:`tutorial 6 ` to learn about RPC. .. note:: This material was adopted from `official tutorial`_ on **rabbitmq.org**. python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/6-rpc.rst000066400000000000000000000230761476164671100241430ustar00rootroot00000000000000.. _issue: https://github.com/mosquito/aio-pika/issues .. _pull request: https://github.com/mosquito/aio-pika/compare .. _aio-pika: https://github.com/mosquito/aio-pika .. _official tutorial: https://www.rabbitmq.com/tutorials/tutorial-six-python.html .. _rpc: Remote procedure call (RPC) =========================== .. warning:: This is a beta version of the port from `official tutorial`_. Please when you found an error create `issue`_ or `pull request`_ for me. This implementation is a part of official tutorial. Since version 1.7.0 `aio-pika`_ has :ref:`patterns submodule `. You might use :class:`aio_pika.patterns.RPC` for real projects. .. note:: Using the `aio-pika`_ async Python client .. note:: **Prerequisites** This tutorial assumes RabbitMQ is installed_ and running on localhost on standard port (`5672`). In case you use a different host, port or credentials, connections settings would require adjusting. .. _installed: https://www.rabbitmq.com/download.html **Where to get help** If you're having trouble going through this tutorial you can `contact us`_ through the mailing list. .. _contact us: https://groups.google.com/forum/#!forum/rabbitmq-users In the :ref:`second tutorial ` we learned how to use *Work Queues* to distribute time-consuming tasks among multiple workers. But what if we need to run a function on a remote computer and wait for the result? Well, that's a different story. This pattern is commonly known as *Remote Procedure Call or RPC*. 
In this tutorial we're going to use RabbitMQ to build an RPC system: a client and a scalable RPC server. As we don't have any time-consuming tasks that are worth distributing, we're going to create a dummy RPC service that returns Fibonacci numbers. Client interface ++++++++++++++++ To illustrate how an RPC service could be used we're going to create a simple client class. It's going to expose a method named call which sends an RPC request and blocks until the answer is received: .. code-block:: python async def main(): fibonacci_rpc = FibonacciRpcClient() result = await fibonacci_rpc.call(4) print("fib(4) is %r" % result) .. note:: **A note on RPC** Although RPC is a pretty common pattern in computing, it's often criticised. The problems arise when a programmer is not aware whether a function call is local or if it's a slow RPC. Confusions like that result in an unpredictable system and adds unnecessary complexity to debugging. Instead of simplifying software, misused RPC can result in unmaintainable spaghetti code. Bearing that in mind, consider the following advice: * Make sure it's obvious which function call is local and which is remote. * Document your system. Make the dependencies between components clear. * Handle error cases. How should the client react when the RPC server is down for a long time? When in doubt avoid RPC. If you can, you should use an asynchronous pipeline - instead of RPC-like blocking, results are asynchronously pushed to a next computation stage. Callback queue ++++++++++++++ In general doing RPC over RabbitMQ is easy. A client sends a request message and a server replies with a response message. In order to receive a response the client needs to send a 'callback' queue address with the request. Let's try it: .. code-block:: python async def main(): ... 
# Queue for results callback_queue = await channel.declare_queue(exclusive=True) await channel.default_exchange.publish( Message( request, reply_to=callback_queue.name ), routing_key='rpc_queue' ) # ... and some code to read a response message from the callback_queue ... ... .. note:: **Message properties** The AMQP protocol predefines a set of 14 properties that go with a message. Most of the properties are rarely used, with the exception of the following: * `delivery_mode`: Marks a message as persistent (with a value of 2) or transient (any other value). You may remember this property from the :ref:`second tutorial `. * `content_type`: Used to describe the mime-type of the encoding. For example for the often used JSON encoding it is a good practice to set this property to: application/json. * `reply_to`: Commonly used to name a callback queue. * `correlation_id`: Useful to correlate RPC responses with requests. See additional info in :class:`aio_pika.Message` Correlation id ++++++++++++++ In the method presented above we suggest creating a callback queue for every RPC request. That's pretty inefficient, but fortunately there is a better way - let's create a single callback queue per client. That raises a new issue, having received a response in that queue it's not clear to which request the response belongs. That's when the `correlation_id` property is used. We're going to set it to a unique value for every request. Later, when we receive a message in the callback queue we'll look at this property, and based on that we'll be able to match a response with a request. If we see an unknown `correlation_id` value, we may safely discard the message - it doesn't belong to our requests. You may ask, why should we ignore unknown messages in the callback queue, rather than failing with an error? It's due to a possibility of a race condition on the server side. 
Although unlikely, it is possible that the RPC server will die just after sending us the answer, but before sending an acknowledgment message for the request. If that happens, the restarted RPC server will process the request again. That's why on the client we must handle the duplicate responses gracefully, and the RPC should ideally be idempotent. Summary +++++++ .. image:: /_static/tutorial/python-six.svg :align: center Our RPC will work like this: * When the Client starts up, it creates an anonymous exclusive callback queue. * For an RPC request, the Client sends a message with two properties: `reply_to`, which is set to the callback queue and `correlation_id`, which is set to a unique value for every request. * The request is sent to an rpc_queue queue. * The RPC worker (aka: server) is waiting for requests on that queue. When a request appears, it does the job and sends a message with the result back to the Client, using the queue from the reply_to field. * The client waits for data on the callback queue. When a message appears, it checks the `correlation_id` property. If it matches the value from the request it returns the response to the application. Putting it all together +++++++++++++++++++++++ The code for :download:`rpc_server.py `: .. literalinclude:: examples/6-rpc/rpc_server.py :language: python :linenos: The server code is rather straightforward: * (34) As usual we start by establishing the connection and declaring the queue. * (6) We declare our fibonacci function. It assumes only valid positive integer input. (Don't expect this one to work for big numbers, it's probably the slowest recursive implementation possible). * (15) We declare a callback for basic_consume, the core of the RPC server. It's executed when the request is received. It does the work and sends the response back. The code for :download:`rpc_client.py `: .. 
literalinclude:: examples/6-rpc/rpc_client.py :language: python :linenos: The client code is slightly more involved: * (15) We establish a connection, channel and declare an exclusive 'callback' queue for replies. * (22) We subscribe to the 'callback' queue, so that we can receive RPC responses. * (26) The 'on_response' callback executed on every response is doing a very simple job, for every response message it checks if the correlation_id is the one we're looking for. If so, it saves the response in self.response and breaks the consuming loop. * (30) Next, we define our main call method - it does the actual RPC request. * (31) In this method, first we generate a unique correlation_id number and save it - the 'on_response' callback function will use this value to catch the appropriate response. * (36) Next, we publish the request message, with two properties: reply_to and correlation_id. And finally we return the response back to the user. Our RPC service is now ready. We can start the server:: $ python rpc_server.py [x] Awaiting RPC requests To request a fibonacci number run the client:: $ python rpc_client.py [x] Requesting fib(30) The presented design is not the only possible implementation of a RPC service, but it has some important advantages: If the RPC server is too slow, you can scale up by just running another one. Try running a second rpc_server.py in a new console. On the client side, the RPC requires sending and receiving only one message. No synchronous calls like queue_declare are required. As a result the RPC client needs only one network round trip for a single RPC request. Our code is still pretty simplistic and doesn't try to solve more complex (but important) problems, like: * How should the client react if there are no servers running? * Should a client have some kind of timeout for the RPC? * If the server malfunctions and raises an exception, should it be forwarded to the client? 
* Protecting against invalid incoming messages (eg checking bounds) before processing. .. note:: If you want to experiment, you may find the rabbitmq-management plugin useful for viewing the queues. .. note:: This material was adopted from `official tutorial`_ on **rabbitmq.org**. python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/7-publisher-confirms.rst000066400000000000000000000155101476164671100271650ustar00rootroot00000000000000.. _issue: https://github.com/mosquito/aio-pika/issues .. _pull request: https://github.com/mosquito/aio-pika/compare .. _aio-pika: https://github.com/mosquito/aio-pika .. _official tutorial: https://www.rabbitmq.com/tutorials/tutorial-seven-php.html .. _publisher-confirms: Publisher Confirms ================== .. warning:: This is a beta version of the port from `official tutorial`_. Please when you found an error create `issue`_ or `pull request`_ for me. .. note:: Using the `aio-pika`_ async Python client .. note:: **Prerequisites** This tutorial assumes RabbitMQ is installed_ and running on localhost on standard port (`5672`). In case you use a different host, port or credentials, connections settings would require adjusting. .. _installed: https://www.rabbitmq.com/download.html **Where to get help** If you're having trouble going through this tutorial you can `contact us`_ through the mailing list. .. _contact us: https://groups.google.com/forum/#!forum/rabbitmq-users `Publisher confirms `_ are a RabbitMQ extension to implement reliable publishing. When publisher confirms are enabled on a channel, messages the client publishes are confirmed asynchronously by the broker, meaning they have been taken care of on the server side. Overview ++++++++ In this tutorial we're going to use publisher confirms to make sure published messages have safely reached the broker. We will cover several strategies to using publisher confirms and explain their pros and cons. 
Enabling Publisher Confirms on a Channel ++++++++++++++++++++++++++++++++++++++++ Publisher confirms are a RabbitMQ extension to the AMQP 0.9.1 protocol. Publisher confirms are enabled at the channel level by setting the :code:`publisher_confirms` parameter to :code:`True`, which is the default. .. code-block:: python channel = await connection.channel( publisher_confirms=True, # This is the default ) Strategy #1: Publishing Messages Individually +++++++++++++++++++++++++++++++++++++++++++++ Let's start with the simplest approach to publishing with confirms, that is, publishing a message and waiting synchronously for its confirmation: .. literalinclude:: examples/7-publisher-confirms/publish_individually.py :language: python :start-at: # Sending the messages :end-before: # Done sending messages In the previous example we publish a message as usual and wait for its confirmation with the :code:`await` keyword. The :code:`await` returns as soon as the message has been confirmed. If the message is not confirmed within the timeout or if it is nack-ed (meaning the broker could not take care of it for some reason), the :code:`await` will throw an exception. The :code:`on_return_raises` parameter of :code:`aio_pika.connect()` and :code:`connection.channel()` controls this behaivior for if a mandatory message is returned. The handling of the exception usually consists in logging an error message and/or retrying to send the message. Different client libraries have different ways to synchronously deal with publisher confirms, so make sure to read carefully the documentation of the client you are using. This technique is very straightforward but also has a major drawback: it **significantly slows down publishing**, as the confirmation of a message blocks the publishing of all subsequent messages. This approach is not going to deliver throughput of more than a few hundreds of published messages per second. Nevertheless, this can be good enough for some applications. 
Strategy #2: Publishing Messages in Batches +++++++++++++++++++++++++++++++++++++++++++ To improve upon our previous example, we can publish a batch of messages and wait for this whole batch to be confirmed. The following example uses a batch of 100: .. literalinclude:: examples/7-publisher-confirms/publish_batches.py :language: python :start-at: batchsize = 100 :end-before: # Done sending messages Waiting for a batch of messages to be confirmed improves throughput drastically over waiting for a confirm for individual message (up to 20-30 times with a remote RabbitMQ node). One drawback is that we do not know exactly what went wrong in case of failure, so we may have to keep a whole batch in memory to log something meaningful or to re-publish the messages. And this solution is still synchronous, so it blocks the publishing of messages. .. note:: To initiate message sending asynchronously, a task is created with :code:`asyncio.create_task`, so the execution of our function is handled by the event-loop. The :code:`await asyncio.sleep(0)` is required to make the event loop switch to our coroutine. Any :code:`await` would have sufficed, though. Using :code:`async for` with an :code:`async` generator also requires the generator to yield control flow with :code:`await` for message sending to be initiated. Without the task and the :code:`await` the message sending would only be initiated with the :code:`asyncio.gather` call. For some applications this behaivior might be acceptable. Strategy #3: Handling Publisher Confirms Asynchronously +++++++++++++++++++++++++++++++++++++++++++++++++++++++ The broker confirms published messages asynchronously, our helper function will publish the messages and be notified of these confirms: .. 
literalinclude:: examples/7-publisher-confirms/publish_asynchronously.py :language: python :start-at: # List for storing tasks :end-at: await asyncio.gather(*tasks) In Python 3.11 a :code:`TaskGroup` can be used instead of the :code:`list` with :code:`asyncio.gather`. The helper function publishes the message and awaits the confirmation. This way the helper function knows which message the confirmation, timeout or rejection belongs to. .. literalinclude:: examples/7-publisher-confirms/publish_asynchronously.py :language: python :pyobject: publish_and_handle_confirm Summary +++++++ Making sure published messages made it to the broker can be essential in some applications. Publisher confirms are a RabbitMQ feature that helps to meet this requirement. Publisher confirms are asynchronous in nature but it is also possible to handle them synchronously. There is no definitive way to implement publisher confirms, this usually comes down to the constraints in the application and in the overall system. Typical techniques are: * publishing messages individually, waiting for the confirmation synchronously: simple, but very limited throughput. * publishing messages in batch, waiting for the confirmation synchronously for a batch: simple, reasonable throughput, but hard to reason about when something goes wrong. * asynchronous handling: best performance and use of resources, good control in case of error, but can be involved to implement correctly. .. note:: This material was adopted from `official tutorial`_ on **rabbitmq.org**. 
python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/000077500000000000000000000000001476164671100242705ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/1-introduction/000077500000000000000000000000001476164671100271475ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/1-introduction/receive.py000066400000000000000000000020331476164671100311410ustar00rootroot00000000000000import asyncio from aio_pika import connect from aio_pika.abc import AbstractIncomingMessage async def on_message(message: AbstractIncomingMessage) -> None: """ on_message doesn't necessarily have to be defined as async. Here it is to show that it's possible. """ print(" [x] Received message %r" % message) print("Message body is: %r" % message.body) print("Before sleep!") await asyncio.sleep(5) # Represents async I/O operations print("After sleep!") async def main() -> None: # Perform connection connection = await connect("amqp://guest:guest@localhost/") async with connection: # Creating a channel channel = await connection.channel() # Declaring queue queue = await channel.declare_queue("hello") # Start listening the queue with name 'hello' await queue.consume(on_message, no_ack=True) print(" [*] Waiting for messages. 
To exit press CTRL+C") await asyncio.Future() if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/1-introduction/send.py000066400000000000000000000011441476164671100304520ustar00rootroot00000000000000import asyncio from aio_pika import Message, connect async def main() -> None: # Perform connection connection = await connect("amqp://guest:guest@localhost/") async with connection: # Creating a channel channel = await connection.channel() # Declaring queue queue = await channel.declare_queue("hello") # Sending the message await channel.default_exchange.publish( Message(b"Hello World!"), routing_key=queue.name, ) print(" [x] Sent 'Hello World!'") if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/2-work-queues/000077500000000000000000000000001476164671100267165ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/2-work-queues/new_task.py000066400000000000000000000013501476164671100311020ustar00rootroot00000000000000import asyncio import sys from aio_pika import DeliveryMode, Message, connect async def main() -> None: # Perform connection connection = await connect("amqp://guest:guest@localhost/") async with connection: # Creating a channel channel = await connection.channel() message_body = b" ".join( arg.encode() for arg in sys.argv[1:] ) or b"Hello World!" 
message = Message( message_body, delivery_mode=DeliveryMode.PERSISTENT, ) # Sending the message await channel.default_exchange.publish( message, routing_key="task_queue", ) print(f" [x] Sent {message!r}") if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/2-work-queues/new_task_initial.py000066400000000000000000000012151476164671100326130ustar00rootroot00000000000000import asyncio import sys from aio_pika import Message, connect async def main() -> None: # Perform connection connection = await connect("amqp://guest:guest@localhost/") async with connection: # Creating a channel channel = await connection.channel() message_body = b" ".join( arg.encode() for arg in sys.argv[1:] ) or b"Hello World!" # Sending the message await channel.default_exchange.publish( Message(message_body), routing_key="hello", ) print(f" [x] Sent {message_body!r}") if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/2-work-queues/tasks_worker.py000066400000000000000000000017601476164671100320120ustar00rootroot00000000000000import asyncio from aio_pika import connect from aio_pika.abc import AbstractIncomingMessage async def on_message(message: AbstractIncomingMessage) -> None: async with message.process(): print(f" [x] Received message {message!r}") await asyncio.sleep(message.body.count(b'.')) print(f" Message body is: {message.body!r}") async def main() -> None: # Perform connection connection = await connect("amqp://guest:guest@localhost/") async with connection: # Creating a channel channel = await connection.channel() await channel.set_qos(prefetch_count=1) # Declaring queue queue = await channel.declare_queue( "task_queue", durable=True, ) # Start listening the queue with name 'task_queue' await queue.consume(on_message) print(" [*] Waiting for messages. 
To exit press CTRL+C") await asyncio.Future() if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/3-publish-subscribe/000077500000000000000000000000001476164671100300555ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/3-publish-subscribe/emit_log.py000066400000000000000000000015061476164671100322300ustar00rootroot00000000000000import asyncio import sys from aio_pika import DeliveryMode, ExchangeType, Message, connect async def main() -> None: # Perform connection connection = await connect("amqp://guest:guest@localhost/") async with connection: # Creating a channel channel = await connection.channel() logs_exchange = await channel.declare_exchange( "logs", ExchangeType.FANOUT, ) message_body = b" ".join( arg.encode() for arg in sys.argv[1:] ) or b"Hello World!" message = Message( message_body, delivery_mode=DeliveryMode.PERSISTENT, ) # Sending the message await logs_exchange.publish(message, routing_key="info") print(f" [x] Sent {message!r}") if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/3-publish-subscribe/receive_logs.py000066400000000000000000000017711476164671100331030ustar00rootroot00000000000000import asyncio from aio_pika import ExchangeType, connect from aio_pika.abc import AbstractIncomingMessage async def on_message(message: AbstractIncomingMessage) -> None: async with message.process(): print(f"[x] {message.body!r}") async def main() -> None: # Perform connection connection = await connect("amqp://guest:guest@localhost/") async with connection: # Creating a channel channel = await connection.channel() await channel.set_qos(prefetch_count=1) logs_exchange = await channel.declare_exchange( "logs", ExchangeType.FANOUT, ) # Declaring queue queue = await channel.declare_queue(exclusive=True) # Binding the queue to the exchange await queue.bind(logs_exchange) # Start listening the queue await 
queue.consume(on_message) print(" [*] Waiting for logs. To exit press CTRL+C") await asyncio.Future() if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/4-routing/000077500000000000000000000000001476164671100261205ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/4-routing/emit_log_direct.py000066400000000000000000000016231476164671100316250ustar00rootroot00000000000000import asyncio import sys from aio_pika import DeliveryMode, ExchangeType, Message, connect async def main() -> None: # Perform connection connection = await connect("amqp://guest:guest@localhost/") async with connection: # Creating a channel channel = await connection.channel() logs_exchange = await channel.declare_exchange( "logs", ExchangeType.DIRECT, ) message_body = b" ".join( arg.encode() for arg in sys.argv[2:] ) or b"Hello World!" message = Message( message_body, delivery_mode=DeliveryMode.PERSISTENT, ) # Sending the message routing_key = sys.argv[1] if len(sys.argv) > 2 else "info" await logs_exchange.publish(message, routing_key=routing_key) print(f" [x] Sent {message.body!r}") if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/4-routing/receive_logs_direct.py000066400000000000000000000024661476164671100325020ustar00rootroot00000000000000import asyncio import sys from aio_pika import ExchangeType, connect from aio_pika.abc import AbstractIncomingMessage async def on_message(message: AbstractIncomingMessage) -> None: async with message.process(): print(" [x] %r:%r" % (message.routing_key, message.body)) async def main() -> None: # Perform connection connection = await connect("amqp://guest:guest@localhost/") async with connection: # Creating a channel channel = await connection.channel() await channel.set_qos(prefetch_count=1) severities = sys.argv[1:] if not severities: sys.stderr.write( "Usage: %s [info] [warning] [error]\n" % 
sys.argv[0], ) sys.exit(1) # Declare an exchange direct_logs_exchange = await channel.declare_exchange( "logs", ExchangeType.DIRECT, ) # Declaring random queue queue = await channel.declare_queue(durable=True) for severity in severities: await queue.bind(direct_logs_exchange, routing_key=severity) # Start listening the random queue await queue.consume(on_message) print(" [*] Waiting for messages. To exit press CTRL+C") await asyncio.Future() if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/4-routing/receive_logs_direct_simple.py000066400000000000000000000024761476164671100340540ustar00rootroot00000000000000import asyncio import sys from aio_pika import ExchangeType, connect from aio_pika.abc import AbstractIncomingMessage async def main() -> None: # Perform connection connection = await connect("amqp://guest:guest@localhost/") async with connection: # Creating a channel channel = await connection.channel() await channel.set_qos(prefetch_count=1) severities = sys.argv[1:] if not severities: sys.stderr.write( f"Usage: {sys.argv[0]} [info] [warning] [error]\n", ) sys.exit(1) # Declare an exchange direct_logs_exchange = await channel.declare_exchange( "logs", ExchangeType.DIRECT, ) # Declaring random queue queue = await channel.declare_queue(durable=True) for severity in severities: await queue.bind(direct_logs_exchange, routing_key=severity) async with queue.iterator() as iterator: message: AbstractIncomingMessage async for message in iterator: async with message.process(): print(f" [x] {message.routing_key!r}:{message.body!r}") print(" [*] Waiting for messages. 
To exit press CTRL+C") await asyncio.Future() if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/5-topics/000077500000000000000000000000001476164671100257335ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/5-topics/emit_log_topic.py000066400000000000000000000016711476164671100313070ustar00rootroot00000000000000import asyncio import sys from aio_pika import DeliveryMode, ExchangeType, Message, connect async def main() -> None: # Perform connection connection = await connect( "amqp://guest:guest@localhost/", ) async with connection: # Creating a channel channel = await connection.channel() topic_logs_exchange = await channel.declare_exchange( "topic_logs", ExchangeType.TOPIC, ) routing_key = sys.argv[1] if len(sys.argv) > 2 else "anonymous.info" message_body = b" ".join( arg.encode() for arg in sys.argv[2:] ) or b"Hello World!" message = Message( message_body, delivery_mode=DeliveryMode.PERSISTENT, ) # Sending the message await topic_logs_exchange.publish(message, routing_key=routing_key) print(f" [x] Sent {message!r}") if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/5-topics/receive_logs_topic.py000066400000000000000000000023471476164671100321570ustar00rootroot00000000000000import asyncio import sys from aio_pika import ExchangeType, connect from aio_pika.abc import AbstractIncomingMessage async def main() -> None: # Perform connection connection = await connect("amqp://guest:guest@localhost/") # Creating a channel channel = await connection.channel() await channel.set_qos(prefetch_count=1) # Declare an exchange topic_logs_exchange = await channel.declare_exchange( "topic_logs", ExchangeType.TOPIC, ) # Declaring queue queue = await channel.declare_queue( "task_queue", durable=True, ) binding_keys = sys.argv[1:] if not binding_keys: sys.stderr.write("Usage: %s [binding_key]...\n" % sys.argv[0]) sys.exit(1) for 
binding_key in binding_keys: await queue.bind(topic_logs_exchange, routing_key=binding_key) print(" [*] Waiting for messages. To exit press CTRL+C") # Start listening the queue with name 'task_queue' async with queue.iterator() as iterator: message: AbstractIncomingMessage async for message in iterator: async with message.process(): print(f" [x] {message.routing_key!r}:{message.body!r}") if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/6-rpc/000077500000000000000000000000001476164671100252175ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/6-rpc/rpc_client.py000066400000000000000000000035521476164671100277200ustar00rootroot00000000000000import asyncio import uuid from typing import MutableMapping from aio_pika import Message, connect from aio_pika.abc import ( AbstractChannel, AbstractConnection, AbstractIncomingMessage, AbstractQueue, ) class FibonacciRpcClient: connection: AbstractConnection channel: AbstractChannel callback_queue: AbstractQueue def __init__(self) -> None: self.futures: MutableMapping[str, asyncio.Future] = {} async def connect(self) -> "FibonacciRpcClient": self.connection = await connect("amqp://guest:guest@localhost/") self.channel = await self.connection.channel() self.callback_queue = await self.channel.declare_queue(exclusive=True) await self.callback_queue.consume(self.on_response, no_ack=True) return self async def on_response(self, message: AbstractIncomingMessage) -> None: if message.correlation_id is None: print(f"Bad message {message!r}") return future: asyncio.Future = self.futures.pop(message.correlation_id) future.set_result(message.body) async def call(self, n: int) -> int: correlation_id = str(uuid.uuid4()) loop = asyncio.get_running_loop() future = loop.create_future() self.futures[correlation_id] = future await self.channel.default_exchange.publish( Message( str(n).encode(), content_type="text/plain", 
correlation_id=correlation_id, reply_to=self.callback_queue.name, ), routing_key="rpc_queue", ) return int(await future) async def main() -> None: fibonacci_rpc = await FibonacciRpcClient().connect() print(" [x] Requesting fib(30)") response = await fibonacci_rpc.call(30) print(f" [.] Got {response!r}") if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/6-rpc/rpc_server.py000066400000000000000000000030241476164671100277420ustar00rootroot00000000000000import asyncio import logging from aio_pika import Message, connect from aio_pika.abc import AbstractIncomingMessage def fib(n: int) -> int: if n == 0: return 0 elif n == 1: return 1 else: return fib(n - 1) + fib(n - 2) async def main() -> None: # Perform connection connection = await connect("amqp://guest:guest@localhost/") # Creating a channel channel = await connection.channel() exchange = channel.default_exchange # Declaring queue queue = await channel.declare_queue("rpc_queue") print(" [x] Awaiting RPC requests") # Start listening the queue with name 'hello' async with queue.iterator() as qiterator: message: AbstractIncomingMessage async for message in qiterator: try: async with message.process(requeue=False): assert message.reply_to is not None n = int(message.body.decode()) print(f" [.] 
fib({n})") response = str(fib(n)).encode() await exchange.publish( Message( body=response, correlation_id=message.correlation_id, ), routing_key=message.reply_to, ) print("Request complete") except Exception: logging.exception("Processing error for message %r", message) if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/000077500000000000000000000000001476164671100302475ustar00rootroot00000000000000publish_asynchronously.py000066400000000000000000000036301476164671100353720ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/7-publisher-confirmsimport asyncio from typing import Generator from aio_pika import Message, connect from aiormq.exceptions import DeliveryError from pamqp.commands import Basic from aio_pika.abc import AbstractExchange def get_messages_to_publish() -> Generator[bytes, None, None]: for i in range(10000): yield f"Hello World {i}!".encode() async def publish_and_handle_confirm( exchange: AbstractExchange, queue_name: str, message_body: bytes, ) -> None: try: confirmation = await exchange.publish( Message(message_body), routing_key=queue_name, timeout=5.0, ) except DeliveryError as e: print(f"Delivery of {message_body!r} failed with exception: {e}") except TimeoutError: print(f"Timeout occured for {message_body!r}") else: if not isinstance(confirmation, Basic.Ack): print(f"Message {message_body!r} was not acknowledged by broker!") async def main() -> None: # Perform connection connection = await connect("amqp://guest:guest@localhost/") async with connection: # Creating a channel channel = await connection.channel() # Declaring queue queue = await channel.declare_queue("hello") # List for storing tasks tasks = [] # Sending the messages for msg in get_messages_to_publish(): task = asyncio.create_task( publish_and_handle_confirm( channel.default_exchange, queue.name, msg, ) ) tasks.append(task) # Yield control flow to event loop, so 
message sending is initiated: await asyncio.sleep(0) # Await all tasks await asyncio.gather(*tasks) print(" [x] Sent and confirmed multiple messages asynchronously. ") if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_batches.py000066400000000000000000000030021476164671100337530ustar00rootroot00000000000000import asyncio from typing import Generator from aio_pika import Message, connect def get_messages_to_publish() -> Generator[bytes, None, None]: for i in range(10000): yield f"Hello World {i}!".encode() async def main() -> None: # Perform connection connection = await connect("amqp://guest:guest@localhost/") async with connection: # Creating a channel channel = await connection.channel() # Declaring queue queue = await channel.declare_queue("hello") batchsize = 100 outstanding_messages = [] # Sending the messages for msg in get_messages_to_publish(): outstanding_messages.append( asyncio.create_task( channel.default_exchange.publish( Message(msg), routing_key=queue.name, timeout=5.0, ) ) ) # Yield control flow to event loop, so message sending is initiated: await asyncio.sleep(0) if len(outstanding_messages) == batchsize: await asyncio.gather(*outstanding_messages) outstanding_messages.clear() if len(outstanding_messages) > 0: await asyncio.gather(*outstanding_messages) outstanding_messages.clear() # Done sending messages print(" [x] Sent and confirmed multiple messages in batches. 
") if __name__ == "__main__": asyncio.run(main()) publish_individually.py000066400000000000000000000017471476164671100347760ustar00rootroot00000000000000python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/examples/7-publisher-confirmsimport asyncio from typing import Generator from aio_pika import Message, connect def get_messages_to_publish() -> Generator[bytes, None, None]: for i in range(10000): yield f"Hello World {i}!".encode() async def main() -> None: # Perform connection connection = await connect("amqp://guest:guest@localhost/") async with connection: # Creating a channel channel = await connection.channel() # Declaring queue queue = await channel.declare_queue("hello") # Sending the messages for msg in get_messages_to_publish(): # Waiting for publisher confirmation with timeout for every message await channel.default_exchange.publish( Message(msg), routing_key=queue.name, timeout=5.0, ) # Done sending messages print(" [x] Sent and confirmed multiple messages individually. ") if __name__ == "__main__": asyncio.run(main()) python-aio-pika-9.5.5/docs/source/rabbitmq-tutorial/index.rst000066400000000000000000000002101476164671100243040ustar00rootroot00000000000000RabbitMQ tutorial ================= .. 
toctree:: :glob: :maxdepth: 3 :caption: RabbitMQ tutorial adopted for aio-pika *-* python-aio-pika-9.5.5/gray.conf000066400000000000000000000001271476164671100163710ustar00rootroot00000000000000formatters = add-trailing-comma,isort,unify min-python-version = 3.7 log-level = error python-aio-pika-9.5.5/logo.svg000066400000000000000000000240131476164671100162410ustar00rootroot00000000000000 image/svg+xmlaio-pika python-aio-pika-9.5.5/noxfile.py000066400000000000000000000006471476164671100166050ustar00rootroot00000000000000import nox from nox import Session @nox.session def docs(session: Session) -> None: session.install(".") session.install("sphinx", "sphinx-autobuild") session.run("rm", "-rf", "build/html", external=True) sphinx_args = ["-W", "docs/source", "build/html"] if "serve" in session.posargs: session.run("sphinx-autobuild", *sphinx_args) else: session.run("sphinx-build", *sphinx_args) python-aio-pika-9.5.5/poetry.lock000066400000000000000000004421171476164671100167650ustar00rootroot00000000000000# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "aiomisc" version = "17.5.29" description = "aiomisc - miscellaneous utils for asyncio" optional = false python-versions = "<4.0,>=3.8" files = [ {file = "aiomisc-17.5.29-py3-none-any.whl", hash = "sha256:8587fbffe7ded7aa397092b2bb34d983104b2abf85f64d8584ea54dcffbe4b36"}, {file = "aiomisc-17.5.29.tar.gz", hash = "sha256:0e33a5e1eca6c83da7d5b6c3508facde21a25a34ff02146a9939a2669b9f2cd5"}, ] [package.dependencies] colorlog = ">=6.0,<7.0" logging-journald = {version = "*", markers = "sys_platform == \"linux\""} typing_extensions = {version = "*", markers = "python_version < \"3.10\""} [package.extras] aiohttp = ["aiohttp (>3)"] asgi = ["aiohttp-asgi (>=0.5.2,<0.6.0)"] carbon = ["aiocarbon (>=0.15,<0.16)"] cron = ["croniter (==2.0)"] dns = ["dnslib (>=0.9,<0.10)"] grpc = ["grpcio (>=1.56,<2.0)", "grpcio-reflection (>=1.56,<2.0)", "grpcio-tools (>=1.56,<2.0)"] raven = ["aiohttp (>3)", "raven"] rich = ["rich"] uvicorn = ["asgiref (>=3.7,<4.0)", "uvicorn (>=0.27,<0.28)"] uvloop = ["uvloop (>=0.19,<1)"] [[package]] name = "aiomisc-pytest" version = "1.2.1" description = "pytest integration for aiomisc" optional = false python-versions = "<4.0,>=3.8" files = [ {file = "aiomisc_pytest-1.2.1-py3-none-any.whl", hash = "sha256:24a3802f0b794625a6bce0032f840c5e643e6f075c8a448297f1e92789c38736"}, {file = "aiomisc_pytest-1.2.1.tar.gz", hash = "sha256:e2658fefb4770a85fe5e1a13e816f142dc6920da5d99ea338d852c7a035f3325"}, ] [package.dependencies] aiomisc = ">=17" pytest = "8.2.0" [[package]] name = "aiormq" version = "6.8.1" description = "Pure python AMQP asynchronous client library" optional = false python-versions = "<4.0,>=3.8" files = [ {file = "aiormq-6.8.1-py3-none-any.whl", hash = "sha256:5da896c8624193708f9409ffad0b20395010e2747f22aa4150593837f40aa017"}, {file = "aiormq-6.8.1.tar.gz", hash = "sha256:a964ab09634be1da1f9298ce225b310859763d5cf83ef3a7eae1a6dc6bd1da1a"}, ] [package.dependencies] pamqp = "3.3.0" yarl = "*" [[package]] name = "alabaster" version = 
"0.7.16" description = "A light, configurable Sphinx theme" optional = false python-versions = ">=3.9" files = [ {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] [[package]] name = "argcomplete" version = "3.5.1" description = "Bash tab completion for argparse" optional = false python-versions = ">=3.8" files = [ {file = "argcomplete-3.5.1-py3-none-any.whl", hash = "sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363"}, {file = "argcomplete-3.5.1.tar.gz", hash = "sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4"}, ] [package.extras] test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] [[package]] name = "autodoc" version = "0.5.0" description = "Autodoc Python implementation." optional = false python-versions = "*" files = [ {file = "autodoc-0.5.0.tar.gz", hash = "sha256:c4387c5a0f1c09b055bb2e384542ee1e016542f313b2a33d904ca77f0460ded3"}, ] [package.dependencies] decorator = "*" webtest = "*" [[package]] name = "babel" version = "2.16.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" files = [ {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "beautifulsoup4" version = "4.12.3" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" files = [ {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, {file = "beautifulsoup4-4.12.3.tar.gz", hash = 
"sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, ] [package.dependencies] soupsieve = ">1.2" [package.extras] cchardet = ["cchardet"] chardet = ["chardet"] charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] [[package]] name = "certifi" version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "charset-normalizer" version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, {file = 
"charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, {file = 
"charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, {file = 
"charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, {file = 
"charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] name = "collective-checkdocs" version = "0.2" description = "Distutils command to view and validate restructured text in package's long_description" optional = false python-versions = "*" files = [ {file = "collective.checkdocs-0.2.zip", hash = "sha256:3a5328257c5224bc72753820c182910d7fb336bc1dba5e09113d48566655e46e"}, ] [package.dependencies] docutils = "*" [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] [[package]] name = "colorlog" version = "6.9.0" description = "Add colours to the output of Python's logging module." 
optional = false python-versions = ">=3.6" files = [ {file = "colorlog-6.9.0-py3-none-any.whl", hash = "sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff"}, {file = "colorlog-6.9.0.tar.gz", hash = "sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] development = ["black", "flake8", "mypy", "pytest", "types-colorama"] [[package]] name = "coverage" version = "6.5.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.7" files = [ {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = 
"sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, {file = 
"coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, {file = 
"coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, {file = 
"coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] toml = ["tomli"] [[package]] name = "coveralls" version = "3.3.1" description = "Show coverage stats online via coveralls.io" optional = false python-versions = ">= 3.5" files = [ {file = "coveralls-3.3.1-py2.py3-none-any.whl", hash = "sha256:f42015f31d386b351d4226389b387ae173207058832fbf5c8ec4b40e27b16026"}, {file = "coveralls-3.3.1.tar.gz", hash = "sha256:b32a8bb5d2df585207c119d6c01567b81fba690c9c10a753bfe27a335bfc43ea"}, ] [package.dependencies] coverage = ">=4.1,<6.0.dev0 || >6.1,<6.1.1 || >6.1.1,<7.0" docopt = ">=0.6.1" requests = ">=1.0.0" [package.extras] yaml = ["PyYAML (>=3.10)"] [[package]] name = "decorator" version = "5.1.1" description = "Decorators for Humans" optional = false python-versions = ">=3.5" files = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] [[package]] name = "deprecation" version = "2.1.0" description = "A library to handle automated deprecations" optional = false python-versions = "*" files = [ {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, {file = "deprecation-2.1.0.tar.gz", hash = 
"sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, ] [package.dependencies] packaging = "*" [[package]] name = "distlib" version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" files = [ {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, ] [[package]] name = "docker" version = "7.1.0" description = "A Python library for the Docker Engine API." optional = false python-versions = ">=3.8" files = [ {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, ] [package.dependencies] pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} requests = ">=2.26.0" urllib3 = ">=1.26.0" [package.extras] dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] ssh = ["paramiko (>=2.4.3)"] websockets = ["websocket-client (>=1.3.0)"] [[package]] name = "docopt" version = "0.6.2" description = "Pythonic argument parser, that will make you smile" optional = false python-versions = "*" files = [ {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, ] [[package]] name = "docutils" version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" files = [ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, ] [[package]] name = 
"exceptiongroup" version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] test = ["pytest (>=6)"] [[package]] name = "filelock" version = "3.16.1" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "furo" version = "2024.8.6" description = "A clean customisable Sphinx documentation theme." 
optional = false python-versions = ">=3.8" files = [ {file = "furo-2024.8.6-py3-none-any.whl", hash = "sha256:6cd97c58b47813d3619e63e9081169880fbe331f0ca883c871ff1f3f11814f5c"}, {file = "furo-2024.8.6.tar.gz", hash = "sha256:b63e4cee8abfc3136d3bc03a3d45a76a850bada4d6374d24c1716b0e01394a01"}, ] [package.dependencies] beautifulsoup4 = "*" pygments = ">=2.7" sphinx = ">=6.0,<9.0" sphinx-basic-ng = ">=1.0.0.beta2" [[package]] name = "idna" version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] [[package]] name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] [[package]] name = "importlib-metadata" version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] zipp = ">=3.20" [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift 
(>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] [[package]] name = "jinja2" version = "3.1.4" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] [[package]] name = "legacy-cgi" version = "2.6.1" description = "Fork of the standard library cgi and cgitb modules, being deprecated in PEP-594" optional = false python-versions = "<4.0,>=3.10" files = [ {file = "legacy_cgi-2.6.1-py3-none-any.whl", hash = "sha256:8eacc1522d9f76451337a4b5a0abf494158d39250754b0d1bc19a14c6512af9b"}, {file = "legacy_cgi-2.6.1.tar.gz", hash = "sha256:f2ada99c747c3d72a473a6aaff6259a61f226b06fe9f3106e495ab83fd8f7a42"}, ] [[package]] name = "livereload" version = "2.7.0" description = "Python LiveReload is an awesome tool for web developers" optional = false python-versions = ">=3.7" files = [ {file = "livereload-2.7.0-py3-none-any.whl", hash = "sha256:19bee55aff51d5ade6ede0dc709189a0f904d3b906d3ea71641ed548acff3246"}, {file = "livereload-2.7.0.tar.gz", hash = 
"sha256:f4ba199ef93248902841e298670eebfe1aa9e148e19b343bc57dbf1b74de0513"}, ] [package.dependencies] tornado = "*" [[package]] name = "logging-journald" version = "0.6.10" description = "Pure python logging handler for writing logs to the journald using native protocol" optional = false python-versions = "<4.0,>=3.8" files = [ {file = "logging_journald-0.6.10-py3-none-any.whl", hash = "sha256:303664d945be136144905e6a6a68275ae095121e80ec3a3b769660a2bf3abb1a"}, {file = "logging_journald-0.6.10.tar.gz", hash = "sha256:cf53f658136879aa664da378ddb98d267b9ea9349e4d7d72753db705c70f2502"}, ] [[package]] name = "markupsafe" version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, {file = 
"MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = 
"sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] [[package]] name = "multidict" version = "6.1.0" description = "multidict implementation" optional = false python-versions = ">=3.8" files = [ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, {file = 
"multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, {file = 
"multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, {file = 
"multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, {file = 
"multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, 
{file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, 
{file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, ] [package.dependencies] typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} [[package]] name = "mypy" version = "1.13.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", 
hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, {file = "mypy-1.13.0.tar.gz", hash = 
"sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.5" files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] name = "nox" version = "2024.10.9" description = "Flexible test automation." optional = false python-versions = ">=3.8" files = [ {file = "nox-2024.10.9-py3-none-any.whl", hash = "sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab"}, {file = "nox-2024.10.9.tar.gz", hash = "sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95"}, ] [package.dependencies] argcomplete = ">=1.9.4,<4" colorlog = ">=2.6.1,<7" packaging = ">=20.9" tomli = {version = ">=1", markers = "python_version < \"3.11\""} virtualenv = ">=20.14.1" [package.extras] tox-to-nox = ["jinja2", "tox"] uv = ["uv (>=0.1.6)"] [[package]] name = "packaging" version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] name = "pamqp" version = "3.3.0" description = "RabbitMQ Focused AMQP low-level library" optional = false python-versions = 
">=3.7" files = [ {file = "pamqp-3.3.0-py2.py3-none-any.whl", hash = "sha256:c901a684794157ae39b52cbf700db8c9aae7a470f13528b9d7b4e5f7202f8eb0"}, {file = "pamqp-3.3.0.tar.gz", hash = "sha256:40b8795bd4efcf2b0f8821c1de83d12ca16d5760f4507836267fd7a02b06763b"}, ] [package.extras] codegen = ["lxml", "requests", "yapf"] testing = ["coverage", "flake8", "flake8-comprehensions", "flake8-deprecated", "flake8-import-order", "flake8-print", "flake8-quotes", "flake8-rst-docstrings", "flake8-tuple", "yapf"] [[package]] name = "platformdirs" version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] name = "propcache" version = "0.2.1" description = "Accelerated property cache" optional = false python-versions = ">=3.9" files = [ {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"}, {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"}, {file = "propcache-0.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea"}, {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212"}, {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3"}, {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d"}, {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634"}, {file = "propcache-0.2.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2"}, {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958"}, {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c"}, {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583"}, {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf"}, {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034"}, {file = 
"propcache-0.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b"}, {file = "propcache-0.2.1-cp310-cp310-win32.whl", hash = "sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4"}, {file = "propcache-0.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba"}, {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16"}, {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717"}, {file = "propcache-0.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3"}, {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9"}, {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787"}, {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465"}, {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af"}, {file = "propcache-0.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7"}, {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f"}, {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = 
"sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54"}, {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505"}, {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82"}, {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca"}, {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e"}, {file = "propcache-0.2.1-cp311-cp311-win32.whl", hash = "sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034"}, {file = "propcache-0.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3"}, {file = "propcache-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a"}, {file = "propcache-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0"}, {file = "propcache-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d"}, {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4"}, {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d"}, {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5"}, {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24"}, {file = "propcache-0.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff"}, {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f"}, {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec"}, {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348"}, {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6"}, {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6"}, {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518"}, {file = "propcache-0.2.1-cp312-cp312-win32.whl", hash = "sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246"}, {file = "propcache-0.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1"}, {file = "propcache-0.2.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc"}, {file = "propcache-0.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9"}, {file = "propcache-0.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439"}, {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536"}, {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629"}, {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b"}, {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052"}, {file = "propcache-0.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce"}, {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d"}, {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce"}, {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95"}, {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf"}, {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f"}, {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30"}, {file = "propcache-0.2.1-cp313-cp313-win32.whl", hash = "sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6"}, {file = "propcache-0.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1"}, {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_universal2.whl", 
hash = "sha256:6a9a8c34fb7bb609419a211e59da8887eeca40d300b5ea8e56af98f6fbbb1541"}, {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae1aa1cd222c6d205853b3013c69cd04515f9d6ab6de4b0603e2e1c33221303e"}, {file = "propcache-0.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:accb6150ce61c9c4b7738d45550806aa2b71c7668c6942f17b0ac182b6142fd4"}, {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eee736daafa7af6d0a2dc15cc75e05c64f37fc37bafef2e00d77c14171c2097"}, {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7a31fc1e1bd362874863fdeed71aed92d348f5336fd84f2197ba40c59f061bd"}, {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba4cfa1052819d16699e1d55d18c92b6e094d4517c41dd231a8b9f87b6fa681"}, {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f089118d584e859c62b3da0892b88a83d611c2033ac410e929cb6754eec0ed16"}, {file = "propcache-0.2.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:781e65134efaf88feb447e8c97a51772aa75e48b794352f94cb7ea717dedda0d"}, {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31f5af773530fd3c658b32b6bdc2d0838543de70eb9a2156c03e410f7b0d3aae"}, {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:a7a078f5d37bee6690959c813977da5291b24286e7b962e62a94cec31aa5188b"}, {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cea7daf9fc7ae6687cf1e2c049752f19f146fdc37c2cc376e7d0032cf4f25347"}, {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:8b3489ff1ed1e8315674d0775dc7d2195fb13ca17b3808721b54dbe9fd020faf"}, {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9403db39be1393618dd80c746cb22ccda168efce239c73af13c3763ef56ffc04"}, {file = 
"propcache-0.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5d97151bc92d2b2578ff7ce779cdb9174337390a535953cbb9452fb65164c587"}, {file = "propcache-0.2.1-cp39-cp39-win32.whl", hash = "sha256:9caac6b54914bdf41bcc91e7eb9147d331d29235a7c967c150ef5df6464fd1bb"}, {file = "propcache-0.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:92fc4500fcb33899b05ba73276dfb684a20d31caa567b7cb5252d48f896a91b1"}, {file = "propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54"}, {file = "propcache-0.2.1.tar.gz", hash = "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64"}, ] [[package]] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] [[package]] name = "pycodestyle" version = "2.12.1" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, ] [[package]] name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" optional = false python-versions = ">=3.6" files = [ {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, ] [package.dependencies] snowballstemmer = ">=2.2.0" [package.extras] toml = ["tomli 
(>=1.2.3)"] [[package]] name = "pyflakes" version = "3.2.0" description = "passive checker of Python programs" optional = false python-versions = ">=3.8" files = [ {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] [[package]] name = "pygments" version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" files = [ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, ] [package.extras] windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pylama" version = "8.4.1" description = "Code audit tool for python" optional = false python-versions = ">=3.7" files = [ {file = "pylama-8.4.1-py3-none-any.whl", hash = "sha256:5bbdbf5b620aba7206d688ed9fc917ecd3d73e15ec1a89647037a09fa3a86e60"}, {file = "pylama-8.4.1.tar.gz", hash = "sha256:2d4f7aecfb5b7466216d48610c7d6bad1c3990c29cdd392ad08259b161e486f6"}, ] [package.dependencies] mccabe = ">=0.7.0" pycodestyle = ">=2.9.1" pydocstyle = ">=6.1.1" pyflakes = ">=2.5.0" [package.extras] all = ["eradicate", "mypy", "pylint", "radon", "vulture"] eradicate = ["eradicate"] mypy = ["mypy"] pylint = ["pylint"] radon = ["radon"] tests = ["eradicate (>=2.0.0)", "mypy", "pylama-quotes", "pylint (>=2.11.1)", "pytest (>=7.1.2)", "pytest-mypy", "radon (>=5.1.0)", "toml", "types-setuptools", "types-toml", "vulture"] toml = ["toml (>=0.10.2)"] vulture = ["vulture"] [[package]] name = "pytest" version = "8.2.0" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ {file = "pytest-8.2.0-py3-none-any.whl", hash = 
"sha256:1733f0620f6cda4095bbf0d9ff8022486e91892245bb9e7d5542c018f612f233"}, {file = "pytest-8.2.0.tar.gz", hash = "sha256:d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=1.5,<2.0" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.7" files = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "pytest-rst" version = "0.0.7" description = "Test code from RST documents with pytest" optional = false python-versions = "*" files = [ {file = "pytest-rst-0.0.7.tar.gz", hash = "sha256:2b34aa9d41ce8dce3e685e6f40dff432804962ffec49d3d37565f2dbad4014d5"}, ] [package.dependencies] docutils = "*" py = "*" pygments = "*" pytest = "*" [[package]] name = "pywin32" version = "308" description = "Python for Window Extensions" optional = false python-versions = "*" files = [ {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, {file = 
"pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, {file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, {file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"}, {file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"}, {file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"}, {file = "pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"}, {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = 
"sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, ] [[package]] name = "requests" version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" version = "69.5.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock 
(>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "shortuuid" version = "1.0.13" description = "A generator library for concise, unambiguous and URL-safe UUIDs." optional = false python-versions = ">=3.6" files = [ {file = "shortuuid-1.0.13-py3-none-any.whl", hash = "sha256:a482a497300b49b4953e15108a7913244e1bb0d41f9d332f5e9925dba33a3c5a"}, {file = "shortuuid-1.0.13.tar.gz", hash = "sha256:3bb9cf07f606260584b1df46399c0b87dd84773e7b25912b7e391e30797c5e72"}, ] [[package]] name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." optional = false python-versions = "*" files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] [[package]] name = "soupsieve" version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." 
optional = false python-versions = ">=3.8" files = [ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] [[package]] name = "sphinx" version = "7.4.7" description = "Python documentation generator" optional = false python-versions = ">=3.9" files = [ {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, ] [package.dependencies] alabaster = ">=0.7.14,<0.8.0" babel = ">=2.13" colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} docutils = ">=0.20,<0.22" imagesize = ">=1.3" importlib-metadata = {version = ">=6.0", markers = "python_version < \"3.10\""} Jinja2 = ">=3.1" packaging = ">=23.0" Pygments = ">=2.17" requests = ">=2.30.0" snowballstemmer = ">=2.2" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = ">=2.0.0" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" sphinxcontrib-serializinghtml = ">=1.1.9" tomli = {version = ">=2", markers = "python_version < \"3.11\""} [package.extras] docs = ["sphinxcontrib-websupport"] lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] [[package]] name = "sphinx-autobuild" version = "2021.3.14" description = "Rebuild Sphinx documentation on changes, with live-reload in the browser." 
optional = false python-versions = ">=3.6" files = [ {file = "sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"}, {file = "sphinx_autobuild-2021.3.14-py3-none-any.whl", hash = "sha256:8fe8cbfdb75db04475232f05187c776f46f6e9e04cacf1e49ce81bdac649ccac"}, ] [package.dependencies] colorama = "*" livereload = "*" sphinx = "*" [package.extras] test = ["pytest", "pytest-cov"] [[package]] name = "sphinx-basic-ng" version = "1.0.0b2" description = "A modern skeleton for Sphinx themes." optional = false python-versions = ">=3.7" files = [ {file = "sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b"}, {file = "sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9"}, ] [package.dependencies] sphinx = ">=4.0" [package.extras] docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] [[package]] name = "sphinxcontrib-applehelp" version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" files = [ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, ] [package.extras] lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" files = [ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", 
hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, ] [package.extras] lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-googleanalytics" version = "0.4" description = "Sphinx extension googleanalytics" optional = false python-versions = "*" files = [ {file = "sphinxcontrib-googleanalytics-0.4.tar.gz", hash = "sha256:4b19c1f0fce5df6c7da5633201b64a9e5b0cb3210a14fdb4134942ceee8c5d12"}, {file = "sphinxcontrib_googleanalytics-0.4-py3-none-any.whl", hash = "sha256:a6574983f9a58e5864ec10d34dc99914c4d647108b22c9249c8f0038b0cb18b3"}, ] [package.dependencies] Sphinx = ">=0.6" [[package]] name = "sphinxcontrib-htmlhelp" version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" files = [ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, ] [package.extras] lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] [[package]] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" optional = false python-versions = ">=3.5" files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, ] [package.extras] test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false 
python-versions = ">=3.9" files = [ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, ] [package.extras] lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["defusedxml (>=0.7.1)", "pytest"] [[package]] name = "sphinxcontrib-serializinghtml" version = "2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, ] [package.extras] lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "testcontainers" version = "3.7.1" description = "Library provides lightweight, throwaway instances of common databases, Selenium web browsers, or anything else that can run in a Docker container" optional = false python-versions = ">=3.7" files = [ {file = "testcontainers-3.7.1-py2.py3-none-any.whl", hash = "sha256:7f48cef4bf0ccd78f1a4534d4b701a003a3bace851f24eae58a32f9e3f0aeba0"}, ] [package.dependencies] deprecation = "*" docker = ">=4.0.0" wrapt = "*" [package.extras] arangodb = ["python-arango"] azurite = ["azure-storage-blob"] clickhouse = ["clickhouse-driver"] docker-compose = ["docker-compose"] google-cloud-pubsub = ["google-cloud-pubsub (<2)"] kafka = ["kafka-python"] keycloak = ["python-keycloak"] mongo = ["pymongo"] mssqlserver = ["pymssql"] mysql = ["pymysql", "sqlalchemy"] neo4j = ["neo4j"] oracle = ["cx-Oracle", "sqlalchemy"] postgresql = ["psycopg2-binary", 
"sqlalchemy"] rabbitmq = ["pika"] redis = ["redis"] selenium = ["selenium"] [[package]] name = "timeout-decorator" version = "0.5.0" description = "Timeout decorator" optional = false python-versions = "*" files = [ {file = "timeout-decorator-0.5.0.tar.gz", hash = "sha256:6a2f2f58db1c5b24a2cc79de6345760377ad8bdc13813f5265f6c3e63d16b3d7"}, ] [[package]] name = "tomli" version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, {file = 
"tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, {file = 
"tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] name = "tornado" version = "6.4.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = false python-versions = ">=3.8" files = [ {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, {file = "tornado-6.4.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec"}, {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946"}, {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf"}, {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634"}, {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73"}, {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c"}, {file = "tornado-6.4.2-cp38-abi3-win32.whl", hash = "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482"}, {file = "tornado-6.4.2-cp38-abi3-win_amd64.whl", hash = "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38"}, {file = "tornado-6.4.2.tar.gz", hash = "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b"}, ] [[package]] name = "types-docutils" version = "0.21.0.20241128" description = "Typing stubs for docutils" optional = false python-versions = ">=3.8" files = [ {file = "types_docutils-0.21.0.20241128-py3-none-any.whl", hash = 
"sha256:e0409204009639e9b0bf4521eeabe58b5e574ce9c0db08421c2ac26c32be0039"}, {file = "types_docutils-0.21.0.20241128.tar.gz", hash = "sha256:4dd059805b83ac6ec5a223699195c4e9eeb0446a4f7f2aeff1759a4a7cc17473"}, ] [[package]] name = "types-setuptools" version = "65.7.0.4" description = "Typing stubs for setuptools" optional = false python-versions = "*" files = [ {file = "types-setuptools-65.7.0.4.tar.gz", hash = "sha256:147809433301fe7e0f4ef5c0782f9a0453788960575e1efb6da5fe8cb2493c9f"}, {file = "types_setuptools-65.7.0.4-py3-none-any.whl", hash = "sha256:522067dfd8e1771f8d7e047e451de2740dc4e0c9f48a22302a6cc96e6c964a13"}, ] [package.dependencies] types-docutils = "*" [[package]] name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "urllib3" version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvloop" version = "0.19.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" files = [ {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, ] [package.extras] docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] [[package]] name = "virtualenv" version = "20.28.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" files = [ {file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"}, 
{file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"}, ] [package.dependencies] distlib = ">=0.3.7,<1" filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] name = "waitress" version = "3.0.2" description = "Waitress WSGI server" optional = false python-versions = ">=3.9.0" files = [ {file = "waitress-3.0.2-py3-none-any.whl", hash = "sha256:c56d67fd6e87c2ee598b76abdd4e96cfad1f24cacdea5078d382b1f9d7b5ed2e"}, {file = "waitress-3.0.2.tar.gz", hash = "sha256:682aaaf2af0c44ada4abfb70ded36393f0e307f4ab9456a215ce0020baefc31f"}, ] [package.extras] docs = ["Sphinx (>=1.8.1)", "docutils", "pylons-sphinx-themes (>=1.0.9)"] testing = ["coverage (>=7.6.0)", "pytest", "pytest-cov"] [[package]] name = "webob" version = "1.8.9" description = "WSGI request and response object" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ {file = "WebOb-1.8.9-py2.py3-none-any.whl", hash = "sha256:45e34c58ed0c7e2ecd238ffd34432487ff13d9ad459ddfd77895e67abba7c1f9"}, {file = "webob-1.8.9.tar.gz", hash = "sha256:ad6078e2edb6766d1334ec3dee072ac6a7f95b1e32ce10def8ff7f0f02d56589"}, ] [package.dependencies] legacy-cgi = {version = ">=2.6", markers = "python_version >= \"3.13\""} [package.extras] docs = ["Sphinx (>=1.7.5)", "pylons-sphinx-themes"] testing = ["coverage", "pytest (>=3.1.0)", "pytest-cov", "pytest-xdist"] [[package]] name = "webtest" version = "3.0.2" description = "Helper to test WSGI 
applications" optional = false python-versions = ">=3.7" files = [ {file = "WebTest-3.0.2-py3-none-any.whl", hash = "sha256:799846e169d15e0c1233ab4ab00ee4de59a5d964407d6f2945d89249328dbbdb"}, {file = "webtest-3.0.2.tar.gz", hash = "sha256:0b2de681c16f57b31da5cce6e94ff03cdc77bd86c37a57ba0ee27fed8e065ceb"}, ] [package.dependencies] beautifulsoup4 = "*" waitress = ">=3.0.2" WebOb = ">=1.2" [package.extras] docs = ["Sphinx (>=3.0.0)", "docutils", "pylons-sphinx-themes (>=1.0.8)"] tests = ["PasteDeploy", "WSGIProxy2", "coverage", "pyquery", "pytest", "pytest-cov"] [[package]] name = "wrapt" version = "1.17.0" description = "Module for decorators, wrappers and monkey patching." optional = false python-versions = ">=3.8" files = [ {file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"}, {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ca5f060e205f72bec57faae5bd817a1560fcfc4af03f414b08fa29106b7e2d"}, {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e185ec6060e301a7e5f8461c86fb3640a7beb1a0f0208ffde7a65ec4074931df"}, {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb90765dd91aed05b53cd7a87bd7f5c188fcd95960914bae0d32c5e7f899719d"}, {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:879591c2b5ab0a7184258274c42a126b74a2c3d5a329df16d69f9cee07bba6ea"}, {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fce6fee67c318fdfb7f285c29a82d84782ae2579c0e1b385b7f36c6e8074fffb"}, {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0698d3a86f68abc894d537887b9bbf84d29bcfbc759e23f4644be27acf6da301"}, {file = "wrapt-1.17.0-cp310-cp310-win32.whl", hash = 
"sha256:69d093792dc34a9c4c8a70e4973a3361c7a7578e9cd86961b2bbf38ca71e4e22"}, {file = "wrapt-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f28b29dc158ca5d6ac396c8e0a2ef45c4e97bb7e65522bfc04c989e6fe814575"}, {file = "wrapt-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74bf625b1b4caaa7bad51d9003f8b07a468a704e0644a700e936c357c17dd45a"}, {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f2a28eb35cf99d5f5bd12f5dd44a0f41d206db226535b37b0c60e9da162c3ed"}, {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81b1289e99cf4bad07c23393ab447e5e96db0ab50974a280f7954b071d41b489"}, {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2939cd4a2a52ca32bc0b359015718472d7f6de870760342e7ba295be9ebaf9"}, {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a9653131bda68a1f029c52157fd81e11f07d485df55410401f745007bd6d339"}, {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4e4b4385363de9052dac1a67bfb535c376f3d19c238b5f36bddc95efae15e12d"}, {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bdf62d25234290db1837875d4dceb2151e4ea7f9fff2ed41c0fde23ed542eb5b"}, {file = "wrapt-1.17.0-cp311-cp311-win32.whl", hash = "sha256:5d8fd17635b262448ab8f99230fe4dac991af1dabdbb92f7a70a6afac8a7e346"}, {file = "wrapt-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:92a3d214d5e53cb1db8b015f30d544bc9d3f7179a05feb8f16df713cecc2620a"}, {file = "wrapt-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:89fc28495896097622c3fc238915c79365dd0ede02f9a82ce436b13bd0ab7569"}, {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875d240fdbdbe9e11f9831901fb8719da0bd4e6131f83aa9f69b96d18fae7504"}, {file = 
"wrapt-1.17.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ed16d95fd142e9c72b6c10b06514ad30e846a0d0917ab406186541fe68b451"}, {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18b956061b8db634120b58f668592a772e87e2e78bc1f6a906cfcaa0cc7991c1"}, {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:daba396199399ccabafbfc509037ac635a6bc18510ad1add8fd16d4739cdd106"}, {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4d63f4d446e10ad19ed01188d6c1e1bb134cde8c18b0aa2acfd973d41fcc5ada"}, {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8a5e7cc39a45fc430af1aefc4d77ee6bad72c5bcdb1322cfde852c15192b8bd4"}, {file = "wrapt-1.17.0-cp312-cp312-win32.whl", hash = "sha256:0a0a1a1ec28b641f2a3a2c35cbe86c00051c04fffcfcc577ffcdd707df3f8635"}, {file = "wrapt-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c34f6896a01b84bab196f7119770fd8466c8ae3dfa73c59c0bb281e7b588ce7"}, {file = "wrapt-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:714c12485aa52efbc0fc0ade1e9ab3a70343db82627f90f2ecbc898fdf0bb181"}, {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da427d311782324a376cacb47c1a4adc43f99fd9d996ffc1b3e8529c4074d393"}, {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba1739fb38441a27a676f4de4123d3e858e494fac05868b7a281c0a383c098f4"}, {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e711fc1acc7468463bc084d1b68561e40d1eaa135d8c509a65dd534403d83d7b"}, {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:140ea00c87fafc42739bd74a94a5a9003f8e72c27c47cd4f61d8e05e6dec8721"}, {file = 
"wrapt-1.17.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73a96fd11d2b2e77d623a7f26e004cc31f131a365add1ce1ce9a19e55a1eef90"}, {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0b48554952f0f387984da81ccfa73b62e52817a4386d070c75e4db7d43a28c4a"}, {file = "wrapt-1.17.0-cp313-cp313-win32.whl", hash = "sha256:498fec8da10e3e62edd1e7368f4b24aa362ac0ad931e678332d1b209aec93045"}, {file = "wrapt-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd136bb85f4568fffca995bd3c8d52080b1e5b225dbf1c2b17b66b4c5fa02838"}, {file = "wrapt-1.17.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:17fcf043d0b4724858f25b8826c36e08f9fb2e475410bece0ec44a22d533da9b"}, {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a557d97f12813dc5e18dad9fa765ae44ddd56a672bb5de4825527c847d6379"}, {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0229b247b0fc7dee0d36176cbb79dbaf2a9eb7ecc50ec3121f40ef443155fb1d"}, {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8425cfce27b8b20c9b89d77fb50e368d8306a90bf2b6eef2cdf5cd5083adf83f"}, {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c900108df470060174108012de06d45f514aa4ec21a191e7ab42988ff42a86c"}, {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:4e547b447073fc0dbfcbff15154c1be8823d10dab4ad401bdb1575e3fdedff1b"}, {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:914f66f3b6fc7b915d46c1cc424bc2441841083de01b90f9e81109c9759e43ab"}, {file = "wrapt-1.17.0-cp313-cp313t-win32.whl", hash = "sha256:a4192b45dff127c7d69b3bdfb4d3e47b64179a0b9900b6351859f3001397dabf"}, {file = "wrapt-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:4f643df3d4419ea3f856c5c3f40fec1d65ea2e89ec812c83f7767c8730f9827a"}, {file = 
"wrapt-1.17.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:69c40d4655e078ede067a7095544bcec5a963566e17503e75a3a3e0fe2803b13"}, {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f495b6754358979379f84534f8dd7a43ff8cff2558dcdea4a148a6e713a758f"}, {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:baa7ef4e0886a6f482e00d1d5bcd37c201b383f1d314643dfb0367169f94f04c"}, {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fc931382e56627ec4acb01e09ce66e5c03c384ca52606111cee50d931a342d"}, {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8f8909cdb9f1b237786c09a810e24ee5e15ef17019f7cecb207ce205b9b5fcce"}, {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ad47b095f0bdc5585bced35bd088cbfe4177236c7df9984b3cc46b391cc60627"}, {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:948a9bd0fb2c5120457b07e59c8d7210cbc8703243225dbd78f4dfc13c8d2d1f"}, {file = "wrapt-1.17.0-cp38-cp38-win32.whl", hash = "sha256:5ae271862b2142f4bc687bdbfcc942e2473a89999a54231aa1c2c676e28f29ea"}, {file = "wrapt-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:f335579a1b485c834849e9075191c9898e0731af45705c2ebf70e0cd5d58beed"}, {file = "wrapt-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d751300b94e35b6016d4b1e7d0e7bbc3b5e1751e2405ef908316c2a9024008a1"}, {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7264cbb4a18dc4acfd73b63e4bcfec9c9802614572025bdd44d0721983fc1d9c"}, {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33539c6f5b96cf0b1105a0ff4cf5db9332e773bb521cc804a90e58dc49b10578"}, {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:c30970bdee1cad6a8da2044febd824ef6dc4cc0b19e39af3085c763fdec7de33"}, {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bc7f729a72b16ee21795a943f85c6244971724819819a41ddbaeb691b2dd85ad"}, {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6ff02a91c4fc9b6a94e1c9c20f62ea06a7e375f42fe57587f004d1078ac86ca9"}, {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dfb7cff84e72e7bf975b06b4989477873dcf160b2fd89959c629535df53d4e0"}, {file = "wrapt-1.17.0-cp39-cp39-win32.whl", hash = "sha256:2399408ac33ffd5b200480ee858baa58d77dd30e0dd0cab6a8a9547135f30a88"}, {file = "wrapt-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:4f763a29ee6a20c529496a20a7bcb16a73de27f5da6a843249c7047daf135977"}, {file = "wrapt-1.17.0-py3-none-any.whl", hash = "sha256:d2c63b93548eda58abf5188e505ffed0229bf675f7c3090f8e36ad55b8cbc371"}, {file = "wrapt-1.17.0.tar.gz", hash = "sha256:16187aa2317c731170a88ef35e8937ae0f533c402872c1ee5e6d079fcf320801"}, ] [[package]] name = "yarl" version = "1.18.3" description = "Yet another URL library" optional = false python-versions = ">=3.9" files = [ {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, {file = "yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed"}, {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde"}, {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b"}, {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5"}, {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc"}, {file = "yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd"}, {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990"}, {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db"}, {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62"}, {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760"}, {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b"}, {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690"}, {file = "yarl-1.18.3-cp310-cp310-win32.whl", hash = "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6"}, {file = "yarl-1.18.3-cp310-cp310-win_amd64.whl", hash = "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8"}, {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069"}, {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193"}, {file = "yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889"}, {file = 
"yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8"}, {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca"}, {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8"}, {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae"}, {file = "yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3"}, {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb"}, {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e"}, {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59"}, {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d"}, {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e"}, {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a"}, {file = "yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1"}, {file = "yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5"}, {file = 
"yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50"}, {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576"}, {file = "yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640"}, {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2"}, {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75"}, {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512"}, {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba"}, {file = "yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb"}, {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272"}, {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6"}, {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e"}, {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb"}, {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393"}, {file = 
"yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285"}, {file = "yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2"}, {file = "yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477"}, {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb"}, {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa"}, {file = "yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782"}, {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0"}, {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482"}, {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186"}, {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58"}, {file = "yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53"}, {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2"}, {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8"}, {file = 
"yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1"}, {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a"}, {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10"}, {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8"}, {file = "yarl-1.18.3-cp313-cp313-win32.whl", hash = "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d"}, {file = "yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c"}, {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04"}, {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719"}, {file = "yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e"}, {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee"}, {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789"}, {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8"}, {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c"}, {file = 
"yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5"}, {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1"}, {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24"}, {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318"}, {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985"}, {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910"}, {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1"}, {file = "yarl-1.18.3-cp39-cp39-win32.whl", hash = "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5"}, {file = "yarl-1.18.3-cp39-cp39-win_amd64.whl", hash = "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9"}, {file = "yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b"}, {file = "yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1"}, ] [package.dependencies] idna = ">=2.0" multidict = ">=4.0" propcache = ">=0.2.0" [[package]] name = "zipp" version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = 
"sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" content-hash = "5262d0f8cebe01d8ed991e1e467a96de11934ed1dde0d64d2090c501b9ab77ed" python-aio-pika-9.5.5/poetry.toml000066400000000000000000000001561476164671100170010ustar00rootroot00000000000000cache-dir = ".cache" [virtualenvs] path = ".venv" in-project = true [installer] modern-installation = false python-aio-pika-9.5.5/pylama.ini000066400000000000000000000002701476164671100165430ustar00rootroot00000000000000[pylama] linters = mccabe,pycodestyle,pyflakes skip = *env*,.tox*,*build*,.*,env/*,.venv/* ignore = C901 [pylama:pycodestyle] max_line_length = 80 show-pep8 = True show-source = True python-aio-pika-9.5.5/pyproject.toml000066400000000000000000000065131476164671100175010ustar00rootroot00000000000000[tool.poetry] name = "aio-pika" version = "9.5.5" description = "Wrapper around the aiormq for asyncio and humans" authors = ["Dmitry Orlov "] readme = "README.rst" license = "Apache-2.0" keywords=["rabbitmq", "asyncio", "amqp", "amqp 0.9.1", "aiormq"] homepage = "https://github.com/mosquito/aio-pika" classifiers = [ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Natural Language :: English", "Operating System :: MacOS", "Operating System :: Microsoft", "Operating System :: POSIX", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", 
"Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Programming Language :: Python", "Topic :: Internet", "Topic :: Software Development :: Libraries", "Topic :: Software Development", "Typing :: Typed", ] packages = [{ include = "aio_pika" }] [tool.poetry.urls] "Source" = "https://github.com/mosquito/aio-pika" "Tracker" = "https://github.com/mosquito/aio-pika/issues" "Documentation" = "https://docs.aio-pika.com/" [tool.poetry.dependencies] python = "^3.9" aiormq = "~6.8" yarl = [{ version = '*'}] exceptiongroup = "^1" typing-extensions = [{ version = '*', python = "< 3.10" }] [tool.poetry.group.dev.dependencies] aiomisc = "^17.5" aiomisc-pytest = "^1.1.1" collective-checkdocs = "^0.2" coverage = "^6.5.0" coveralls = "^3.3.1" mypy = "^1" nox = "*" pylama = "^8.4.1" pytest = "^8.0" pytest-cov = "^4.0.0" pytest-rst = "^0.0" shortuuid = "^1.0" sphinx = "*" sphinx-autobuild = "^2021.3.14" timeout-decorator = "^0.5.0" types-setuptools = "^65.6.0.2" typing-extensions = "*" setuptools = "^69.0.3" testcontainers = "^3.7.1" autodoc = "*" furo = "*" sphinxcontrib-googleanalytics = "*" [tool.poetry.group.uvloop] optional = true [tool.poetry.group.uvloop.dependencies] uvloop = "^0.19" [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" [tool.mypy] check_untyped_defs = true disallow_any_generics = false disallow_incomplete_defs = true disallow_subclassing_any = true disallow_untyped_calls = true disallow_untyped_decorators = true disallow_untyped_defs = true follow_imports = "silent" no_implicit_reexport = true strict_optional = true warn_redundant_casts = true warn_unused_configs = true warn_unused_ignores = true files = [ "aio_pika", "tests", "docs/source/examples", "docs/source/rabbitmq-tutorial/examples/1-introduction", "docs/source/rabbitmq-tutorial/examples/2-work-queues", 
"docs/source/rabbitmq-tutorial/examples/3-publish-subscribe", "docs/source/rabbitmq-tutorial/examples/4-routing", "docs/source/rabbitmq-tutorial/examples/5-topics", "docs/source/rabbitmq-tutorial/examples/6-rpc", ] [[tool.mypy.overrides]] module = ["tests.*"] check_untyped_defs = true disallow_incomplete_defs = false disallow_untyped_calls = false disallow_untyped_decorators = false disallow_untyped_defs = false warn_unused_ignores = false [[tool.mypy.overrides]] module = ["testcontainers.*"] ignore_missing_imports = true [tool.pytest.ini_options] log_cli = true addopts = "-p no:asyncio" markers = [ "asyncio: asyncio" ] python-aio-pika-9.5.5/tests/000077500000000000000000000000001476164671100157225ustar00rootroot00000000000000python-aio-pika-9.5.5/tests/__init__.py000066400000000000000000000003341476164671100200330ustar00rootroot00000000000000from typing import Any import shortuuid def get_random_name(*args: Any) -> str: prefix = ["test"] for item in args: prefix.append(item) prefix.append(shortuuid.uuid()) return ".".join(prefix) python-aio-pika-9.5.5/tests/conftest.py000066400000000000000000000141221476164671100201210ustar00rootroot00000000000000import asyncio import gc import socket import tracemalloc from contextlib import suppress from functools import partial from time import sleep from typing import Any, Generator import aiormq import pamqp import pytest from aiomisc import awaitable from testcontainers.core.container import DockerContainer from yarl import URL import aio_pika @pytest.fixture async def add_cleanup(event_loop): entities = [] def payload(func, *args, **kwargs): nonlocal entities func = partial(awaitable(func), *args, **kwargs) entities.append(func) try: yield payload finally: for func in entities[::-1]: await func() entities.clear() @pytest.fixture async def create_task(event_loop): tasks = [] def payload(coroutine): nonlocal tasks task = event_loop.create_task(coroutine) tasks.append(task) return task try: yield payload finally: cancelled = 
[] for task in tasks: if task.done(): continue task.cancel() cancelled.append(task) results = await asyncio.gather(*cancelled, return_exceptions=True) for result in results: if not isinstance(result, asyncio.CancelledError): raise result class RabbitmqContainer(DockerContainer): # type: ignore _amqp_port: int _amqps_port: int def get_amqp_url(self) -> URL: return URL.build( scheme="amqp", user="guest", password="guest", path="//", host=self.get_container_host_ip(), port=self._amqp_port, ) def get_amqps_url(self) -> URL: return URL.build( scheme="amqps", user="guest", password="guest", path="//", host=self.get_container_host_ip(), port=self._amqps_port, ) def readiness_probe(self) -> None: host = self.get_container_host_ip() port = int(self.get_exposed_port(5672)) while True: with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: try: sock.connect((host, port)) sock.send(b"AMQP\0x0\0x0\0x9\0x1") data = sock.recv(4) if len(data) != 4: sleep(0.3) continue except ConnectionError: sleep(0.3) continue return def start(self) -> "RabbitmqContainer": self.with_exposed_ports(5672, 5671) super().start() self.readiness_probe() self._amqp_port = int(self.get_exposed_port(5672)) self._amqps_port = int(self.get_exposed_port(5671)) return self @pytest.fixture(scope="module") def rabbitmq_container() -> Generator[RabbitmqContainer, Any, Any]: with RabbitmqContainer("mosquito/aiormq-rabbitmq") as container: yield container @pytest.fixture(scope="module") def amqp_direct_url(request, rabbitmq_container: RabbitmqContainer) -> URL: return rabbitmq_container.get_amqp_url().update_query( name=request.node.nodeid ) @pytest.fixture def amqp_url(request, amqp_direct_url) -> URL: query = dict(amqp_direct_url.query) query["name"] = request.node.nodeid return amqp_direct_url.with_query(**query) @pytest.fixture( scope="module", params=[aio_pika.connect, aio_pika.connect_robust], ids=["connect", "connect_robust"], ) def connection_fabric(request): return request.param @pytest.fixture 
def create_connection(connection_fabric, event_loop, amqp_url): return partial(connection_fabric, amqp_url, loop=event_loop) @pytest.fixture def create_channel(connection: aio_pika.Connection, add_cleanup): conn = connection async def fabric(cleanup=True, connection=None, *args, **kwargs): nonlocal add_cleanup, conn if connection is None: connection = conn channel = await connection.channel(*args, **kwargs) if cleanup: add_cleanup(channel.close) return channel return fabric # noinspection PyTypeChecker @pytest.fixture async def connection(create_connection) -> aio_pika.Connection: # type: ignore async with await create_connection() as conn: yield conn # noinspection PyTypeChecker @pytest.fixture async def channel( # type: ignore connection: aio_pika.Connection, ) -> aio_pika.Channel: async with connection.channel() as ch: yield ch @pytest.fixture def declare_queue(connection, channel, add_cleanup): ch = channel async def fabric( *args, cleanup=True, channel=None, **kwargs, ) -> aio_pika.Queue: nonlocal ch, add_cleanup if channel is None: channel = ch queue = await channel.declare_queue(*args, **kwargs) if cleanup and not kwargs.get("auto_delete"): add_cleanup(queue.delete) return queue return fabric @pytest.fixture def declare_exchange(connection, channel, add_cleanup): ch = channel async def fabric( *args, channel=None, cleanup=True, **kwargs, ) -> aio_pika.Exchange: nonlocal ch, add_cleanup if channel is None: channel = ch exchange = await channel.declare_exchange(*args, **kwargs) if cleanup and not kwargs.get("auto_delete"): add_cleanup(exchange.delete) return exchange return fabric @pytest.fixture(autouse=True) def memory_tracer(): tracemalloc.start() tracemalloc.clear_traces() filters = ( tracemalloc.Filter(True, aiormq.__file__), tracemalloc.Filter(True, pamqp.__file__), tracemalloc.Filter(True, aio_pika.__file__), ) snapshot_before = tracemalloc.take_snapshot().filter_traces(filters) try: yield with suppress(Exception): gc.collect() snapshot_after = 
tracemalloc.take_snapshot().filter_traces(filters) top_stats = snapshot_after.compare_to( snapshot_before, "lineno", cumulative=True, ) assert not top_stats finally: tracemalloc.stop() python-aio-pika-9.5.5/tests/test_amqp.py000066400000000000000000001665721476164671100203120ustar00rootroot00000000000000import asyncio import logging import os import time import uuid from datetime import datetime, timezone from typing import Callable, Optional, List from unittest import mock import aiormq.exceptions import pytest import shortuuid from yarl import URL import aio_pika import aio_pika.exceptions from aio_pika import Channel, DeliveryMode, Message from aio_pika.abc import ( AbstractConnection, AbstractIncomingMessage, MessageInfo, AbstractQueue, ) from aio_pika.exceptions import ( DeliveryError, MessageProcessError, ProbableAuthenticationError, ) from aio_pika.exchange import ExchangeType from aio_pika.message import ReturnedMessage from aio_pika.queue import QueueIterator from tests import get_random_name log = logging.getLogger(__name__) class TestCaseAmqpBase: @staticmethod def create_channel( connection: aio_pika.Connection, ) -> aio_pika.abc.AbstractChannel: return connection.channel() @staticmethod @pytest.fixture(name="declare_queue") def declare_queue_(declare_queue): return declare_queue @staticmethod @pytest.fixture(name="declare_exchange") def declare_exchange_(declare_exchange): return declare_exchange class TestCaseAmqp(TestCaseAmqpBase): async def test_properties( self, event_loop, connection: aio_pika.Connection, ): assert not connection.is_closed async def test_channel_close(self, connection: aio_pika.Connection): event = asyncio.Event() closed = False def on_close( ch: Optional[aio_pika.abc.AbstractChannel], exc: Optional[BaseException] = None, ): nonlocal event, closed log.info("Close called") closed = True assert ch is not None assert ch.is_closed event.set() channel = await self.create_channel(connection) channel.close_callbacks.add(on_close) await 
channel.close() await event.wait() assert closed with pytest.raises(RuntimeError): await channel.initialize() async with self.create_channel(connection) as ch: assert not ch.is_closed async def test_channel_reopen(self, connection): channel = await self.create_channel(connection) await channel.close() assert channel.is_closed await channel.reopen() assert not channel.is_closed async def test_delete_queue_and_exchange( self, connection, declare_exchange, declare_queue, ): queue_name = get_random_name("test_connection") exchange = get_random_name() channel = await self.create_channel(connection) await declare_exchange(exchange, auto_delete=True) await declare_queue(queue_name, auto_delete=True) await channel.queue_delete(queue_name) await channel.exchange_delete(exchange) async def test_temporary_queue(self, connection, declare_queue): channel = await self.create_channel(connection) queue = await declare_queue(auto_delete=True) assert queue.name != "" body = os.urandom(32) await channel.default_exchange.publish( Message(body=body), routing_key=queue.name, ) await asyncio.sleep(1) message = await queue.get() assert message.body == body await channel.queue_delete(queue.name) @pytest.mark.skip(reason="This was deprecated in AMQP 0-9-1") async def test_internal_exchange( self, channel: aio_pika.Channel, declare_exchange, declare_queue, ): routing_key = get_random_name() exchange_name = get_random_name("internal", "exchange") exchange = await declare_exchange( exchange_name, auto_delete=True, internal=True, ) queue = await declare_queue(auto_delete=True) await queue.bind(exchange, routing_key) body = bytes(shortuuid.uuid(), "utf-8") with pytest.raises(ValueError): f = exchange.publish( Message( body, content_type="text/plain", headers={"foo": "bar"}, ), routing_key, ) await f await queue.unbind(exchange, routing_key) async def test_declare_exchange_with_passive_flag( self, connection, declare_exchange: Callable, ): exchange_name = get_random_name() channel = await 
self.create_channel(connection) with pytest.raises(aio_pika.exceptions.ChannelNotFoundEntity): await declare_exchange( exchange_name, auto_delete=True, passive=True, channel=channel, ) channel1 = await self.create_channel(connection) channel2 = await self.create_channel(connection) await declare_exchange( exchange_name, auto_delete=True, passive=False, channel=channel1, ) # Check ignoring different exchange options await declare_exchange( exchange_name, auto_delete=False, passive=True, channel=channel2, ) async def test_declare_queue_with_passive_flag( self, connection, declare_exchange: Callable, declare_queue: Callable, ): queue_name = get_random_name() ch1 = await self.create_channel(connection) ch2 = await self.create_channel(connection) ch3 = await self.create_channel(connection) with pytest.raises(aio_pika.exceptions.ChannelNotFoundEntity): await declare_queue( queue_name, auto_delete=True, passive=True, channel=ch1, ) await declare_queue( queue_name, auto_delete=True, passive=False, channel=ch2, ) # Check ignoring different queue options await declare_queue( queue_name, auto_delete=False, passive=True, channel=ch3, ) async def test_simple_publish_and_receive( self, channel: aio_pika.Channel, declare_queue: Callable, declare_exchange: Callable, ): queue_name = get_random_name("test_connection") routing_key = get_random_name() exchange = await declare_exchange( "direct", auto_delete=True, channel=channel, ) queue = await declare_queue( queue_name, auto_delete=True, channel=channel, ) await queue.bind(exchange, routing_key) body = bytes(shortuuid.uuid(), "utf-8") result = await exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) assert result incoming_message = await queue.get(timeout=5) await incoming_message.ack() assert incoming_message.body == body await queue.unbind(exchange, routing_key) async def test_simple_publish_without_confirm( self, connection: aio_pika.Connection, declare_exchange: Callable, 
declare_queue: Callable, ): queue_name = get_random_name("test_connection") routing_key = get_random_name() channel = await connection.channel(publisher_confirms=False) exchange = await declare_exchange( "direct", auto_delete=True, channel=channel, ) queue = await declare_queue( queue_name, auto_delete=True, channel=channel, ) await queue.bind(exchange, routing_key) body = bytes(shortuuid.uuid(), "utf-8") result = await exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) assert result is None incoming_message = await queue.get(timeout=5) await incoming_message.ack() assert incoming_message.body == body await queue.unbind(exchange, routing_key) async def test_simple_publish_and_receive_delivery_mode_explicitly( self, channel: aio_pika.Channel, declare_queue: Callable, declare_exchange: Callable, ): queue_name = get_random_name("test_connection") routing_key = get_random_name() exchange = await declare_exchange( "direct", auto_delete=True, channel=channel, ) queue = await declare_queue( queue_name, auto_delete=True, channel=channel, ) await queue.bind(exchange, routing_key) body = bytes(shortuuid.uuid(), "utf-8") await exchange.publish( Message( body, content_type="text/plain", headers={"foo": "bar"}, delivery_mode=None, ), routing_key, ) incoming_message = await queue.get(timeout=5) await incoming_message.ack() assert incoming_message.body == body await queue.unbind(exchange, routing_key) async def test_simple_publish_and_receive_to_bound_exchange( self, channel: aio_pika.Channel, declare_exchange: Callable, declare_queue: Callable, add_cleanup: Callable, ): routing_key = get_random_name() src_name = get_random_name("source", "exchange") dest_name = get_random_name("destination", "exchange") src_exchange = await declare_exchange(src_name, auto_delete=True) dest_exchange = await declare_exchange(dest_name, auto_delete=True) queue = await declare_queue(auto_delete=True) await queue.bind(dest_exchange, routing_key) await 
dest_exchange.bind(src_exchange, routing_key) add_cleanup(dest_exchange.unbind, src_exchange, routing_key) body = bytes(shortuuid.uuid(), "utf-8") await src_exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) incoming_message = await queue.get(timeout=5) await incoming_message.ack() assert incoming_message.body == body await queue.unbind(dest_exchange, routing_key) async def test_simple_publish_with_closed_channel( self, connection: aio_pika.Connection, declare_exchange: Callable, declare_queue: Callable, ): routing_key = get_random_name() channel = await connection.channel(publisher_confirms=False) exchange = await declare_exchange( "direct", auto_delete=True, channel=channel, ) await connection.close() body = bytes(shortuuid.uuid(), "utf-8") with pytest.raises(aiormq.exceptions.ChannelInvalidStateError): await exchange.publish( Message( body, content_type="text/plain", headers={"foo": "bar"}, ), routing_key, ) async def test_incoming_message_info( self, channel: aio_pika.Channel, declare_queue: Callable, declare_exchange: Callable, add_cleanup: Callable, ): queue_name = get_random_name("test_connection") routing_key = get_random_name() exchange = await declare_exchange("direct", auto_delete=True) queue = await declare_queue(queue_name, auto_delete=True) await queue.bind(exchange, routing_key) add_cleanup(queue.unbind, exchange, routing_key) body = bytes(shortuuid.uuid(), "utf-8") info = MessageInfo( app_id="test", body_size=len(body), content_encoding="text", content_type="application/json", correlation_id="1", delivery_mode=DeliveryMode.PERSISTENT, expiration=1.5, headers={"foo": "bar"}, message_id=shortuuid.uuid(), priority=0, reply_to="test", timestamp=datetime.fromtimestamp(int(time.time()), tz=timezone.utc), type="0", user_id="guest", ) msg = Message( body=body, headers={"foo": "bar"}, content_type="application/json", content_encoding="text", delivery_mode=DeliveryMode.PERSISTENT, priority=0, correlation_id="1", 
reply_to="test", expiration=1.5, message_id=info["message_id"], timestamp=info["timestamp"], type="0", user_id="guest", app_id="test", ) await exchange.publish(msg, routing_key) incoming_message = await queue.get(timeout=5) await incoming_message.ack() info["routing_key"] = incoming_message.routing_key info["redelivered"] = incoming_message.redelivered info["exchange"] = incoming_message.exchange info["delivery_tag"] = incoming_message.delivery_tag info["consumer_tag"] = incoming_message.consumer_tag info["cluster_id"] = incoming_message.cluster_id assert incoming_message.body == body assert incoming_message.info() == info async def test_context_process( self, channel: aio_pika.Channel, declare_queue: Callable, declare_exchange: Callable, add_cleanup: Callable, ): queue_name = get_random_name("test_connection") routing_key = get_random_name() exchange = await declare_exchange("direct", auto_delete=True) queue = await declare_queue(queue_name, auto_delete=True) await queue.bind(exchange, routing_key) add_cleanup(queue.unbind, exchange, routing_key) body = bytes(shortuuid.uuid(), "utf-8") await exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) if not channel.publisher_confirms: await asyncio.sleep(1) incoming_message: AbstractIncomingMessage = await queue.get(timeout=5) with pytest.raises(AssertionError): async with incoming_message.process(requeue=True): raise AssertionError assert incoming_message.locked incoming_message = await queue.get(timeout=5) async with incoming_message.process(): pass assert incoming_message.body == body await exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) incoming_message = await queue.get(timeout=5) with pytest.raises(MessageProcessError): async with incoming_message.process(): await incoming_message.reject(requeue=True) assert incoming_message.locked incoming_message = await queue.get(timeout=5) async with 
incoming_message.process(ignore_processed=True): await incoming_message.reject(requeue=False) assert incoming_message.body == body await exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) incoming_message = await queue.get(timeout=5) with pytest.raises(AssertionError): async with incoming_message.process( requeue=True, reject_on_redelivered=True, ): raise AssertionError incoming_message = await queue.get(timeout=5) with pytest.raises(AssertionError): async with incoming_message.process( requeue=True, reject_on_redelivered=True, ): raise AssertionError assert incoming_message.locked async def test_context_process_redelivery( self, channel: aio_pika.Channel, declare_exchange: Callable, declare_queue: Callable, add_cleanup: Callable, ): queue_name = get_random_name("test_connection") routing_key = get_random_name() exchange = await declare_exchange("direct", auto_delete=True) queue = await declare_queue(queue_name, auto_delete=True) await queue.bind(exchange, routing_key) add_cleanup(queue.unbind, exchange, routing_key) body = bytes(shortuuid.uuid(), "utf-8") await exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) if not channel.publisher_confirms: await asyncio.sleep(1) incoming_message = await queue.get(timeout=5) with pytest.raises(AssertionError): async with incoming_message.process( requeue=True, reject_on_redelivered=True, ): raise AssertionError incoming_message = await queue.get(timeout=5) with mock.patch("aio_pika.message.log") as message_logger: with pytest.raises(Exception): async with incoming_message.process( requeue=True, reject_on_redelivered=True, ): raise Exception assert message_logger.info.called assert ( message_logger.info.mock_calls[0][1][1].body == incoming_message.body ) assert incoming_message.body == body async def test_no_ack_redelivery( self, connection, add_cleanup: Callable, declare_queue, declare_exchange, ): queue_name = 
get_random_name("test_connection") routing_key = get_random_name() channel = await self.create_channel(connection) exchange = await declare_exchange( "direct", auto_delete=True, channel=channel, ) queue = await declare_queue( queue_name, auto_delete=False, channel=channel, cleanup=False, ) await queue.bind(exchange, routing_key) # publish 2 messages for _ in range(2): body = bytes(shortuuid.uuid(), "utf-8") msg = Message(body) await exchange.publish(msg, routing_key) if not channel.publisher_confirms: await asyncio.sleep(1) # ack 1 message out of 2 first_message = await queue.get(timeout=5) last_message = await queue.get(timeout=5) await last_message.ack() # close channel, not acked message should be redelivered await channel.close() channel = await self.create_channel(connection) exchange = await declare_exchange( "direct", auto_delete=True, channel=channel, ) queue = await declare_queue( queue_name, auto_delete=False, channel=channel, ) # receive not acked message message = await queue.get(timeout=5) assert message.body == first_message.body await message.ack() await queue.unbind(exchange, routing_key) async def test_ack_multiple( self, connection, declare_exchange, declare_queue, add_cleanup: Callable, ): queue_name = get_random_name("test_connection") routing_key = get_random_name() channel = await self.create_channel(connection) exchange = await declare_exchange( "direct", auto_delete=True, channel=channel, ) queue = await declare_queue( queue_name, auto_delete=False, cleanup=False, channel=channel, ) await queue.bind(exchange, routing_key) # publish 2 messages for _ in range(2): body = bytes(shortuuid.uuid(), "utf-8") msg = Message(body) await exchange.publish(msg, routing_key) if not channel.publisher_confirms: await asyncio.sleep(1) # ack only last mesage with multiple flag, first # message should be acked too await queue.get(timeout=5) last_message = await queue.get(timeout=5) await last_message.ack(multiple=True) # close channel, no messages should be 
redelivered await channel.close() channel = await self.create_channel(connection) exchange = await declare_exchange( "direct", auto_delete=True, channel=channel, ) queue = await declare_queue( queue_name, auto_delete=False, cleanup=False, channel=channel, ) with pytest.raises(aio_pika.exceptions.QueueEmpty): await queue.get() await queue.unbind(exchange, routing_key) await queue.delete() async def test_ack_twice( self, channel: aio_pika.Connection, declare_queue, declare_exchange, ): queue_name = get_random_name("test_connection") routing_key = get_random_name() exchange = await declare_exchange("direct", auto_delete=True) queue = await declare_queue(queue_name, auto_delete=True) await queue.bind(exchange, routing_key) body = bytes(shortuuid.uuid(), "utf-8") await exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) incoming_message = await queue.get(timeout=5) await incoming_message.ack() with pytest.raises(MessageProcessError): await incoming_message.ack() assert incoming_message.body == body await queue.unbind(exchange, routing_key) await queue.delete() async def test_reject_twice( self, channel: aio_pika.Channel, add_cleanup: Callable, declare_queue: Callable, declare_exchange: Callable, ): queue_name = get_random_name("test_connection") routing_key = get_random_name() exchange = await declare_exchange("direct", auto_delete=True) queue = await declare_queue(queue_name, auto_delete=True) await queue.bind(exchange, routing_key) add_cleanup(queue.unbind, exchange, routing_key) body = bytes(shortuuid.uuid(), "utf-8") await exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) incoming_message = await queue.get(timeout=5) await incoming_message.reject(requeue=False) with pytest.raises(MessageProcessError): await incoming_message.reject(requeue=False) assert incoming_message.body == body async def test_consuming( self, event_loop, channel: aio_pika.Channel, declare_exchange: 
Callable, declare_queue: Callable, add_cleanup: Callable, ): queue_name = get_random_name("tc2") routing_key = get_random_name() exchange = await declare_exchange("direct", auto_delete=True) queue = await declare_queue(queue_name, auto_delete=True) await queue.bind(exchange, routing_key) add_cleanup(queue.unbind, exchange, routing_key) body = bytes(shortuuid.uuid(), "utf-8") f = event_loop.create_future() async def handle(message): await message.ack() assert message.body == body assert message.routing_key == routing_key f.set_result(True) await queue.consume(handle) await exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) if not f.done(): await f async def test_consuming_not_coroutine( self, event_loop, channel: aio_pika.Channel, declare_exchange: Callable, declare_queue: Callable, add_cleanup: Callable, ): queue_name = get_random_name("tc2") routing_key = get_random_name() exchange = await declare_exchange("direct", auto_delete=True) queue = await declare_queue(queue_name, auto_delete=True) add_cleanup(queue.unbind, exchange, routing_key) await queue.bind(exchange, routing_key) body = bytes(shortuuid.uuid(), "utf-8") f = event_loop.create_future() async def handle(message): await message.ack() assert message.body == body assert message.routing_key == routing_key f.set_result(True) await queue.consume(handle) await exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) if not f.done(): await f async def test_ack_reject( self, channel: aio_pika.Channel, declare_exchange: Callable, declare_queue: Callable, add_cleanup: Callable, ): queue_name = get_random_name("test_connection3") routing_key = get_random_name() exchange = await declare_exchange("direct", auto_delete=True) queue = await declare_queue(queue_name, auto_delete=True) await queue.bind(exchange, routing_key) add_cleanup(queue.unbind, exchange, routing_key) body = bytes(shortuuid.uuid(), "utf-8") await 
exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) incoming_message = await queue.get(timeout=5, no_ack=True) with pytest.raises(TypeError): await incoming_message.ack() await exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) incoming_message = await queue.get(timeout=5) await incoming_message.reject() await exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) incoming_message = await queue.get(timeout=5, no_ack=True) with pytest.raises(TypeError): await incoming_message.reject() assert incoming_message.body == body async def test_purge_queue( self, declare_queue: Callable, declare_exchange: Callable, channel: aio_pika.Channel, ): queue_name = get_random_name("test_connection4") routing_key = get_random_name() exchange = await declare_exchange("direct", auto_delete=True) queue = await declare_queue(queue_name, auto_delete=True) await queue.bind(exchange, routing_key) try: body = bytes(shortuuid.uuid(), "utf-8") await exchange.publish( Message( body, content_type="text/plain", headers={"foo": "bar"}, ), routing_key, ) await queue.purge() with pytest.raises(asyncio.TimeoutError): await queue.get(timeout=1) except aio_pika.exceptions.QueueEmpty: await queue.unbind(exchange, routing_key) await queue.delete() async def test_connection_refused(self, connection_fabric: Callable): with pytest.raises(ConnectionError): await connection_fabric("amqp://guest:guest@localhost:9999") async def test_wrong_credentials( self, connection_fabric: Callable, amqp_url, ): amqp_url = amqp_url.with_user(uuid.uuid4().hex).with_password( uuid.uuid4().hex, ) with pytest.raises(ProbableAuthenticationError): await connection_fabric(amqp_url) async def test_set_qos(self, channel: aio_pika.Channel): await channel.set_qos(prefetch_count=1, global_=True) async def test_set_qos_deprecated_all_channels( self, channel: aio_pika.Channel, ): with 
pytest.deprecated_call(): await channel.set_qos(prefetch_count=1, all_channels=True) async def test_exchange_delete( self, channel: aio_pika.Channel, declare_exchange, ): exchange = await declare_exchange("test", auto_delete=True) await exchange.delete() async def test_dlx( self, event_loop, channel: aio_pika.Channel, declare_exchange: Callable, declare_queue: Callable, add_cleanup: Callable, ): suffix = get_random_name() routing_key = "%s_routing_key" % suffix dlx_routing_key = "%s_dlx_routing_key" % suffix f = event_loop.create_future() async def dlx_handle(message): await message.ack() assert message.body == body assert message.routing_key == dlx_routing_key f.set_result(True) direct_exchange = await declare_exchange( "direct", channel=channel, auto_delete=True, ) # type: aio_pika.Exchange dlx_exchange = await declare_exchange( "dlx", ExchangeType.DIRECT, auto_delete=True, ) direct_queue = await declare_queue( "%s_direct_queue" % suffix, auto_delete=True, arguments={ "x-message-ttl": 300, "x-dead-letter-exchange": "dlx", "x-dead-letter-routing-key": dlx_routing_key, }, ) dlx_queue = await declare_queue( "%s_dlx_queue" % suffix, auto_delete=True, ) await dlx_queue.consume(dlx_handle) await dlx_queue.bind(dlx_exchange, dlx_routing_key) await direct_queue.bind(direct_exchange, routing_key) add_cleanup(dlx_queue.unbind, dlx_exchange, routing_key) add_cleanup(direct_queue.unbind, direct_exchange, routing_key) body = bytes(shortuuid.uuid(), "utf-8") await direct_exchange.publish( Message( body, content_type="text/plain", headers={ "x-message-ttl": 100, "x-dead-letter-exchange": "dlx", }, ), routing_key, ) if not f.done(): await f async def test_expiration( self, channel: aio_pika.Channel, event_loop, declare_exchange, declare_queue, ): dlx_queue = await declare_queue( get_random_name("test_dlx"), cleanup=False, ) # type: aio_pika.Queue dlx_exchange = await declare_exchange( get_random_name("dlx"), cleanup=False, ) # type: aio_pika.Exchange await 
dlx_queue.bind(dlx_exchange, routing_key=dlx_queue.name) queue = await declare_queue( get_random_name("test_expiration"), arguments={ "x-message-ttl": 10000, "x-dead-letter-exchange": dlx_exchange.name, "x-dead-letter-routing-key": dlx_queue.name, }, ) # type: aio_pika.Queue body = bytes(shortuuid.uuid(), "utf-8") await channel.default_exchange.publish( Message( body, content_type="text/plain", headers={"foo": "bar"}, expiration=0.5, ), queue.name, ) f = event_loop.create_future() await dlx_queue.consume(f.set_result, no_ack=True) message = await f assert message.body == body assert message.headers["x-death"][0]["original-expiration"] == "500" async def test_add_close_callback(self, create_connection: Callable): connection = await create_connection() shared_list = [] def share(*a, **kw): shared_list.append((a, kw)) connection.close_callbacks.add(share) del share assert len(connection.close_callbacks) == 1 await connection.close() assert len(shared_list) == 1 async def test_big_message( self, channel: aio_pika.Channel, add_cleanup: Callable, declare_queue, declare_exchange, ): queue_name = get_random_name("test_big") routing_key = get_random_name() exchange = await declare_exchange("direct", auto_delete=True) queue = await declare_queue(queue_name, auto_delete=True) await queue.bind(exchange, routing_key) add_cleanup(queue.unbind, exchange, routing_key) add_cleanup(queue.delete) body = bytes(shortuuid.uuid(), "utf-8") * 1000000 await exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) incoming_message = await queue.get(timeout=5) await incoming_message.ack() assert incoming_message.body == body async def test_unexpected_channel_close( self, channel: aio_pika.Channel, declare_queue, ): with pytest.raises(aio_pika.exceptions.ChannelClosed): await declare_queue("amq.restricted_queue_name", auto_delete=True) with pytest.raises(aiormq.exceptions.ChannelInvalidStateError): await channel.set_qos(100) async def 
test_declaration_result( self, channel: aio_pika.Channel, declare_queue, ): queue = await declare_queue(auto_delete=True) assert queue.declaration_result.message_count == 0 assert queue.declaration_result.consumer_count == 0 async def test_declaration_result_with_consumers( self, connection, declare_queue, ): channel1 = await self.create_channel(connection) channel2 = await self.create_channel(connection) queue_name = get_random_name("queue", "declaration-result") queue1 = await declare_queue( queue_name, auto_delete=True, channel=channel1, ) await queue1.consume(print) queue2 = await declare_queue( queue_name, passive=True, channel=channel2, cleanup=False, ) assert queue2.declaration_result.consumer_count == 1 async def test_declaration_result_with_messages( self, connection, declare_queue, declare_exchange, ): channel1 = await self.create_channel(connection) channel2 = await self.create_channel(connection) queue_name = get_random_name("queue", "declaration-result") queue1 = await declare_queue( queue_name, auto_delete=True, channel=channel1, ) await channel1.default_exchange.publish( Message(body=b"test"), routing_key=queue1.name, ) await asyncio.sleep(1) queue2 = await declare_queue( queue_name, passive=True, channel=channel2, ) await queue2.get() await queue2.delete() assert queue2.declaration_result.consumer_count == 0 assert queue2.declaration_result.message_count == 1 async def test_queue_empty_exception( self, channel: aio_pika.Channel, add_cleanup: Callable, declare_queue, ): queue_name = get_random_name("test_get_on_empty_queue") queue = await declare_queue(queue_name, auto_delete=True) add_cleanup(queue.delete) with pytest.raises(aio_pika.exceptions.QueueEmpty): await queue.get(timeout=5) await channel.default_exchange.publish( Message(b"test"), queue_name, ) message = await queue.get(timeout=5) assert message.body == b"test" # test again for #110 with pytest.raises(aio_pika.exceptions.QueueEmpty): await queue.get(timeout=5) async def 
test_queue_empty_fail_false( self, channel: aio_pika.Channel, declare_queue, ): queue_name = get_random_name("test_get_on_empty_queue") queue = await declare_queue(queue_name, auto_delete=True) result = await queue.get(fail=False) assert result is None async def test_message_nack( self, channel: aio_pika.Channel, declare_queue, ): queue_name = get_random_name("test_nack_queue") body = uuid.uuid4().bytes queue = await declare_queue(queue_name, auto_delete=True) await channel.default_exchange.publish( Message(body=body), routing_key=queue_name, ) message = await queue.get() # type: aio_pika.IncomingMessage assert message.body == body await message.nack(requeue=True) message = await queue.get() assert message.redelivered assert message.body == body await message.ack() async def test_on_return_raises(self, connection: aio_pika.Connection): queue_name = get_random_name("test_on_return_raises") body = uuid.uuid4().bytes with pytest.raises(RuntimeError): await connection.channel( publisher_confirms=False, on_return_raises=True, ) channel = await connection.channel( publisher_confirms=True, on_return_raises=True, ) for _ in range(100): with pytest.raises(aio_pika.exceptions.DeliveryError): await channel.default_exchange.publish( Message(body=body), routing_key=queue_name, ) async def test_transaction_when_publisher_confirms_error( self, connection: aio_pika.Connection, ): async with connection.channel(publisher_confirms=True) as channel: with pytest.raises(RuntimeError): channel.transaction() async def test_transaction_simple_commit( self, connection: aio_pika.Connection, ): async with connection.channel(publisher_confirms=False) as channel: tx = channel.transaction() await tx.select() await tx.commit() async def test_transaction_simple_rollback( self, connection: aio_pika.Connection, ): async with connection.channel(publisher_confirms=False) as channel: tx = channel.transaction() await tx.select() await tx.rollback() async def test_transaction_simple_async_commit( self, 
connection: aio_pika.Connection, ): async with connection.channel(publisher_confirms=False) as channel: async with channel.transaction(): pass async def test_transaction_simple_async_rollback( self, connection: aio_pika.Connection, ): async with connection.channel(publisher_confirms=False) as channel: with pytest.raises(ValueError): async with channel.transaction(): raise ValueError async def test_async_for_queue( self, event_loop, connection, declare_queue, ): channel2 = await self.create_channel(connection) queue = await declare_queue( get_random_name("queue", "is_async", "for"), auto_delete=True, channel=channel2, ) messages = 100 async def publisher(): channel1 = await self.create_channel(connection) for i in range(messages): await channel1.default_exchange.publish( Message(body=str(i).encode()), routing_key=queue.name, ) event_loop.create_task(publisher()) count = 0 data = list() async for message in queue: async with message.process(): count += 1 data.append(message.body) if count >= messages: break assert data == list(map(lambda x: str(x).encode(), range(messages))) async def test_async_for_queue_context( self, event_loop, connection, declare_queue, ) -> None: channel2 = await self.create_channel(connection) queue = await declare_queue( get_random_name("queue", "is_async", "for"), auto_delete=True, channel=channel2, ) messages: asyncio.Queue[bytes] = asyncio.Queue(100) condition = asyncio.Condition() async def publisher() -> None: channel1 = await self.create_channel(connection) for i in range(messages.maxsize): body = str(i).encode() await messages.put(body) await channel1.default_exchange.publish( Message(body=body), routing_key=queue.name, ) async def consumer() -> None: async with queue.iterator() as queue_iterator: async for message in queue_iterator: async with message.process(): async with condition: data.append(message.body) messages.task_done() condition.notify() async def application_stop_request() -> None: async with condition: await 
condition.wait_for(messages.full) await messages.join() await asyncio.sleep(1) await connection.close() p = event_loop.create_task(publisher()) c = event_loop.create_task(consumer()) asr = event_loop.create_task(application_stop_request()) data: List[bytes] = list() await asyncio.gather(p, c, asr) assert data == list( map(lambda x: str(x).encode(), range(messages.maxsize)) ) async def test_async_with_connection( self, create_connection: Callable, connection, event_loop, declare_queue, ): async with await create_connection() as connection: channel = await self.create_channel(connection) queue = await declare_queue( get_random_name("queue", "is_async", "for"), auto_delete=True, channel=channel, ) condition = asyncio.Condition() messages: asyncio.Queue[bytes] = asyncio.Queue( 100 ) async def publisher(): channel1 = await self.create_channel(connection) for i in range(messages.maxsize): body = str(i).encode() await messages.put(body) await channel1.default_exchange.publish( Message(body=body), routing_key=queue.name, ) data = list() async def consume_loop(): async with queue.iterator() as queue_iterator: async for message in queue_iterator: async with message.process(): async with condition: data.append(message.body) condition.notify() messages.task_done() async def application_close_request(): async with condition: await condition.wait_for(messages.full) await messages.join() await asyncio.sleep(1) await connection.close() p = event_loop.create_task(publisher()) cl = event_loop.create_task(consume_loop()) acr = event_loop.create_task(application_close_request()) await asyncio.gather(p, cl, acr) assert data == list( map(lambda x: str(x).encode(), range(messages.maxsize)), ) assert channel.is_closed async def test_async_with_channel(self, connection: aio_pika.Connection): async with self.create_channel(connection) as channel: assert isinstance(channel, Channel) assert channel.is_closed async def test_delivery_fail( self, channel: aio_pika.Channel, declare_queue, ): 
queue = await declare_queue( exclusive=True, arguments={"x-max-length": 1, "x-overflow": "reject-publish"}, auto_delete=True, ) await channel.default_exchange.publish( aio_pika.Message(body=b"queue me"), routing_key=queue.name, ) with pytest.raises(DeliveryError): for _ in range(10): await channel.default_exchange.publish( aio_pika.Message(body=b"reject me"), routing_key=queue.name, ) async def test_channel_locked_resource( self, connection, declare_queue, add_cleanup: Callable, ): ch1 = await self.create_channel(connection) ch2 = await self.create_channel(connection) qname = get_random_name("channel", "locked", "resource") q1 = await declare_queue( qname, exclusive=True, channel=ch1, cleanup=False, ) add_cleanup(q1.delete) tag = await q1.consume(print, exclusive=True) add_cleanup(q1.cancel, tag) with pytest.raises(aiormq.exceptions.ChannelAccessRefused): q2 = await declare_queue( qname, exclusive=True, channel=ch2, cleanup=False, ) await q2.consume(print, exclusive=True) async def test_queue_iterator_close_was_called_twice( self, create_connection: Callable, event_loop, declare_queue, ): event = asyncio.Event() queue_name = get_random_name() iterator: QueueIterator async def task_inner(): nonlocal event nonlocal iterator nonlocal create_connection connection = await create_connection() async with connection: channel = await self.create_channel(connection) queue = await declare_queue( queue_name, channel=channel, cleanup=False, ) async with queue.iterator() as iterator: event.set() async for message in iterator: async with message.process(): pytest.fail("who sent this message?") task = event_loop.create_task(task_inner()) await event.wait() await iterator.close() await task async def test_queue_iterator_close_with_noack( self, create_connection: Callable, event_loop, add_cleanup: Callable, declare_queue, ): messages: asyncio.Queue = asyncio.Queue() queue_name = get_random_name("test_queue") body = get_random_name("test_body").encode() async def task_inner(): 
nonlocal messages nonlocal create_connection nonlocal add_cleanup connection = await create_connection() add_cleanup(connection.close) async with connection: channel = await self.create_channel(connection) queue = await declare_queue( queue_name, channel=channel, cleanup=False, passive=True, ) async with queue.iterator(no_ack=True) as q: async for message in q: await messages.put(message) return async with await create_connection() as connection: channel = await self.create_channel(connection) queue = await declare_queue( queue_name, channel=channel, cleanup=False, ) try: await channel.default_exchange.publish( Message(body), routing_key=queue_name, ) task = event_loop.create_task(task_inner()) message = await messages.get() assert message assert message.body == body finally: await task await queue.delete() async def test_queue_iterator_throws_cancelled_error( self, create_connection: Callable, event_loop, add_cleanup: Callable, declare_queue, ): event_loop.set_debug(True) queue_name = get_random_name("test_queue") connection = await create_connection() async with connection: channel = await self.create_channel(connection) queue = await channel.declare_queue( queue_name, ) iterator = queue.iterator() task = event_loop.create_task(iterator.__anext__()) done, pending = await asyncio.wait({task}, timeout=1) assert not done task.cancel() with pytest.raises(asyncio.CancelledError): await task async def test_queue_iterator_throws_timeout_error( self, create_connection: Callable, event_loop, add_cleanup: Callable, declare_queue, ): event_loop.set_debug(True) queue_name = get_random_name("test_queue") connection = await create_connection() async with connection: channel = await self.create_channel(connection) queue = await channel.declare_queue( queue_name, ) iterator = queue.iterator(timeout=1) task = event_loop.create_task(iterator.__anext__()) done, pending = await asyncio.wait({task}, timeout=5) assert done with pytest.raises(asyncio.TimeoutError): await task async def 
test_passive_for_exchange( self, declare_exchange: Callable, connection, add_cleanup: Callable, ): name = get_random_name("passive", "exchange") ch1 = await self.create_channel(connection) ch2 = await self.create_channel(connection) ch3 = await self.create_channel(connection) with pytest.raises(aio_pika.exceptions.ChannelNotFoundEntity): await declare_exchange(name, passive=True, channel=ch1) exchange = await declare_exchange(name, auto_delete=True, channel=ch2) exchange_passive = await declare_exchange( name, passive=True, channel=ch3, ) assert exchange.name == exchange_passive.name async def test_passive_queue( self, declare_queue: Callable, connection: aio_pika.Connection, ): name = get_random_name("passive", "queue") ch1 = await self.create_channel(connection) ch2 = await self.create_channel(connection) ch3 = await self.create_channel(connection) with pytest.raises(aio_pika.exceptions.ChannelNotFoundEntity): await declare_queue(name, passive=True, channel=ch1) queue = await declare_queue(name, auto_delete=True, channel=ch2) queue_passive = await declare_queue(name, passive=True, channel=ch3) assert queue.name == queue_passive.name async def test_get_exchange(self, connection, declare_exchange): channel = await self.create_channel(connection) name = get_random_name("passive", "exchange") with pytest.raises(aio_pika.exceptions.ChannelNotFoundEntity): await channel.get_exchange(name) channel = await self.create_channel(connection) exchange = await declare_exchange( name, auto_delete=True, channel=channel, ) exchange_passive = await channel.get_exchange(name) assert exchange.name == exchange_passive.name async def test_get_queue(self, connection, declare_queue): channel = await self.create_channel(connection) name = get_random_name("passive", "queue") with pytest.raises(aio_pika.exceptions.ChannelNotFoundEntity): await channel.get_queue(name) channel = await self.create_channel(connection) queue = await declare_queue(name, auto_delete=True, channel=channel) 
queue_passive = await channel.get_queue(name) assert queue.name, queue_passive.name @pytest.mark.skip(reason="temporary skip") async def test_channel_blocking_timeout(self, connection): channel = await connection.channel() close_reasons = [] close_event = asyncio.Event() def on_done(*args): close_reasons.append(args) close_event.set() return channel.close_callbacks.add(on_done) async def run(): await channel.set_qos(1) time.sleep(1) await channel.set_qos(0) with pytest.raises(asyncio.TimeoutError): await asyncio.wait_for(run(), timeout=0.2) await close_event.wait() assert channel.is_closed # Ensure close callback has been called assert close_reasons with pytest.raises(RuntimeError): await channel.set_qos(10) async def test_heartbeat_disabling( self, event_loop, amqp_url: URL, connection_fabric, ): url = amqp_url.update_query(heartbeat=0) connection: AbstractConnection = await connection_fabric(url) transport = connection.transport assert transport heartbeat = transport.connection.connection_tune.heartbeat async with connection: assert heartbeat == 0 async def test_non_acked_messages_are_redelivered_to_queue( self, channel: aio_pika.Channel, declare_queue: Callable, declare_exchange: Callable, ): queue_name = get_random_name("test_connection") routing_key = get_random_name() exchange = await declare_exchange( "direct", auto_delete=True, channel=channel, ) queue: AbstractQueue = await declare_queue( queue_name, auto_delete=False, channel=channel, ) await queue.bind(exchange, routing_key) # Publish 5 messages to queue all_bodies = [] for _ in range(0, 5): body = bytes(shortuuid.uuid(), "utf-8") all_bodies.append(body) assert await exchange.publish(Message(body), routing_key) # Create a subscription but only process first message async with queue.iterator() as queue_iterator: first_message = await queue_iterator.__anext__() async with first_message.process(): assert first_message.body == all_bodies[0] # Confirm other messages are still in queue for i in range(1, 5): 
incoming_message = await queue.get(timeout=5) await incoming_message.ack() assert incoming_message.body == all_bodies[i] # Check if the queue is now empty assert await queue.get(fail=False, timeout=.5) is None # Cleanup, delete the queue await queue.delete() async def test_regression_only_messages_cancelled_subscription_are_nacked( self, channel: aio_pika.Channel, declare_queue: Callable, declare_exchange: Callable, ): queue_name1 = get_random_name("test_queue") queue_name2 = get_random_name("test_queue") routing_key1 = get_random_name() routing_key2 = get_random_name() exchange = await declare_exchange( "direct", auto_delete=True, channel=channel, ) queue1: AbstractQueue = await declare_queue( queue_name1, auto_delete=False, channel=channel, ) queue2: AbstractQueue = await declare_queue( queue_name2, auto_delete=False, channel=channel, ) await queue1.bind(exchange, routing_key1) await queue2.bind(exchange, routing_key2) # Publish 5 messages to queue 1 all_bodies1 = [] for _ in range(0, 5): body = bytes(shortuuid.uuid(), "utf-8") all_bodies1.append(body) assert await exchange.publish(Message(body), routing_key1) # Publish 5 messages to queue 2 all_bodies2 = [] for _ in range(0, 5): body = bytes(shortuuid.uuid(), "utf-8") all_bodies2.append(body) assert await exchange.publish(Message(body), routing_key2) # Create a subscription to both queues but only process first message queue_iterator1 = await queue1.iterator().__aenter__() queue_iterator2 = await queue2.iterator().__aenter__() first_message1 = await queue_iterator1.__anext__() async with first_message1.process(): assert first_message1.body == all_bodies1[0] first_message2 = await queue_iterator2.__anext__() async with first_message2.process(): assert first_message2.body == all_bodies2[0] # The order of exit here is important. # Subscription to queue 1 is received first then to 2. # Therefore, the delivery tags of subscription to queue 2 will be # higher. 
# So first we cancel the subscription to 2, to test if we # accidentally also nacked the messages of queue 1. Then we cancel # subscription to queue 1 to test. await queue_iterator2.__aexit__(None, None, None) # To test if the wrong messages are nacked by stopping subscription to # queue 2, we ack a message received from queue 1. If it was nacked, # RabbitMQ will throw an exception. second_message1 = await queue_iterator1.__anext__() async with second_message1.process(): assert second_message1.body == all_bodies1[1] await queue_iterator1.__aexit__(None, None, None) # Confirm other messages are still in queue for i in range(2, 5): incoming_message = await queue1.get(timeout=5) await incoming_message.ack() assert incoming_message.body == all_bodies1[i] for i in range(1, 5): incoming_message = await queue2.get(timeout=5) await incoming_message.ack() assert incoming_message.body == all_bodies2[i] # Check if the queue is now empty assert await queue1.get(fail=False, timeout=.5) is None assert await queue2.get(fail=False, timeout=.5) is None # Cleanup, delete the queue await queue1.delete() await queue2.delete() class TestCaseAmqpNoConfirms(TestCaseAmqp): @staticmethod def create_channel(connection: aio_pika.Connection): return connection.channel(publisher_confirms=False) class TestCaseAmqpWithConfirms(TestCaseAmqpBase): @staticmethod def create_channel(connection: aio_pika.Connection): return connection.channel(publisher_confirms=True) @pytest.mark.skip(reason="Have to find another way to close connection") async def test_connection_close( self, connection: aio_pika.Connection, declare_exchange: Callable, ): routing_key = get_random_name() channel = await self.create_channel(connection) exchange = await declare_exchange( "direct", auto_delete=True, channel=channel, ) try: with pytest.raises(aio_pika.exceptions.ChannelPreconditionFailed): msg = Message(bytes(shortuuid.uuid(), "utf-8")) msg.delivery_mode = 8 # type: ignore await exchange.publish(msg, routing_key) channel 
= await self.create_channel(connection) exchange = await declare_exchange( "direct", auto_delete=True, channel=channel, ) finally: await exchange.delete() async def test_basic_return( self, connection: aio_pika.Connection, event_loop, ): channel = await self.create_channel(connection) f = event_loop.create_future() def handler(channel, message: ReturnedMessage): f.set_result(message) channel.return_callbacks.add(handler) body = bytes(shortuuid.uuid(), "utf-8") await channel.default_exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), get_random_name("test_basic_return"), ) returned = await f assert returned.body == body # handler with exception f = event_loop.create_future() await channel.close() channel = await self.create_channel(connection) def bad_handler( channel: aio_pika.abc.AbstractChannel, message: aio_pika.message.IncomingMessage, ): try: raise ValueError finally: f.set_result(message) channel.return_callbacks.add(bad_handler) body = bytes(shortuuid.uuid(), "utf-8") await channel.default_exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), get_random_name("test_basic_return"), ) returned = await f assert returned.body == body python-aio-pika-9.5.5/tests/test_amqp_robust.py000066400000000000000000000104051476164671100216670ustar00rootroot00000000000000import asyncio from functools import partial import pytest from aiormq import ChannelNotFoundEntity from aiormq.exceptions import ChannelPreconditionFailed import aio_pika from aio_pika import RobustChannel from tests import get_random_name from tests.test_amqp import ( TestCaseAmqp, TestCaseAmqpNoConfirms, TestCaseAmqpWithConfirms, ) @pytest.fixture def connection_fabric(): return aio_pika.connect_robust @pytest.fixture def create_connection(connection_fabric, event_loop, amqp_url): return partial(connection_fabric, amqp_url, loop=event_loop) class TestCaseNoRobust(TestCaseAmqp): PARAMS = [{"robust": True}, {"robust": False}] IDS = ["robust=1", 
"robust=0"] @staticmethod @pytest.fixture(name="declare_queue", params=PARAMS, ids=IDS) def declare_queue_(request, declare_queue): async def fabric(*args, **kwargs) -> aio_pika.Queue: kwargs.update(request.param) return await declare_queue(*args, **kwargs) return fabric @staticmethod @pytest.fixture(name="declare_exchange", params=PARAMS, ids=IDS) def declare_exchange_(request, declare_exchange): async def fabric(*args, **kwargs) -> aio_pika.Queue: kwargs.update(request.param) return await declare_exchange(*args, **kwargs) return fabric async def test_add_reconnect_callback(self, create_connection): connection = await create_connection() def cb(*a, **kw): pass connection.reconnect_callbacks.add(cb) del cb assert len(connection.reconnect_callbacks) == 1 async def test_channel_blocking_timeout_reopen(self, connection): channel: RobustChannel = await connection.channel() # type: ignore close_reasons = [] close_event = asyncio.Event() reopen_event = asyncio.Event() channel.reopen_callbacks.add(lambda *_: reopen_event.set()) queue_name = get_random_name("test_channel_blocking_timeout_reopen") def on_done(*args): close_reasons.append(args) close_event.set() return channel.close_callbacks.add(on_done) with pytest.raises(ChannelNotFoundEntity): await channel.declare_queue(queue_name, passive=True) await close_event.wait() assert channel.is_closed # Ensure close callback has been called assert close_reasons await asyncio.wait_for(reopen_event.wait(), timeout=60) await channel.declare_queue(queue_name, auto_delete=True) async def test_get_queue_fail(self, connection): channel: RobustChannel = await connection.channel() # type: ignore close_event = asyncio.Event() reopen_event = asyncio.Event() channel.close_callbacks.add(lambda *_: close_event.set()) channel.reopen_callbacks.add(lambda *_: reopen_event.set()) name = get_random_name("passive", "queue") await channel.declare_queue( name, auto_delete=True, arguments={"x-max-length": 1}, ) with 
pytest.raises(ChannelPreconditionFailed): await channel.declare_queue(name, auto_delete=True) await asyncio.sleep(0) await close_event.wait() await reopen_event.wait() with pytest.raises(ChannelPreconditionFailed): await channel.declare_queue(name, auto_delete=True) async def test_channel_is_ready_after_close_and_reopen(self, connection): channel: RobustChannel = await connection.channel() # type: ignore await channel.ready() await channel.close() assert channel.is_closed is True await channel.reopen() await asyncio.wait_for(channel.ready(), timeout=1) assert channel.is_closed is False async def test_channel_can_be_closed(self, connection): channel: RobustChannel = await connection.channel() # type: ignore await channel.ready() await channel.close() assert channel.is_closed with pytest.raises(asyncio.TimeoutError): await asyncio.wait_for(channel.ready(), timeout=1) assert channel.is_closed class TestCaseAmqpNoConfirmsRobust(TestCaseAmqpNoConfirms): pass class TestCaseAmqpWithConfirmsRobust(TestCaseAmqpWithConfirms): pass python-aio-pika-9.5.5/tests/test_amqp_robust_proxy.py000066400000000000000000000474561476164671100231500ustar00rootroot00000000000000import asyncio import itertools import logging from contextlib import suppress from functools import partial from typing import Callable, List, Type, Optional import aiomisc import aiormq.exceptions import pytest import shortuuid from aiomisc_pytest import TCPProxy # type: ignore from yarl import URL import aio_pika from aio_pika.abc import AbstractRobustChannel, AbstractRobustConnection from aio_pika.exceptions import QueueEmpty, CONNECTION_EXCEPTIONS from aio_pika.message import Message from aio_pika.robust_channel import RobustChannel from aio_pika.robust_connection import RobustConnection from aio_pika.robust_queue import RobustQueue from tests import get_random_name @pytest.fixture async def proxy(tcp_proxy: Type[TCPProxy], amqp_direct_url: URL): p = tcp_proxy( amqp_direct_url.host, amqp_direct_url.port, 
buffered=False, ) await p.start() try: yield p finally: await p.close() @pytest.fixture def amqp_url(amqp_direct_url, proxy: TCPProxy): return amqp_direct_url.with_host( proxy.proxy_host, ).with_port( proxy.proxy_port, ).update_query( reconnect_interval=1, heartbeat=1, ) @pytest.fixture def proxy_port(aiomisc_unused_port_factory) -> int: return aiomisc_unused_port_factory() @pytest.fixture(scope="module") def connection_fabric(): return aio_pika.connect_robust @pytest.fixture def create_direct_connection(event_loop, amqp_direct_url): return partial( aio_pika.connect, amqp_direct_url.update_query( name=amqp_direct_url.query["name"] + "::direct", heartbeat=30, ), loop=event_loop, ) @pytest.fixture def create_connection(connection_fabric, event_loop, amqp_url): return partial(connection_fabric, amqp_url, loop=event_loop) @pytest.fixture async def direct_connection( # type: ignore create_direct_connection, ) -> aio_pika.Connection: async with await create_direct_connection() as conn: yield conn async def test_channel_fixture(channel: aio_pika.RobustChannel): assert isinstance(channel, aio_pika.RobustChannel) async def test_connection_fixture(connection: aio_pika.RobustConnection): assert isinstance(connection, aio_pika.RobustConnection) def test_amqp_url_is_not_direct(amqp_url, amqp_direct_url): assert amqp_url != amqp_direct_url async def test_set_qos(channel: aio_pika.Channel): await channel.set_qos(prefetch_count=1) async def test_revive_passive_queue_on_reconnect( create_connection, direct_connection, proxy: TCPProxy, ): client = await create_connection() assert isinstance(client, RobustConnection) reconnect_event = asyncio.Event() reconnect_count = 0 def reconnect_callback(conn: Optional[AbstractRobustConnection]): nonlocal reconnect_count reconnect_count += 1 reconnect_event.set() reconnect_event.clear() client.reconnect_callbacks.add(reconnect_callback) queue_name = get_random_name() channel = await client.channel() assert isinstance(channel, RobustChannel) 
direct_channel = await direct_connection.channel() direct_queue = await direct_channel.declare_queue( queue_name, auto_delete=True, passive=False, ) queue2 = await channel.declare_queue( direct_queue.name, passive=True, auto_delete=False, ) assert isinstance(queue2, RobustQueue) await proxy.disconnect_all() await reconnect_event.wait() assert reconnect_count == 1 with suppress(asyncio.TimeoutError): await asyncio.wait_for( reconnect_event.wait(), client.reconnect_interval * 2, ) assert reconnect_count == 1 @aiomisc.timeout(30) async def test_robust_reconnect( create_connection, direct_connection, proxy: TCPProxy, event_loop, add_cleanup: Callable, ): read_conn = await create_connection() # type: aio_pika.RobustConnection reconnect_event = asyncio.Event() read_conn.reconnect_callbacks.add( lambda *_: reconnect_event.set(), ) assert isinstance(read_conn, aio_pika.RobustConnection) write_channel = await direct_connection.channel() async with read_conn: read_channel = await read_conn.channel() assert isinstance(read_channel, aio_pika.RobustChannel) qname = get_random_name("robust", "proxy", "shared") async with read_channel: shared = [] # Declaring temporary queue queue = await write_channel.declare_queue( qname, auto_delete=False, durable=True, ) consumer_event = asyncio.Event() async def reader(queue_name): nonlocal shared try: queue = await read_channel.declare_queue( name=queue_name, passive=True, ) async with queue.iterator() as q: event_loop.call_soon(consumer_event.set) async for message in q: shared.append(message) await message.ack() finally: logging.info("Exit reader task") try: reader_task = event_loop.create_task(reader(queue.name)) await consumer_event.wait() logging.info("Disconnect all clients") with proxy.slowdown(1, 1): for i in range(5): await write_channel.default_exchange.publish( Message(str(i).encode()), queue.name, ) await proxy.disconnect_all() # noinspection PyTypeChecker with pytest.raises(aiormq.AMQPError): await read_conn.channel() 
logging.info("Waiting reconnect") await reconnect_event.wait() logging.info("Waiting connections") await asyncio.wait_for(read_conn.ready(), timeout=20) for i in range(5, 10): await write_channel.default_exchange.publish( Message(str(i).encode()), queue.name, ) while len(shared) < 10: await asyncio.sleep(0.1) assert len(shared) == 10 reader_task.cancel() await asyncio.gather(reader_task, return_exceptions=True) with pytest.raises(QueueEmpty): await queue.get(timeout=0.5) finally: await queue.purge() # Waiting for rabbitmq queue not in use await asyncio.sleep(1) await queue.delete() async def test_channel_locked_resource2(connection: aio_pika.RobustConnection): ch1: AbstractRobustChannel = await connection.channel() # type: ignore ch2: AbstractRobustChannel = await connection.channel() # type: ignore qname = get_random_name("channel", "locked", "resource") q1: aio_pika.abc.AbstractRobustQueue = await ch1.declare_queue( qname, exclusive=True, robust=False, ) await q1.consume(print, exclusive=True) with pytest.raises(aiormq.exceptions.ChannelAccessRefused): q2 = await ch2.declare_queue(qname, exclusive=True, robust=False) await q2.consume(print, exclusive=True) async def test_channel_close_when_exclusive_queue( create_connection, create_direct_connection, proxy: TCPProxy, event_loop, ): logging.info("Creating connections") direct_conn, proxy_conn = await asyncio.gather( create_direct_connection(), create_connection(), ) logging.info("Creating channels") direct_channel, proxy_channel = await asyncio.gather( direct_conn.channel(), proxy_conn.channel(), ) reconnect_event = asyncio.Event() proxy_conn.reconnect_callbacks.add( lambda *_: reconnect_event.set(), weak=False, ) qname = get_random_name("robust", "exclusive", "queue") logging.info("Declaring exclusing queue: %s", qname) proxy_queue = await proxy_channel.declare_queue( qname, exclusive=True, durable=True, ) logging.info("Disconnecting all proxy connections") await proxy.disconnect_all() await asyncio.sleep(0.5) 
logging.info("Declaring exclusive queue through direct channel") await direct_channel.declare_queue( qname, exclusive=True, durable=True, ) async def close_after(delay, closer): await asyncio.sleep(delay) logging.info("Disconnecting direct connection") await closer() logging.info("Closed") await event_loop.create_task(close_after(5, direct_conn.close)) # reconnect fired await reconnect_event.wait() # Wait method ready await proxy_conn.connected.wait() await proxy_queue.delete() async def test_context_process_abrupt_channel_close( connection: aio_pika.RobustConnection, declare_exchange: Callable, declare_queue: Callable, ): # https://github.com/mosquito/aio-pika/issues/302 queue_name = get_random_name("test_connection") routing_key = get_random_name("rounting_key") channel = await connection.channel() exchange = await declare_exchange( "direct", auto_delete=True, channel=channel, ) queue = await declare_queue(queue_name, auto_delete=True, channel=channel) await queue.bind(exchange, routing_key) body = bytes(shortuuid.uuid(), "utf-8") await exchange.publish( Message(body, content_type="text/plain", headers={"foo": "bar"}), routing_key, ) incoming_message = await queue.get(timeout=5) # close aiormq channel to emulate abrupt connection/channel close underlay_channel = await channel.get_underlay_channel() await underlay_channel.close() with pytest.raises(aiormq.exceptions.ChannelInvalidStateError): async with incoming_message.process(): # emulate some activity on closed channel await channel.get_underlay_channel() # emulate connection/channel restoration of connect_robust await channel.reopen() # cleanup queue incoming_message = await queue.get(timeout=5) async with incoming_message.process(): pass await queue.unbind(exchange, routing_key) @aiomisc.timeout(10) async def test_robust_duplicate_queue( connection: aio_pika.RobustConnection, direct_connection: aio_pika.Connection, declare_exchange: Callable, declare_queue: Callable, proxy: TCPProxy, create_task: Callable, ): 
queue_name = get_random_name("test") channel = await connection.channel() direct_channel = await direct_connection.channel() reconnect_event = asyncio.Event() shared_condition = asyncio.Condition() connection.reconnect_callbacks.add( lambda *_: reconnect_event.set(), ) shared = {} # noinspection PyShadowingNames async def reader(queue: aio_pika.Queue): nonlocal shared async with queue.iterator() as q: async for message in q: # https://www.rabbitmq.com/confirms.html#automatic-requeueing async with shared_condition: shared[message.message_id] = message shared_condition.notify_all() await message.ack() queue = await declare_queue( queue_name, channel=channel, cleanup=False, ) create_task(reader(queue)) for x in range(5): await direct_channel.default_exchange.publish( aio_pika.Message(b"1234567890", message_id=f"0-{x}"), queue_name, ) async with shared_condition: await asyncio.wait_for( shared_condition.wait_for(lambda: len(shared) == 5), timeout=5, ) logging.info("Disconnect all clients") await proxy.disconnect_all() assert len(shared) == 5, shared for x in range(5): await direct_channel.default_exchange.publish( Message(b"1234567890", message_id=f"1-{x}"), queue_name, ) await asyncio.wait_for(reconnect_event.wait(), timeout=5) logging.info("Waiting connections") async with shared_condition: await asyncio.wait_for( shared_condition.wait_for(lambda: len(shared) == 10), timeout=5, ) assert len(shared) == 10 @aiomisc.timeout(10) async def test_channel_restore( connection_fabric, event_loop, amqp_url, proxy: TCPProxy, add_cleanup: Callable, ): heartbeat = 10 amqp_url = amqp_url.update_query(heartbeat=heartbeat) on_reopen = asyncio.Event() conn = await connection_fabric(amqp_url, loop=event_loop) assert isinstance(conn, aio_pika.RobustConnection) async with conn: channel: AbstractRobustChannel = await conn.channel() # type: ignore channel.reopen_callbacks.add( lambda *_: on_reopen.set(), weak=False, ) assert isinstance(channel, aio_pika.RobustChannel) async with channel: 
# NOTE(review): archive-mangled region reconstructed with conventional
# formatting; tokens unchanged. The first stanza is the tail of a test whose
# ``def`` lies before this chunk (it references ``on_reopen``) — indentation
# here is assumed, TODO confirm against the full file.
        await channel.set_qos(0)
        await channel.set_qos(1)

        # A slowed-down proxy must make set_qos exceed its 0.5s timeout.
        with pytest.raises(asyncio.TimeoutError):
            with proxy.slowdown(1, 1):
                await channel.set_qos(0, timeout=0.5)

        await on_reopen.wait()
        await channel.set_qos(0)
        await channel.set_qos(1)


@aiomisc.timeout(20)
async def test_channel_reconnect(
    connection_fabric, event_loop, amqp_url, proxy: TCPProxy,
    add_cleanup: Callable,
):
    """Channel keeps working after the proxy drops all TCP connections."""
    on_reconnect = asyncio.Event()

    conn = await connection_fabric(amqp_url, loop=event_loop)
    assert isinstance(conn, aio_pika.RobustConnection)
    conn.reconnect_callbacks.add(lambda *_: on_reconnect.set(), weak=False)

    async with conn:
        channel = await conn.channel()
        assert isinstance(channel, aio_pika.RobustChannel)

        async with channel:
            await channel.set_qos(0)
            await channel.set_qos(1)

            # Kill every proxied connection, wait until the robust
            # connection reports a reconnect, then reuse the same channel.
            await proxy.disconnect_all()
            await on_reconnect.wait()

            await channel.set_qos(0)
            await channel.set_qos(1)


class BadNetwork5KB:
    """Proxy content processor that drops all connections after ~5000
    server->client bytes have been observed."""

    def __init__(self, proxy):
        self.proxy = proxy
        self.num_bytes = 0
        self.loop = asyncio.get_event_loop()
        self.lock = asyncio.Lock()
        proxy.set_content_processors(
            self.client_to_server,
            self.server_to_client,
        )

    async def disconnect(self):
        # Reset the byte counter so the next 5KB window starts fresh.
        async with self.lock:
            await self.proxy.disconnect_all()
            self.num_bytes = 0

    async def server_to_client(self, chunk: bytes) -> bytes:
        async with self.lock:
            self.num_bytes += len(chunk)
            if self.num_bytes < 5000:
                return chunk
            # Schedule the disconnect out-of-band; the chunk itself is
            # still forwarded unmodified.
            self.loop.create_task(self.disconnect())
            return chunk

    @staticmethod
    def client_to_server(chunk: bytes) -> bytes:
        # Upstream traffic passes through untouched.
        return chunk


@aiomisc.timeout(15)
@pytest.mark.parametrize(
    "reconnect_timeout", ["0", "1", "0.5", "0.1", "0.05", "0.025"],
)
async def test_channel_reconnect_after_5kb(
    reconnect_timeout,
    amqp_url,
    amqp_direct_url,
    connection_fabric,
    event_loop: asyncio.AbstractEventLoop,
    proxy: TCPProxy,
    add_cleanup: Callable,
):
    """All published messages are eventually consumed even though the
    proxied connection is dropped every ~5KB of traffic."""
    connection = await aio_pika.connect_robust(
        amqp_url.update_query(reconnect_interval=reconnect_timeout),
        loop=event_loop,
    )

    # The publisher bypasses the flaky proxy entirely.
    direct_connection = await aio_pika.connect(
        amqp_direct_url, loop=event_loop,
    )

    on_reconnect = asyncio.Event()
    connection.reconnect_callbacks.add(
        lambda *_: on_reconnect.set(),
        weak=False,
    )

    BadNetwork5KB(proxy)

    messages_to_exchange = 50

    async with connection.channel() as channel:
        await channel.set_qos(prefetch_count=5)
        queue = await channel.declare_queue(auto_delete=False)

        async with direct_connection.channel() as publish_channel:
            for _ in range(messages_to_exchange):
                await publish_channel.default_exchange.publish(
                    aio_pika.Message(body=b"Hello world " * 100),
                    routing_key=queue.name,
                )

        messages = []
        async for message in queue.iterator():
            messages.append(message)
            if len(messages) == messages_to_exchange:
                break

        assert messages

    assert on_reconnect.is_set()
    await connection.close()
    await direct_connection.close()


class BadNetwork:
    """Like BadNetwork5KB, but the byte threshold ("stair") doubles after
    every forced disconnect and the disconnect is delayed."""

    def __init__(self, proxy, stair: int, disconnect_time: float):
        self.proxy = proxy
        self.stair = stair
        self.disconnect_time = disconnect_time
        self.num_bytes = 0
        self.loop = asyncio.get_event_loop()
        self.lock = asyncio.Lock()
        proxy.set_content_processors(
            self.client_to_server,
            self.server_to_client,
        )

    async def disconnect(self):
        async with self.lock:
            await asyncio.sleep(self.disconnect_time)
            await self.proxy.disconnect_all()
            # Let progressively more traffic through each round.
            self.stair *= 2
            self.num_bytes = 0

    async def server_to_client(self, chunk: bytes) -> bytes:
        async with self.lock:
            self.num_bytes += len(chunk)
            if self.num_bytes < self.stair:
                return chunk
            self.loop.create_task(self.disconnect())
            return chunk

    @staticmethod
    def client_to_server(chunk: bytes) -> bytes:
        return chunk


DISCONNECT_OFFSETS = [2 << i for i in range(5, 12)]
STAIR_STEPS = list(itertools.product([0.1, 0.0], DISCONNECT_OFFSETS))
STAIR_STEPS_IDS = [
    f"[{i // len(DISCONNECT_OFFSETS)}] {t}-{s}"
    for i, (t, s) in enumerate(STAIR_STEPS)
]


@aiomisc.timeout(30)
@pytest.mark.parametrize(
    "reconnect_timeout,stair", STAIR_STEPS, ids=STAIR_STEPS_IDS,
)
async def test_channel_reconnect_stairway(
    reconnect_timeout: float,
    stair: int,
    amqp_url: URL,
    amqp_direct_url: URL,
    connection_fabric,
    event_loop: asyncio.AbstractEventLoop,
    proxy: TCPProxy,
    add_cleanup: Callable,
):
    """Stress reconnect logic with a network that fails at growing
    byte offsets; every message must still be acked exactly once."""
    event_loop.set_debug(True)

    connection = await aio_pika.connect_robust(
        amqp_url.update_query(
            reconnect_interval=f"{reconnect_timeout:.2f}",
            name="proxy",
        ),
        loop=event_loop,
    )

    direct_connection = await aio_pika.connect(
        amqp_direct_url.update_query("name=direct"),
        loop=event_loop,
    )

    on_reconnect = asyncio.Event()
    connection.reconnect_callbacks.add(
        lambda *_: on_reconnect.set(),
        weak=False,
    )

    BadNetwork(proxy, stair, reconnect_timeout)

    messages_to_exchange = 100
    body = b"Hello world " * 1000

    async with connection.channel() as channel:
        queue = await channel.declare_queue(auto_delete=False)

        async with direct_connection.channel() as publish_channel:
            for _ in range(messages_to_exchange):
                await publish_channel.default_exchange.publish(
                    aio_pika.Message(body=body),
                    routing_key=queue.name,
                )

        messages: List[aio_pika.abc.AbstractIncomingMessage] = []

        # set_qos may race with a reconnect; retry until it sticks.
        while True:
            try:
                await channel.set_qos(prefetch_count=1)
                break
            except CONNECTION_EXCEPTIONS:
                await asyncio.sleep(0.1)
                continue

        while len(messages) < messages_to_exchange:
            try:
                message: aio_pika.abc.AbstractIncomingMessage
                async for message in queue.iterator():
                    # noinspection PyBroadException
                    try:
                        await message.ack()
                    except Exception:
                        # Ack lost to a disconnect — the broker will
                        # redeliver; just keep consuming.
                        continue
                    messages.append(message)
                    if len(messages) >= messages_to_exchange:
                        break
            except Exception:
                continue

    assert messages
    assert on_reconnect.is_set()
    await connection.close()
    await direct_connection.close()
# --- tar member boundary (archive residue, kept verbatim) ---
python-aio-pika-9.5.5/tests/test_amqps.py000066400000000000000000000022141476164671100204530ustar00rootroot00000000000000
import ssl
from functools import partial

import pytest

import aio_pika
from tests import test_amqp as amqp


@pytest.fixture(
    scope="module", params=[aio_pika.connect, aio_pika.connect_robust],
)
def connection_fabric(request):
    # Run the whole module once per connect flavour.
    return request.param


@pytest.fixture
def create_connection(connection_fabric, event_loop, rabbitmq_container):
    """Factory connecting over amqps with certificate checks disabled
    (the test broker uses a self-signed certificate)."""
    ssl_context = ssl.create_default_context()
    ssl_context.check_hostname = False
    ssl_context.verify_mode = ssl.VerifyMode.CERT_NONE
    return partial(
        connection_fabric,
        rabbitmq_container.get_amqps_url(),
        loop=event_loop,
        ssl_context=ssl_context,
    )


async def test_default_context(connection_fabric, amqp_url):
    """A strict (default) SSL context must fail against the test broker."""
    with pytest.raises(ConnectionError):
        await connection_fabric(
            amqp_url.with_scheme("amqps").with_port(5671),
            ssl_context=None,
        )

    ssl_context = ssl.create_default_context()

    with pytest.raises(ConnectionError):
        await connection_fabric(
            amqp_url.with_scheme("amqps").with_port(5671),
            ssl_context=ssl_context,
        )


class TestCaseAMQPS(amqp.TestCaseAmqp):
    # Re-run the plain-AMQP suite over TLS via the fixtures above.
    pass
# --- tar member boundary (archive residue, kept verbatim) ---
python-aio-pika-9.5.5/tests/test_connect.py000066400000000000000000000023241476164671100207650ustar00rootroot00000000000000
import asyncio

import pytest
from yarl import URL

from aio_pika import connect

# (kwargs passed to connect(), expected normalized URL) pairs.
VARIANTS = (
    (dict(url="amqp://localhost/"), "amqp://localhost/"),
    (dict(url="amqp://localhost"), "amqp://localhost/"),
    (dict(url="amqp://localhost:5674"), "amqp://localhost:5674/"),
    (dict(url="amqp://localhost:5674//"), "amqp://localhost:5674//"),
    (dict(url="amqp://localhost:5674/"), "amqp://localhost:5674/"),
    (dict(host="localhost", port=8888), "amqp://guest:guest@localhost:8888//"),
    (
        dict(host="localhost", port=8888, virtualhost="foo"),
        "amqp://guest:guest@localhost:8888/foo",
    ),
    (
        dict(host="localhost", port=8888, virtualhost="/foo"),
        "amqp://guest:guest@localhost:8888//foo",
    ),
)


class FakeConnection:
    """Stand-in connection class that only records the URL and kwargs."""

    def __init__(self, url, **kwargs):
        self.url = URL(url)
        self.kwargs = kwargs

    async def connect(self, timeout=None, **kwargs):
        return


@pytest.mark.parametrize("kwargs,expected", VARIANTS)
def test_simple(kwargs, expected):
    loop = asyncio.get_event_loop()
    conn: FakeConnection = loop.run_until_complete(
        connect(connection_class=FakeConnection, **kwargs),  # type: ignore
    )
    assert conn.url == URL(expected)
# --- tar member boundary (archive residue, kept verbatim) ---
python-aio-pika-9.5.5/tests/test_connection_params.py000066400000000000000000000107701476164671100230420ustar00rootroot00000000000000
from typing import Type

from aiormq.connection import parse_bool, parse_int, parse_timeout
# NOTE(review): archive-mangled region reconstructed with conventional
# formatting; tokens unchanged. Continues test_connection_params.py whose
# first two import lines precede this chunk.
from yarl import URL

from aio_pika import connect
from aio_pika.abc import AbstractConnection
from aio_pika.connection import Connection
from aio_pika.robust_connection import RobustConnection, connect_robust


class MockConnection(Connection):
    # Skip the real network handshake; return self so `await connect()`
    # yields the instance for inspection.
    async def connect(self, timeout=None, **kwargs):
        return self


class MockConnectionRobust(RobustConnection):
    async def connect(self, timeout=None, **kwargs):
        return self


# Per-parser mapping of query-string value -> expected parsed value.
VALUE_GENERATORS = {
    parse_int: {
        "-1": -1,
        "0": 0,
        "43": 43,
        "9999999999999999": 9999999999999999,
        "hello": 0,
    },
    parse_bool: {
        "disabled": False,
        "enable": True,
        "yes": True,
        "no": False,
        "": False,
    },
    parse_timeout: {
        "0": 0,
        "Vasyan": 0,
        "0.1": 0.1,
        "0.54": 0.54,
        "1": 1,
        "100": 100,
        "1000:": 0,
    },
    float: {
        "0": 0.,
        "0.0": 0.,
        ".0": 0.,
        "0.1": 0.1,
        "1": 1.,
        "hello": None,
    },
}


class TestCase:
    """Verify URL-query and kwargs parameter parsing for Connection."""

    CONNECTION_CLASS: Type[AbstractConnection] = MockConnection

    async def get_instance(self, url, **kwargs) -> AbstractConnection:
        return await connect(  # type: ignore
            url,
            connection_class=self.CONNECTION_CLASS,
            **kwargs,
        )

    async def test_kwargs(self):
        # Defaults: every declared non-kwarg parameter must be present on
        # the instance with its parsed default value.
        instance = await self.get_instance("amqp://localhost/")

        for parameter in self.CONNECTION_CLASS.PARAMETERS:
            if parameter.is_kwarg:
                continue

            assert hasattr(instance, parameter.name)
            assert (
                getattr(instance, parameter.name) is
                parameter.parse(parameter.default)
            )

    async def test_kwargs_values(self):
        # Each example value must round-trip both via the URL query string
        # and via an explicit keyword argument.
        for parameter in self.CONNECTION_CLASS.PARAMETERS:
            positives = VALUE_GENERATORS[parameter.parser]  # type: ignore
            for example, expected in positives.items():  # type: ignore
                instance = await self.get_instance(
                    f"amqp://localhost/?{parameter.name}={example}",
                )

                assert parameter.parse(example) == expected

                if parameter.is_kwarg:
                    assert instance.kwargs[parameter.name] == expected
                else:
                    assert hasattr(instance, parameter.name)
                    assert getattr(instance, parameter.name) == expected

                instance = await self.get_instance(
                    "amqp://localhost", **{parameter.name: example},
                )
                assert hasattr(instance, parameter.name)
                assert getattr(instance, parameter.name) == expected


class TestCaseRobust(TestCase):
    CONNECTION_CLASS: Type[MockConnectionRobust] = MockConnectionRobust

    async def get_instance(self, url, **kwargs) -> AbstractConnection:
        return await connect_robust(  # type: ignore
            url,
            connection_class=self.CONNECTION_CLASS,  # type: ignore
            **kwargs,
        )


def test_connection_interleave(amqp_url: URL):
    # `interleave` from the URL query ends up (parsed) in kwargs only
    # when present in the URL.
    url = amqp_url.update_query(interleave="1")
    connection = Connection(url=url)
    assert "interleave" in connection.kwargs
    assert connection.kwargs["interleave"] == 1

    connection = Connection(url=amqp_url)
    assert "interleave" not in connection.kwargs


def test_connection_happy_eyeballs_delay(amqp_url: URL):
    url = amqp_url.update_query(happy_eyeballs_delay=".1")
    connection = Connection(url=url)
    assert "happy_eyeballs_delay" in connection.kwargs
    assert connection.kwargs["happy_eyeballs_delay"] == 0.1

    connection = Connection(url=amqp_url)
    assert "happy_eyeballs_delay" not in connection.kwargs


def test_robust_connection_interleave(amqp_url: URL):
    url = amqp_url.update_query(interleave="1")
    connection = RobustConnection(url=url)
    assert "interleave" in connection.kwargs
    assert connection.kwargs["interleave"] == 1

    connection = RobustConnection(url=amqp_url)
    assert "interleave" not in connection.kwargs


def test_robust_connection_happy_eyeballs_delay(amqp_url: URL):
    url = amqp_url.update_query(happy_eyeballs_delay=".1")
    connection = RobustConnection(url=url)
    assert "happy_eyeballs_delay" in connection.kwargs
    assert connection.kwargs["happy_eyeballs_delay"] == 0.1

    connection = RobustConnection(url=amqp_url)
    assert "happy_eyeballs_delay" not in connection.kwargs
# --- tar member boundary (archive residue, kept verbatim) ---
python-aio-pika-9.5.5/tests/test_master.py000066400000000000000000000055161476164671100206350ustar00rootroot00000000000000
import asyncio
from typing import Any, List

import aio_pika
from aio_pika.patterns.master import (
    CompressedJsonMaster, JsonMaster, Master, NackMessage, RejectMessage,
)


class TestMaster:
    """Master/worker pattern tests, re-run by subclasses for every
    serializer flavour via MASTER_CLASS."""

    MASTER_CLASS = Master

    async def test_simple(self, channel: aio_pika.Channel):
        master = self.MASTER_CLASS(channel)
        event = asyncio.Event()

        self.state: List[Any] = []

        def worker_func(*, foo, bar):
            nonlocal event
            self.state.append((foo, bar))
            event.set()

        worker = await master.create_worker(
            "worker.foo", worker_func, auto_delete=True,
        )

        await master.proxy.worker.foo(foo=1, bar=2)

        await event.wait()

        assert self.state == [(1, 2)]

        await worker.close()

    async def test_simple_coro(self, channel: aio_pika.Channel):
        # Same as test_simple but with an async worker callback.
        master = self.MASTER_CLASS(channel)
        event = asyncio.Event()

        self.state = []

        async def worker_func(*, foo, bar):
            nonlocal event
            self.state.append((foo, bar))
            event.set()

        worker = await master.create_worker(
            "worker.foo", worker_func, auto_delete=True,
        )

        await master.proxy.worker.foo(foo=1, bar=2)

        await event.wait()

        assert self.state == [(1, 2)]

        await worker.close()

    async def test_simple_many(self, channel: aio_pika.Channel):
        # 100 tasks must all be delivered, in order.
        master = self.MASTER_CLASS(channel)
        tasks = 100
        state = []

        def worker_func(*, foo):
            nonlocal tasks, state
            state.append(foo)
            tasks -= 1

        worker = await master.create_worker(
            "worker.foo", worker_func, auto_delete=True,
        )

        for item in range(100):
            await master.proxy.worker.foo(foo=item)

        while tasks > 0:
            await asyncio.sleep(0)

        assert state == list(range(100))

        await worker.close()

    async def test_exception_classes(self, channel: aio_pika.Channel):
        # RejectMessage/NackMessage raised by the worker drop the task;
        # only foo in [50, 100] survives.
        master = self.MASTER_CLASS(channel)
        counter = 200

        self.state = []

        def worker_func(*, foo):
            nonlocal counter
            counter -= 1

            if foo < 50:
                raise RejectMessage(requeue=False)
            if foo > 100:
                raise NackMessage(requeue=False)

            self.state.append(foo)

        worker = await master.create_worker(
            "worker.foo", worker_func, auto_delete=True,
        )

        for item in range(200):
            await master.proxy.worker.foo(foo=item)

        while counter > 0:
            await asyncio.sleep(0)

        assert self.state == list(range(50, 101))

        await worker.close()


class TestJsonMaster(TestMaster):
    MASTER_CLASS = JsonMaster


class TestCompressedJsonMaster(TestMaster):
    MASTER_CLASS = CompressedJsonMaster
# NOTE(review): archive-mangled region reconstructed with conventional
# formatting; tokens unchanged.
# --- tar member boundary (archive residue, kept verbatim) ---
python-aio-pika-9.5.5/tests/test_memory_leak.py000066400000000000000000000020631476164671100216400ustar00rootroot00000000000000
import gc
import weakref
from typing import AbstractSet

import aio_pika


async def test_leak_unclosed_channel(create_connection):
    """Channels left open must still be garbage-collectable once the
    connection context exits."""
    rabbitmq_connection = await create_connection()

    weakset: AbstractSet[aio_pika.abc.AbstractChannel] = weakref.WeakSet()

    async def f(rabbitmq_connection: aio_pika.Connection, weakset):
        weakset.add(await rabbitmq_connection.channel())

    async with rabbitmq_connection:
        for i in range(5):
            await f(rabbitmq_connection, weakset)
        gc.collect()

    # Weak references must all be dead — nothing kept the channels alive.
    assert len(tuple(weakset)) == 0


async def test_leak_closed_channel(create_connection):
    """Explicitly closed channels must not be retained either."""
    rabbitmq_connection = await create_connection()

    weakset: AbstractSet[aio_pika.abc.AbstractConnection] = weakref.WeakSet()

    async def f(rabbitmq_connection: aio_pika.Connection, weakset):
        async with rabbitmq_connection.channel() as channel:
            weakset.add(channel)

    async with rabbitmq_connection:
        for i in range(5):
            await f(rabbitmq_connection, weakset)
        gc.collect()

    assert len(tuple(weakset)) == 0
# --- tar member boundary (archive residue, kept verbatim) ---
python-aio-pika-9.5.5/tests/test_message.py000066400000000000000000000052151476164671100207620ustar00rootroot00000000000000
import time
from copy import copy
from datetime import datetime, timezone
from typing import List, Tuple

import shortuuid

from aio_pika import DeliveryMode, Message
from aio_pika.abc import FieldValue, HeadersType, MessageInfo


def test_message_copy():
    # Locking the original must not lock a copy made beforehand.
    msg1 = Message(
        bytes(shortuuid.uuid(), "utf-8"),
        content_type="application/json",
        content_encoding="text",
        timestamp=datetime(2000, 1, 1),
        headers={"h1": "v1", "h2": "v2"},
    )
    msg2 = copy(msg1)

    msg1.lock()

    assert not msg2.locked


def test_message_info():
    """Message.info() must reproduce every field of MessageInfo."""
    body = bytes(shortuuid.uuid(), "utf-8")

    info = MessageInfo(
        app_id="test",
        body_size=len(body),
        cluster_id=None,
        consumer_tag=None,
        content_encoding="text",
        content_type="application/json",
        correlation_id="1",
        delivery_mode=DeliveryMode.PERSISTENT,
        delivery_tag=None,
        exchange=None,
        expiration=1.5,
        headers={"foo": "bar"},
        message_id=shortuuid.uuid(),
        priority=0,
        redelivered=None,
        reply_to="test",
        routing_key=None,
        timestamp=datetime.fromtimestamp(int(time.time()), tz=timezone.utc),
        type="0",
        user_id="guest",
    )

    msg = Message(
        body=body,
        headers={"foo": "bar"},
        content_type="application/json",
        content_encoding="text",
        delivery_mode=DeliveryMode.PERSISTENT,
        priority=0,
        correlation_id="1",
        reply_to="test",
        expiration=1.5,
        message_id=info["message_id"],
        timestamp=info["timestamp"],
        type="0",
        user_id="guest",
        app_id="test",
    )

    assert info == msg.info()


def test_headers_setter():
    data: HeadersType = {"foo": "bar"}
    data_expected = {"foo": "bar"}

    msg = Message(b"", headers={"bar": "baz"})
    msg.headers = data

    assert msg.headers == data_expected


def test_headers_content():
    # (source value, expected stored value) pairs.
    data: Tuple[List[FieldValue], ...] = (
        [42, 42],
        [b"foo", b"foo"],
        [b"\00", b"\00"],
    )

    for src, value in data:
        msg = Message(b"", headers={"value": src})
        assert msg.headers["value"] == value


def test_headers_set():
    msg = Message(b"", headers={"header": "value"})

    data = (
        ["header-1", 42, 42],
        ["header-2", b"foo", b"foo"],
        ["header-3", b"\00", b"\00"],
        ["header-4", {"foo": "bar"}, {"foo": "bar"}],
    )

    for name, src, value in data:  # type: ignore
        msg.headers[name] = value  # type: ignore
        assert msg.headers[name] == value  # type: ignore

    # Pre-existing header must be untouched.
    assert msg.headers["header"] == "value"
# --- tar member boundary (archive residue, kept verbatim) ---
python-aio-pika-9.5.5/tests/test_pool.py000066400000000000000000000077221476164671100203130ustar00rootroot00000000000000
import asyncio
from collections import Counter

import pytest

from aio_pika.pool import Pool, PoolInstance


@pytest.mark.parametrize("max_size", [50, 10, 5, 1])
async def test_simple(max_size, event_loop):
    """The pool never creates more than max_size instances under load."""
    counter = 0

    async def create_instance():
        nonlocal counter
        await asyncio.sleep(0)
        counter += 1
        return counter

    pool: Pool = Pool(create_instance, max_size=max_size, loop=event_loop)

    async def getter():
        nonlocal counter, pool

        async with pool.acquire() as instance:
            assert instance > 0
            # Hold the item long enough to force pool growth up to max_size.
            await asyncio.sleep(1 if counter < max_size else 0)
            return instance, counter

    results = await asyncio.gather(*[getter() for _ in range(200)])

    for instance, total in results:
        assert instance > -1
        assert total > -1

    assert counter == max_size


class TestInstanceBase:
    class Instance(PoolInstance):
        # Minimal pool item: close() is one-shot and raises when repeated.
        def __init__(self):
            self.closed = False

        async def close(self):
            if self.closed:
                raise RuntimeError

            self.closed = True

    @pytest.fixture
    def instances(self):
        # Collects every instance the pool ever created.
        return set()

    @pytest.fixture(params=[50, 40, 30, 20, 10])
    def max_size(self, request):
        return request.param

    @pytest.fixture
    def pool(self, max_size, instances, event_loop):
        async def create_instance():
            nonlocal instances

            obj = TestInstanceBase.Instance()
            instances.add(obj)
            return obj

        return Pool(create_instance, max_size=max_size, loop=event_loop)


class TestInstance(TestInstanceBase):
    async def test_close(self, pool, instances, event_loop, max_size):
        # pool.close() must close every created instance exactly once.
        async def getter():
            async with pool.acquire():
                await asyncio.sleep(0.05)

        assert not pool.is_closed
        assert len(instances) == 0

        await asyncio.gather(*[getter() for _ in range(200)])

        assert len(instances) == max_size

        for instance in instances:
            assert not instance.closed

        await pool.close()

        for instance in instances:
            assert instance.closed

        assert pool.is_closed

    async def test_close_context_manager(self, pool, instances):
        # Same guarantee via `async with pool:`.
        async def getter():
            async with pool.acquire():
                await asyncio.sleep(0.05)

        async with pool:
            assert not pool.is_closed
            assert len(instances) == 0

            await asyncio.gather(*[getter() for _ in range(200)])

            assert len(instances) > 1

            for instance in instances:
                assert not instance.closed

            assert not pool.is_closed

        assert pool.is_closed

        for instance in instances:
            assert instance.closed


class TestCaseNoMaxSize(TestInstance):
    async def test_simple(self, pool, event_loop):
        call_count = 200
        counter = 0

        async def getter():
            nonlocal counter

            async with pool.acquire() as instance:
                await asyncio.sleep(1)
                assert isinstance(instance, TestInstanceBase.Instance)
                counter += 1
                return counter

        results = await asyncio.gather(
            *[getter() for _ in range(call_count)]
        )

        for result in results:
            assert result > -1

        assert counter == call_count


class TestCaseItemReuse(TestInstanceBase):
    @pytest.fixture
    def call_count(self, max_size):
        return max_size * 5

    async def test_simple(self, pool, call_count, instances):
        # Work must be spread evenly: every instance used the same number
        # of times.
        counter: Counter = Counter()

        async def getter():
            nonlocal counter

            async with pool.acquire() as instance:
                await asyncio.sleep(0.05)
                counter[instance] += 1

        await asyncio.gather(*[getter() for _ in range(call_count)])

        assert sum(counter.values()) == call_count
        assert set(counter) == set(instances)
        assert len(set(counter.values())) == 1
# --- tar member boundary (archive residue, kept verbatim) ---
python-aio-pika-9.5.5/tests/test_rpc.py000066400000000000000000000203301476164671100201160ustar00rootroot00000000000000
import asyncio
import logging
import warnings
from functools import partial

import pytest

import aio_pika
from aio_pika import Message
from aio_pika.exceptions import MessageProcessError
from aio_pika.message import IncomingMessage
from aio_pika.patterns.rpc import RPC
from aio_pika.patterns.rpc import log as rpc_logger
from tests import get_random_name


async def rpc_func(*, foo, bar):
    assert not foo
    assert not bar

    return {"foo": "bar"}


async def rpc_func2(*, foo, bar):
    assert not foo
    assert not bar

    return {"foo": "bar2"}


class TestCase:
    async def test_simple(self, channel: aio_pika.Channel):
        rpc = await RPC.create(channel, auto_delete=True)

        await rpc.register("test.rpc", rpc_func, auto_delete=True)

        result = await rpc.proxy.test.rpc(foo=None, bar=None)
        assert result == {"foo": "bar"}

        await rpc.unregister(rpc_func)
        await rpc.close()

        # Close already closed
        await rpc.close()

    async def test_error(self, channel: aio_pika.Channel):
        # Handler-side AssertionError must propagate to the caller.
        rpc = await RPC.create(channel, auto_delete=True)

        await rpc.register("test.rpc", rpc_func, auto_delete=True)

        with pytest.raises(AssertionError):
            await rpc.proxy.test.rpc(foo=True, bar=None)

        await rpc.unregister(rpc_func)
        await rpc.close()

    async def test_unroutable(self, channel: aio_pika.Channel):
        rpc = await RPC.create(channel, auto_delete=True)

        await rpc.register("test.rpc", rpc_func, auto_delete=True)

        with pytest.raises(MessageProcessError):
            await rpc.proxy.unroutable()

        await rpc.unregister(rpc_func)
        await rpc.close()

    async def test_timed_out(self, channel: aio_pika.Channel):
        # A queue with a DLX but no consumer: the call expires.
        rpc = await RPC.create(channel, auto_delete=True)

        await rpc.register("test.rpc", rpc_func, auto_delete=True)

        await channel.declare_queue(
            "test.timed_out",
            auto_delete=True,
            arguments={"x-dead-letter-exchange": RPC.DLX_NAME},
        )

        with pytest.raises(asyncio.TimeoutError):
            await rpc.call("test.timed_out", expiration=1)

        await rpc.unregister(rpc_func)
        await rpc.close()

    async def test_close_twice(self, channel: aio_pika.Channel):
        rpc = await RPC.create(channel, auto_delete=True)

        await rpc.close()
        await rpc.close()

    async def test_init_twice(self, channel: aio_pika.Channel):
        rpc = await RPC.create(channel, auto_delete=True)

        await rpc.initialize()

        await rpc.close()

    async def test_send_unknown_message(
        self, channel: aio_pika.Channel, caplog,
    ):
        """Messages without correlation_id are logged as warnings, both on
        the result queue and as returned (unroutable) messages."""
        rpc = await RPC.create(channel, auto_delete=True)

        body = b"test body"
        with caplog.at_level(logging.WARNING, logger=rpc_logger.name):
            await channel.default_exchange.publish(
                Message(body), routing_key=rpc.result_queue.name,
            )
            await asyncio.sleep(0.5)

        for log_record in caplog.records:
            if log_record.levelno == logging.WARNING:
                break
        else:
            raise pytest.fail("Expected log message")

        incoming = log_record.args[0]
        assert isinstance(incoming, IncomingMessage)
        assert incoming.body == body
        assert (
            "Message without correlation_id was received:"
            in log_record.message
        )

        with caplog.at_level(logging.WARNING, logger=rpc_logger.name):
            await channel.default_exchange.publish(
                Message(body), routing_key="should-returned",
            )
            await asyncio.sleep(0.5)

        for log_record in caplog.records:
            if log_record.levelno == logging.WARNING:
                break
        else:
            raise pytest.fail("Expected log message")

        incoming = log_record.args[0]
        assert isinstance(incoming, IncomingMessage)
        assert incoming.body == body
# NOTE(review): archive-mangled region reconstructed with conventional
# formatting; tokens unchanged. Continues TestCase.test_send_unknown_message
# from test_rpc.py, whose beginning precedes this chunk.
        assert (
            "Message without correlation_id was received:"
            in log_record.message
        )

        await rpc.close()

    async def test_close_cancelling(
        self, channel: aio_pika.Channel, event_loop,
    ):
        # Closing the RPC must cancel every in-flight call.
        rpc = await RPC.create(channel, auto_delete=True)

        async def sleeper():
            await asyncio.sleep(60)

        method_name = get_random_name("test", "sleeper")

        await rpc.register(method_name, sleeper, auto_delete=True)

        tasks = set()

        for _ in range(10):
            tasks.add(event_loop.create_task(rpc.call(method_name)))

        await rpc.close()

        logging.info("Waiting for results")
        for task in tasks:
            with pytest.raises(asyncio.CancelledError):
                await task

    async def test_register_twice(self, channel: aio_pika.Channel):
        rpc = await RPC.create(channel, auto_delete=True)

        async def bypass(_: aio_pika.abc.AbstractIncomingMessage):
            return

        await rpc.register("test.sleeper", bypass, auto_delete=True)

        # Same method name again -> error.
        with pytest.raises(RuntimeError):
            await rpc.register(
                "test.sleeper", bypass, auto_delete=True,
            )

        await rpc.register("test.one", rpc_func, auto_delete=True)

        # Same callable under a second name -> error as well.
        with pytest.raises(RuntimeError):
            await rpc.register("test.two", rpc_func, auto_delete=True)

        await rpc.unregister(rpc_func)
        await rpc.unregister(rpc_func)

        await rpc.close()

    async def test_register_non_coroutine(self, channel: aio_pika.Channel):
        # Registering a plain function is deprecated but still works.
        rpc = await RPC.create(channel, auto_delete=True)

        def bypass(_):
            return

        with pytest.deprecated_call():
            await rpc.register(
                "test.non-coroutine", bypass,  # type: ignore
                auto_delete=True,
            )

        async def coro(_):
            return

        # A real coroutine must not trigger extra warnings beyond the one
        # emitted manually here.
        with pytest.warns(UserWarning) as record:
            warnings.warn("Test", UserWarning)
            await rpc.register(
                "test.coroutine", coro,  # type: ignore
                auto_delete=True,
            )
        assert len(record) == 1

        with pytest.warns() as record:
            warnings.warn("Test", UserWarning)
            await rpc.register(
                "test.coroutine_partial",
                partial(partial(coro)),  # type: ignore
                auto_delete=True,
            )
        assert len(record) == 1

    async def test_non_serializable_result(self, channel: aio_pika.Channel):
        # A coroutine object as a result cannot be serialized.
        rpc = await RPC.create(channel, auto_delete=True)

        async def bad_func():
            async def inner():
                await asyncio.sleep(0)
            return inner()

        await rpc.register(
            "test.not-serializable", bad_func, auto_delete=True,
        )

        with pytest.raises(TypeError):
            await rpc.call("test.not-serializable")

    async def test_custom_exchange(self, channel: aio_pika.Channel):
        """RPC instances on distinct exchanges are fully isolated."""
        rpc_ex1 = await RPC.create(channel, auto_delete=True, exchange="ex1")
        rpc_ex2 = await RPC.create(channel, auto_delete=True, exchange="ex2")
        rpc_default = await RPC.create(channel, auto_delete=True)

        await rpc_ex1.register("test.rpc", rpc_func, auto_delete=True)
        result = await rpc_ex1.proxy.test.rpc(foo=None, bar=None)
        assert result == {"foo": "bar"}

        with pytest.raises(MessageProcessError):
            await rpc_ex2.proxy.test.rpc(foo=None, bar=None)

        await rpc_ex2.register("test.rpc", rpc_func2, auto_delete=True)
        result = await rpc_ex2.proxy.test.rpc(foo=None, bar=None)
        assert result == {"foo": "bar2"}

        with pytest.raises(MessageProcessError):
            await rpc_default.proxy.test.rpc(foo=None, bar=None)

        await rpc_default.register("test.rpc", rpc_func, auto_delete=True)
        result = await rpc_default.proxy.test.rpc(foo=None, bar=None)
        assert result == {"foo": "bar"}

        await rpc_ex1.unregister(rpc_func)
        await rpc_ex1.close()

        await rpc_ex2.unregister(rpc_func2)
        await rpc_ex2.close()

        await rpc_default.unregister(rpc_func)
        await rpc_default.close()
# --- tar member boundary (archive residue, kept verbatim) ---
python-aio-pika-9.5.5/tests/test_tools.py000066400000000000000000000113161476164671100204750ustar00rootroot00000000000000
import asyncio
import logging
from copy import copy
from typing import Any, List
from unittest import mock

import pytest

from aio_pika.tools import CallbackCollection, ensure_awaitable

log = logging.getLogger(__name__)


# noinspection PyTypeChecker
class TestCase:
    """CallbackCollection behaviour: add/remove, freeze semantics and
    invocation of sync, async and task-returning callbacks."""

    @pytest.fixture
    def instance(self) -> mock.MagicMock:
        return mock.MagicMock()

    @pytest.fixture
    def collection(self, instance):
        return CallbackCollection(instance)

    def test_basic(self, collection):
        def func(sender, *args, **kwargs):
            pass

        collection.add(func)

        assert func in collection

        with pytest.raises(ValueError):
            collection.add(None)

        collection.remove(func)

        with pytest.raises(LookupError):
            collection.remove(func)

        # Duplicates are collapsed: repeated adds keep a single entry.
        for _ in range(10):
            collection.add(func)

        assert len(collection) == 1

        collection.freeze()

        with pytest.raises(RuntimeError):
            collection.freeze()

        assert len(collection) == 1

        # A frozen collection rejects any mutation.
        with pytest.raises(RuntimeError):
            collection.add(func)

        with pytest.raises(RuntimeError):
            collection.remove(func)

        with pytest.raises(RuntimeError):
            collection.clear()

        collection2 = copy(collection)

        collection.unfreeze()

        assert not copy(collection).is_frozen
        assert collection.is_frozen != collection2.is_frozen

        with pytest.raises(RuntimeError):
            collection.unfreeze()

        collection.clear()

        assert collection2
        assert not collection

    def test_callback_call(self, collection):
        l1: List[Any] = list()
        l2: List[Any] = list()

        assert l1 == l2

        collection.add(lambda sender, x: l1.append(x))
        collection.add(lambda sender, x: l2.append(x))

        collection(1)
        collection(2)

        assert l1 == l2
        assert l1 == [1, 2]

    async def test_blank_awaitable_callback(self, collection):
        # Awaiting an empty collection is a no-op.
        await collection()

    async def test_awaitable_callback(
        self, event_loop, collection, instance,
    ):
        future = event_loop.create_future()

        shared = []

        async def coro(arg):
            nonlocal shared
            shared.append(arg)

        def task_maker(arg):
            return event_loop.create_task(coro(arg))

        collection.add(future.set_result)
        collection.add(coro)
        collection.add(task_maker)

        await collection()

        # coro runs once directly and once via task_maker's task.
        assert shared == [instance, instance]
        assert await future == instance

    async def test_collection_create_tasks(
        self, event_loop, collection, instance,
    ):
        future = event_loop.create_future()

        async def coro(arg):
            await asyncio.sleep(0.5)
            future.set_result(arg)

        collection.add(coro)

        # Fire-and-forget invocation still schedules the coroutine.
        # noinspection PyAsyncCall
        collection()

        assert await future == instance

    async def test_collection_run_tasks_parallel(self, collection):
        # 100 one-second callbacks must complete within 2s overall,
        # i.e. they run concurrently, each exactly once.
        class Callable:
            def __init__(self):
                self.counter = 0

            async def __call__(self, *args, **kwargs):
                await asyncio.sleep(1)
                self.counter += 1

        callables = [Callable() for _ in range(100)]

        for callable in callables:
            collection.add(callable)

        await asyncio.wait_for(collection(), timeout=2)

        assert [c.counter for c in callables] == [1] * 100


class TestEnsureAwaitable:
    async def test_non_coroutine(self):
        # Wrapping a plain function is deprecated at wrap AND call time.
        with pytest.deprecated_call(match="You probably registering the"):
            func = ensure_awaitable(lambda x: x * x)

        with pytest.deprecated_call(match="Function"):
            assert await func(2) == 4

        with pytest.deprecated_call(match="Function"):
            assert await func(4) == 16

    async def test_coroutine(self):
        async def square(x):
            return x * x

        func = ensure_awaitable(square)

        assert await func(2) == 4
        assert await func(4) == 16

    async def test_something_awaitable_returned(self):
        # A sync function returning an awaitable: deprecated wrap, but the
        # returned awaitable is awaited transparently.
        def non_coro(x):
            async def coro(x):
                return x * x
            return coro(x)

        with pytest.deprecated_call(match="You probably registering the"):
            func = ensure_awaitable(non_coro)

        assert await func(2) == 4

    async def test_something_non_awaitable_returned(self):
        def non_coro(x):
            def coro(x):
                return x * x
            return coro(x)

        with pytest.deprecated_call(match="You probably registering the"):
            func = ensure_awaitable(non_coro)

        with pytest.deprecated_call(match="Function"):
            assert await func(2) == 4
# --- tar member boundary (archive residue, kept verbatim) ---
python-aio-pika-9.5.5/tests/test_types.py000066400000000000000000000017221476164671100205010ustar00rootroot00000000000000
import aio_pika
import aio_pika.abc


async def test_connect_robust(amqp_url) -> None:
    # connect_robust yields the robust ABC flavours.
    async with await aio_pika.connect_robust(amqp_url) as connection:
        assert isinstance(connection, aio_pika.abc.AbstractRobustConnection)
        assert isinstance(connection, aio_pika.abc.AbstractConnection)

        channel = await connection.channel()
        assert isinstance(channel, aio_pika.abc.AbstractRobustChannel)
        assert isinstance(channel, aio_pika.abc.AbstractChannel)


async def test_connect(amqp_url) -> None:
    # Plain connect must NOT yield robust implementations.
    async with await aio_pika.connect(amqp_url) as connection:
        assert isinstance(connection, aio_pika.abc.AbstractConnection)
        assert not isinstance(
            connection, aio_pika.abc.AbstractRobustConnection,
        )

        channel = await connection.channel()
        assert isinstance(channel, aio_pika.abc.AbstractChannel)
        assert not isinstance(
            channel, aio_pika.abc.AbstractRobustChannel,
        )