pax_global_header00006660000000000000000000000064142231421560014512gustar00rootroot0000000000000052 comment=802bb9efad467b37e7be5f7e29c01621878b7d7e aioamqp-aioamqp-0.15.0/000077500000000000000000000000001422314215600146515ustar00rootroot00000000000000aioamqp-aioamqp-0.15.0/.github/000077500000000000000000000000001422314215600162115ustar00rootroot00000000000000aioamqp-aioamqp-0.15.0/.github/workflows/000077500000000000000000000000001422314215600202465ustar00rootroot00000000000000aioamqp-aioamqp-0.15.0/.github/workflows/tests.yml000066400000000000000000000025501422314215600221350ustar00rootroot00000000000000name: Run tests and QA checks on: push: branches: [master] tags: [aioamqp-*] pull_request: branches: [master] workflow_dispatch: jobs: quality: name: Run QA checks on Python ${{ matrix.python }} runs-on: ubuntu-latest strategy: matrix: python: ["3.7", "3.8", "3.9", "3.10"] steps: - name: Check out repository uses: actions/checkout@v2 - name: Install Python ${{ matrix.python }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python }} - name: Update base packages run: pip install -U setuptools pip - name: Install deps run: make update - name: Run pylint run: make pylint tests: name: Run test suite Python ${{ matrix.python }} runs-on: ubuntu-latest services: rabbitmq: image: rabbitmq:3.6.9-management ports: - 5672:5672 - 15672:15672 strategy: matrix: python: ["3.7", "3.8", "3.9", "3.10"] steps: - name: Check out repository uses: actions/checkout@v2 - name: Install Python ${{ matrix.python }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python }} - name: Update base packages run: pip install -U setuptools pip - name: Install deps run: make update - name: Run tests run: make test aioamqp-aioamqp-0.15.0/.gitignore000066400000000000000000000011241422314215600166370ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] # C extensions *.so # Distribution / packaging .Python env/ venv/ venv3 bin/ build/ 
develop-eggs/ dist/ eggs/ lib/ lib64/ parts/ sdist/ var/ *.egg-info/ .installed.cfg *.egg # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports .tox/ .coverage .cache nosetests.xml coverage.xml # Translations *.mo # Mr Developer .mr.developer.cfg .project .pydevproject # Rope .ropeproject # Django stuff: *.log *.pot # Sphinx documentation docs/_build/ # editor stuffs *.swp # Pycharm/IntelliJ .idea/*aioamqp-aioamqp-0.15.0/.pylintrc000066400000000000000000000044021422314215600165160ustar00rootroot00000000000000[MASTER] # Add files or directories to the blacklist. They should be base names, not # paths. ignore=migrations,south_migrations # Add files or directories matching the regex patterns to the blacklist. The # regex matches against base names, not paths. ignore-patterns=.*_pb2.py [MESSAGES CONTROL] # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once).You can also use "--disable=all" to # disable everything first and then reenable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". 
If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" disable= attribute-defined-outside-init, bad-continuation, cyclic-import, duplicate-code, fixme, file-ignored, invalid-name, locally-enabled, locally-disabled, missing-docstring, no-init, no-member, no-self-use, old-style-class, protected-access, redefined-variable-type, superfluous-parens, too-few-public-methods, too-many-ancestors, too-many-arguments, too-many-branches, too-many-instance-attributes, too-many-lines, too-many-locals, too-many-public-methods, too-many-statements, unused-argument, wrong-import-order [REPORTS] # Template used to display messages. This is a python new-style format string # used to format the message information. See doc for all details msg-template="{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}" [FORMAT] # Maximum number of characters on a single line. max-line-length=120 [VARIABLES] # A regular expression matching the beginning of the name of dummy variables # (i.e. not used). # Don't use pylint 1.6.1 default regexp, it doesn't accept underscores in the middle dummy-variables-rgx=_|dummy [DESIGN] # Maximum number of arguments for function / method max-args=8 # Maximum number of parents for a class (see R0901). max-parents=10 # Maximum number of attributes for a class (see R0902). max-attributes=10 aioamqp-aioamqp-0.15.0/.travis.yml000066400000000000000000000013131422314215600167600ustar00rootroot00000000000000language: python dist: focal python: - 3.6 - 3.7 - 3.8 - 3.9 services: - rabbitmq env: - PYTHONASYNCIODEBUG=1 install: - pip install -r requirements_dev.txt - pip install --editable . 
- pip freeze addons: apt: packages: - rabbitmq-server before_script: "sudo rabbitmq-plugins enable rabbitmq_management" script: "make test && make pylint" notifications: email: true irc: channels: - chat.freenode.net#aioamqp on_success: change on_failure: always use_notice: true skip_join: true slack: secure: apz1q5tPlt/F/YMMbP/1n7DhlCgfl6TZPo+IY79lrj8ko51srq043OksWnrwiCPz0e8pXG4QPOBhnP/IkkupeH9qQ/JziN35Cn7tfCsY9Id0i35KDe6gJPZKJd9+w9fF1d+ceYMyyN86JIpyuKCta0DP2FV+wwdAmP5TJv+kh6E= aioamqp-aioamqp-0.15.0/AUTHORS.rst000066400000000000000000000011231422314215600165250ustar00rootroot00000000000000aioamqp was originally created in early 2014 at Polyconseil. AUTHORS are (and/or have been):: * Benoît Calvez * Thomas Recouvreux * Guillaume Gauvrit * Morgan Delahaye-Prat * Mathias Fröjdman * Dmitry Maslov * Matthias Urlichs * Rémi Cardona * Marco Mariani * David Honour * Igor `mastak` * Hans Lellelid * `iceboy-sjtu` * Sergio Medina Toledo * Alexander Gromyko * Nick Humrich * Pavel Kamaev * Mads Sejersen * Dave Shawley * Jacob Hagstedt P Suorra * Corey `notmeta` * Paul Wistrand * fullylegit aioamqp-aioamqp-0.15.0/CHANGES.rst000066400000000000000000000006631422314215600164600ustar00rootroot00000000000000aioamqp changelog ================= Here you can see the full list of changes between each aioamqp release. Version 0.2.0 ------------- * Remove the `asyncio.Queue` and adds a callback parameter to basic_consume * Add a `on_error` callback when creating a channel or a connection Version 0.1.1 ------------- * Add `no_wait` and `timeout` parameters to `close` method Version 0.1 ----------- * First public preview release. aioamqp-aioamqp-0.15.0/Dockerfile000066400000000000000000000001311422314215600166360ustar00rootroot00000000000000FROM python:3.9 WORKDIR /usr/src/app COPY . . 
RUN pip install -r requirements_dev.txt aioamqp-aioamqp-0.15.0/LICENSE000066400000000000000000000027071422314215600156640ustar00rootroot00000000000000Copyright (c) 2014-2017 aioamqp's authors (see AUTHORS.rst) Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the University nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
aioamqp-aioamqp-0.15.0/MANIFEST.in000066400000000000000000000003471422314215600164130ustar00rootroot00000000000000include README.rst include CHANGES.rst include LICENSE include AUTHORS.rst recursive-include docs * recursive-include examples *.py recursive-exclude * *.pyc recursive-exclude * *.pyo recursive-exclude * *.swp prune docs/_build aioamqp-aioamqp-0.15.0/Makefile000066400000000000000000000024061422314215600163130ustar00rootroot00000000000000.PHONY: doc test update PACKAGE = aioamqp TEST_LAUNCHER ?= pytest TEST_OPTIONS ?= -v -s --timeout=60 PYLINT_RC ?= .pylintrc BUILD_DIR ?= build INPUT_DIR ?= docs # Sphinx options (are passed to build_docs, which passes them to sphinx-build) # -W : turn warning into errors # -a : write all files # -b html : use html builder # -i [pat] : ignore pattern SPHINXOPTS ?= -a -W -b html AUTOSPHINXOPTS := -i *~ -i *.sw* -i Makefile* SPHINXBUILDDIR ?= $(BUILD_DIR)/sphinx/html ALLSPHINXOPTS ?= -d $(BUILD_DIR)/sphinx/doctrees $(SPHINXOPTS) docs doc: sphinx-build -a $(INPUT_DIR) build livehtml: docs sphinx-autobuild $(AUTOSPHINXOPTS) $(ALLSPHINXOPTS) $(SPHINXBUILDDIR) test: $(TEST_LAUNCHER) $(TEST_OPTIONS) $(PACKAGE) update: pip install --upgrade --upgrade-strategy=eager -r requirements_dev.txt pylint: pylint aioamqp ### semi-private targets used by polyconseil's CI (copy-pasted from blease) ### .PHONY: reports jenkins-test jenkins-quality reports: mkdir -p reports jenkins-test: reports $(MAKE) test TEST_OPTIONS="--cov=$(PACKAGE) \ --cov-report xml:reports/xmlcov.xml \ --junitxml=reports/TEST-$(PACKAGE).xml \ -v \ $(TEST_OPTIONS)" jenkins-quality: reports pylint --rcfile=$(PYLINT_RC) $(PACKAGE) > reports/pylint.report || true aioamqp-aioamqp-0.15.0/README.rst000066400000000000000000000031501422314215600163370ustar00rootroot00000000000000aioamqp ======= .. image:: https://badge.fury.io/py/aioamqp.svg :target: http://badge.fury.io/py/aioamqp .. 
image:: https://travis-ci.org/Polyconseil/aioamqp.svg?branch=master :target: https://travis-ci.org/Polyconseil/aioamqp .. image:: https://readthedocs.org/projects/aioamqp/badge/?version=latest :alt: Documentation Status :scale: 100% :target: https://aioamqp.readthedocs.org/en/latest/?badge=latest ``aioamqp`` library is a pure-Python implementation of the `AMQP 0.9.1 protocol`_. Built on top on Python's asynchronous I/O support introduced in `PEP 3156`_, it provides an API based on coroutines, making it easy to write highly concurrent applications. Bug reports, patches and suggestions welcome! Just open an issue_ or send a `pull request`_. tests ----- To run the tests, you'll need to install the Python test dependencies:: pip install -r requirements_dev.txt Tests require an instance of RabbitMQ. You can start a new instance using docker:: docker run -d --log-driver=syslog -e RABBITMQ_NODENAME=my-rabbit --name rabbitmq -p 5672:5672 -p 15672:15672 rabbitmq:3-management Then you can run the tests with ``make test``. tests using docker-compose ^^^^^^^^^^^^^^^^^^^^^^^^^^ Start RabbitMQ using ``docker-compose up -d rabbitmq``. When RabbitMQ has started, start the tests using ``docker-compose up --build aioamqp-test`` .. _AMQP 0.9.1 protocol: https://www.rabbitmq.com/amqp-0-9-1-quickref.html .. _PEP 3156: http://www.python.org/dev/peps/pep-3156/ .. _issue: https://github.com/Polyconseil/aioamqp/issues/new .. 
_pull request: https://github.com/Polyconseil/aioamqp/compare/ aioamqp-aioamqp-0.15.0/aioamqp/000077500000000000000000000000001422314215600163005ustar00rootroot00000000000000aioamqp-aioamqp-0.15.0/aioamqp/__init__.py000066400000000000000000000072051422314215600204150ustar00rootroot00000000000000import asyncio import socket import ssl from urllib.parse import urlparse from .exceptions import * # pylint: disable=wildcard-import from .protocol import AmqpProtocol from .version import __version__ from .version import __packagename__ async def connect(host='localhost', port=None, login='guest', password='guest', virtualhost='/', ssl=None, login_method='PLAIN', insist=False, # pylint: disable=redefined-outer-name protocol_factory=AmqpProtocol, **kwargs): """Convenient method to connect to an AMQP broker @host: the host to connect to @port: broker port @login: login @password: password @virtualhost: AMQP virtualhost to use for this connection @ssl: SSL context used for secure connections, omit for no SSL - see https://docs.python.org/3/library/ssl.html @login_method: AMQP auth method @insist: Insist on connecting to a server @protocol_factory: Factory to use, if you need to subclass AmqpProtocol @kwargs: Arguments to be given to the protocol_factory instance Returns: a tuple (transport, protocol) of an AmqpProtocol instance """ factory = lambda: protocol_factory(**kwargs) # pylint: disable=unnecessary-lambda create_connection_kwargs = {} if ssl is not None: create_connection_kwargs['ssl'] = ssl if port is None: if ssl: port = 5671 else: port = 5672 transport, protocol = await asyncio.get_running_loop().create_connection( factory, host, port, **create_connection_kwargs ) # these 2 flags *may* show up in sock.type. 
They are only available on linux # see https://bugs.python.org/issue21327 nonblock = getattr(socket, 'SOCK_NONBLOCK', 0) cloexec = getattr(socket, 'SOCK_CLOEXEC', 0) sock = transport.get_extra_info('socket') if sock is not None and (sock.type & ~nonblock & ~cloexec) == socket.SOCK_STREAM: sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) try: await protocol.start_connection(host, port, login, password, virtualhost, ssl=ssl, login_method=login_method, insist=insist) except Exception: await protocol.wait_closed() raise return transport, protocol async def from_url( url, login_method='PLAIN', insist=False, protocol_factory=AmqpProtocol, **kwargs): """ Connect to the AMQP using a single url parameter and return the client. For instance: amqp://user:password@hostname:port/vhost @insist: Insist on connecting to a server @protocol_factory: Factory to use, if you need to subclass AmqpProtocol @kwargs: Arguments to be given to the protocol_factory instance Returns: a tuple (transport, protocol) of an AmqpProtocol instance """ url = urlparse(url) if url.scheme not in ('amqp', 'amqps'): raise ValueError(f'Invalid protocol {url.scheme}, valid protocols are amqp or amqps') if url.scheme == 'amqps' and not kwargs.get('ssl'): kwargs['ssl'] = ssl.create_default_context() transport, protocol = await connect( host=url.hostname or 'localhost', port=url.port, login=url.username or 'guest', password=url.password or 'guest', virtualhost=(url.path[1:] if len(url.path) > 1 else '/'), login_method=login_method, insist=insist, protocol_factory=protocol_factory, **kwargs) return transport, protocol aioamqp-aioamqp-0.15.0/aioamqp/channel.py000066400000000000000000000710761422314215600202750ustar00rootroot00000000000000""" Amqp channel specification """ import asyncio import logging import uuid import io from itertools import count import warnings import pamqp.commands from . import frame as amqp_frame from . import exceptions from . 
import properties as amqp_properties from .envelope import Envelope, ReturnEnvelope logger = logging.getLogger(__name__) class Channel: def __init__(self, protocol, channel_id, return_callback=None): self.protocol = protocol self.channel_id = channel_id self.consumer_queues = {} self.consumer_callbacks = {} self.cancellation_callbacks = [] self.return_callback = return_callback self.response_future = None self.close_event = asyncio.Event() self.cancelled_consumers = set() self.last_consumer_tag = None self.publisher_confirms = False self.delivery_tag_iter = None # used for mapping delivered messages to publisher confirms self._exchange_declare_lock = asyncio.Lock() self._queue_bind_lock = asyncio.Lock() self._futures = {} self._ctag_events = {} def _set_waiter(self, rpc_name): if rpc_name in self._futures: raise exceptions.SynchronizationError("Waiter already exists") fut = asyncio.Future() self._futures[rpc_name] = fut return fut def _get_waiter(self, rpc_name): fut = self._futures.pop(rpc_name, None) if not fut: raise exceptions.SynchronizationError(f"Call {rpc_name} didn't set a waiter") return fut @property def is_open(self): return not self.close_event.is_set() def connection_closed(self, server_code=None, server_reason=None, exception=None): for future in self._futures.values(): if future.done(): continue if exception is None: kwargs = {} if server_code is not None: kwargs['code'] = server_code if server_reason is not None: kwargs['message'] = server_reason exception = exceptions.ChannelClosed(**kwargs) future.set_exception(exception) self.protocol.release_channel_id(self.channel_id) self.close_event.set() async def dispatch_frame(self, frame): methods = { pamqp.commands.Channel.OpenOk.name: self.open_ok, pamqp.commands.Channel.FlowOk.name: self.flow_ok, pamqp.commands.Channel.CloseOk.name: self.close_ok, pamqp.commands.Channel.Close.name: self.server_channel_close, pamqp.commands.Exchange.DeclareOk.name: self.exchange_declare_ok, 
pamqp.commands.Exchange.BindOk.name: self.exchange_bind_ok, pamqp.commands.Exchange.UnbindOk.name: self.exchange_unbind_ok, pamqp.commands.Exchange.DeleteOk.name: self.exchange_delete_ok, pamqp.commands.Queue.DeclareOk.name: self.queue_declare_ok, pamqp.commands.Queue.DeleteOk.name: self.queue_delete_ok, pamqp.commands.Queue.BindOk.name: self.queue_bind_ok, pamqp.commands.Queue.UnbindOk.name: self.queue_unbind_ok, pamqp.commands.Queue.PurgeOk.name: self.queue_purge_ok, pamqp.commands.Basic.QosOk.name: self.basic_qos_ok, pamqp.commands.Basic.ConsumeOk.name: self.basic_consume_ok, pamqp.commands.Basic.CancelOk.name: self.basic_cancel_ok, pamqp.commands.Basic.GetOk.name: self.basic_get_ok, pamqp.commands.Basic.GetEmpty.name: self.basic_get_empty, pamqp.commands.Basic.Deliver.name: self.basic_deliver, pamqp.commands.Basic.Cancel.name: self.server_basic_cancel, pamqp.commands.Basic.Ack.name: self.basic_server_ack, pamqp.commands.Basic.Nack.name: self.basic_server_nack, pamqp.commands.Basic.RecoverOk.name: self.basic_recover_ok, pamqp.commands.Basic.Return.name: self.basic_return, pamqp.commands.Confirm.SelectOk.name: self.confirm_select_ok, } if frame.name not in methods: raise NotImplementedError(f"Frame {frame.name} is not implemented") await methods[frame.name](frame) async def _write_frame(self, channel_id, request, check_open=True, drain=True): await self.protocol.ensure_open() if not self.is_open and check_open: raise exceptions.ChannelClosed() amqp_frame.write(self.protocol._stream_writer, channel_id, request) if drain: await self.protocol._drain() async def _write_frame_awaiting_response(self, waiter_id, channel_id, request, no_wait, check_open=True, drain=True): '''Write a frame and set a waiter for the response (unless no_wait is set)''' if no_wait: await self._write_frame(channel_id, request, check_open=check_open, drain=drain) return None f = self._set_waiter(waiter_id) try: await self._write_frame(channel_id, request, check_open=check_open, drain=drain) 
except Exception: self._get_waiter(waiter_id) f.cancel() raise return (await f) # ## Channel class implementation # async def open(self): """Open the channel on the server.""" request = pamqp.commands.Channel.Open() return (await self._write_frame_awaiting_response( 'open', self.channel_id, request, no_wait=False, check_open=False)) async def open_ok(self, frame): self.close_event.clear() fut = self._get_waiter('open') fut.set_result(True) logger.debug("Channel is open") async def close(self, reply_code=0, reply_text="Normal Shutdown"): """Close the channel.""" if not self.is_open: raise exceptions.ChannelClosed("channel already closed or closing") self.close_event.set() request = pamqp.commands.Channel.Close(reply_code, reply_text, class_id=0, method_id=0) return (await self._write_frame_awaiting_response( 'close', self.channel_id, request, no_wait=False, check_open=False)) async def close_ok(self, frame): self._get_waiter('close').set_result(True) logger.info("Channel closed") self.protocol.release_channel_id(self.channel_id) async def _send_channel_close_ok(self): request = pamqp.commands.Channel.CloseOk() await self._write_frame(self.channel_id, request) async def server_channel_close(self, frame): await self._send_channel_close_ok() results = { 'reply_code': frame.reply_code, 'reply_text': frame.reply_text, 'class_id': frame.class_id, 'method_id': frame.method_id, } self.connection_closed(results['reply_code'], results['reply_text']) async def flow(self, active): request = pamqp.commands.Channel.Flow(active) return (await self._write_frame_awaiting_response( 'flow', self.channel_id, request, no_wait=False, check_open=False)) async def flow_ok(self, frame): self.close_event.clear() fut = self._get_waiter('flow') fut.set_result({'active': frame.active}) logger.debug("Flow ok") # ## Exchange class implementation # async def exchange_declare(self, exchange_name, type_name, passive=False, durable=False, auto_delete=False, no_wait=False, arguments=None): request = 
pamqp.commands.Exchange.Declare( exchange=exchange_name, exchange_type=type_name, passive=passive, durable=durable, auto_delete=auto_delete, nowait=no_wait, arguments=arguments ) async with self._exchange_declare_lock: return (await self._write_frame_awaiting_response( 'exchange_declare', self.channel_id, request, no_wait)) async def exchange_declare_ok(self, frame): future = self._get_waiter('exchange_declare') future.set_result(True) logger.debug("Exchange declared") return future async def exchange_delete(self, exchange_name, if_unused=False, no_wait=False): request = pamqp.commands.Exchange.Delete(exchange=exchange_name, if_unused=if_unused, nowait=no_wait) return await self._write_frame_awaiting_response( 'exchange_delete', self.channel_id, request, no_wait) async def exchange_delete_ok(self, frame): future = self._get_waiter('exchange_delete') future.set_result(True) logger.debug("Exchange deleted") async def exchange_bind(self, exchange_destination, exchange_source, routing_key, no_wait=False, arguments=None): if arguments is None: arguments = {} request = pamqp.commands.Exchange.Bind( destination=exchange_destination, source=exchange_source, routing_key=routing_key, nowait=no_wait, arguments=arguments ) return (await self._write_frame_awaiting_response( 'exchange_bind', self.channel_id, request, no_wait)) async def exchange_bind_ok(self, frame): future = self._get_waiter('exchange_bind') future.set_result(True) logger.debug("Exchange bound") async def exchange_unbind(self, exchange_destination, exchange_source, routing_key, no_wait=False, arguments=None): if arguments is None: arguments = {} request = pamqp.commands.Exchange.Unbind( destination=exchange_destination, source=exchange_source, routing_key=routing_key, nowait=no_wait, arguments=arguments, ) return (await self._write_frame_awaiting_response( 'exchange_unbind', self.channel_id, request, no_wait)) async def exchange_unbind_ok(self, frame): future = self._get_waiter('exchange_unbind') 
future.set_result(True) logger.debug("Exchange bound") # ## Queue class implementation # async def queue_declare(self, queue_name=None, passive=False, durable=False, exclusive=False, auto_delete=False, no_wait=False, arguments=None): """Create or check a queue on the broker Args: queue_name: str, the queue to receive message from. The server generate a queue_name if not specified. passive: bool, if set, the server will reply with Declare-Ok if the queue already exists with the same name, and raise an error if not. Checks for the same parameter as well. durable: bool: If set when creating a new queue, the queue will be marked as durable. Durable queues remain active when a server restarts. exclusive: bool, request exclusive consumer access, meaning only this consumer can access the queue no_wait: bool, if set, the server will not respond to the method arguments: dict, AMQP arguments to be passed when creating the queue. """ if arguments is None: arguments = {} if not queue_name: queue_name = 'aioamqp.gen-' + str(uuid.uuid4()) request = pamqp.commands.Queue.Declare( queue=queue_name, passive=passive, durable=durable, exclusive=exclusive, auto_delete=auto_delete, nowait=no_wait, arguments=arguments ) return (await self._write_frame_awaiting_response( 'queue_declare' + queue_name, self.channel_id, request, no_wait)) async def queue_declare_ok(self, frame): results = { 'queue': frame.queue, 'message_count': frame.message_count, 'consumer_count': frame.consumer_count, } future = self._get_waiter('queue_declare' + results['queue']) future.set_result(results) logger.debug("Queue declared") async def queue_delete(self, queue_name, if_unused=False, if_empty=False, no_wait=False): """Delete a queue in RabbitMQ Args: queue_name: str, the queue to receive message from if_unused: bool, the queue is deleted if it has no consumers. Raise if not. if_empty: bool, the queue is deleted if it has no messages. Raise if not. 
no_wait: bool, if set, the server will not respond to the method """ request = pamqp.commands.Queue.Delete( queue=queue_name, if_unused=if_unused, if_empty=if_empty, nowait=no_wait ) return (await self._write_frame_awaiting_response( 'queue_delete', self.channel_id, request, no_wait)) async def queue_delete_ok(self, frame): future = self._get_waiter('queue_delete') future.set_result(True) logger.debug("Queue deleted") async def queue_bind(self, queue_name, exchange_name, routing_key, no_wait=False, arguments=None): """Bind a queue and a channel.""" if arguments is None: arguments = {} request = pamqp.commands.Queue.Bind( queue=queue_name, exchange=exchange_name, routing_key=routing_key, nowait=no_wait, arguments=arguments ) # short reserved-1 async with self._queue_bind_lock: return (await self._write_frame_awaiting_response( 'queue_bind', self.channel_id, request, no_wait)) async def queue_bind_ok(self, frame): future = self._get_waiter('queue_bind') future.set_result(True) logger.debug("Queue bound") async def queue_unbind(self, queue_name, exchange_name, routing_key, arguments=None): if arguments is None: arguments = {} request = pamqp.commands.Queue.Unbind( queue=queue_name, exchange=exchange_name, routing_key=routing_key, arguments=arguments ) return (await self._write_frame_awaiting_response( 'queue_unbind', self.channel_id, request, no_wait=False)) async def queue_unbind_ok(self, frame): future = self._get_waiter('queue_unbind') future.set_result(True) logger.debug("Queue unbound") async def queue_purge(self, queue_name, no_wait=False): request = pamqp.commands.Queue.Purge( queue=queue_name, nowait=no_wait ) return (await self._write_frame_awaiting_response( 'queue_purge', self.channel_id, request, no_wait=no_wait)) async def queue_purge_ok(self, frame): future = self._get_waiter('queue_purge') future.set_result({'message_count': frame.message_count}) # ## Basic class implementation # async def basic_publish(self, payload, exchange_name, routing_key, 
properties=None, mandatory=False, immediate=False): if not isinstance(payload, (bytes, bytearray, memoryview)): raise ValueError('payload must be bytes type') if properties is None: properties = {} method_request = pamqp.commands.Basic.Publish( exchange=exchange_name, routing_key=routing_key, mandatory=mandatory, immediate=immediate ) await self._write_frame(self.channel_id, method_request, drain=False) header_request = pamqp.header.ContentHeader( body_size=len(payload), properties=pamqp.commands.Basic.Properties(**properties) ) await self._write_frame(self.channel_id, header_request, drain=False) # split the payload frame_max = self.protocol.server_frame_max or len(payload) for chunk in (payload[0+i:frame_max+i] for i in range(0, len(payload), frame_max)): content_request = pamqp.body.ContentBody(chunk) await self._write_frame(self.channel_id, content_request, drain=False) await self.protocol._drain() async def basic_qos(self, prefetch_size=0, prefetch_count=0, connection_global=False): """Specifies quality of service. Args: prefetch_size: int, request that messages be sent in advance so that when the client finishes processing a message, the following message is already held locally prefetch_count: int: Specifies a prefetch window in terms of whole messages. This field may be used in combination with the prefetch-size field; a message will only be sent in advance if both prefetch windows (and those at the channel and connection level) allow it connection_global: bool: global=false means that the QoS settings should apply per-consumer channel; and global=true to mean that the QoS settings should apply per-channel. 
""" request = pamqp.commands.Basic.Qos( prefetch_size, prefetch_count, connection_global ) return (await self._write_frame_awaiting_response( 'basic_qos', self.channel_id, request, no_wait=False) ) async def basic_qos_ok(self, frame): future = self._get_waiter('basic_qos') future.set_result(True) logger.debug("Qos ok") async def basic_server_nack(self, frame, delivery_tag=None): if delivery_tag is None: delivery_tag = frame.delivery_tag fut = self._get_waiter(f'basic_server_ack_{delivery_tag}') logger.debug('Received nack for delivery tag %r', delivery_tag) fut.set_exception(exceptions.PublishFailed(delivery_tag)) async def basic_consume(self, callback, queue_name='', consumer_tag='', no_local=False, no_ack=False, exclusive=False, no_wait=False, arguments=None): """Starts the consumption of message into a queue. the callback will be called each time we're receiving a message. Args: callback: coroutine, the called callback queue_name: str, the queue to receive message from consumer_tag: str, optional consumer tag no_local: bool, if set the server will not send messages to the connection that published them. 
no_ack: bool, if set the server does not expect acknowledgements for messages exclusive: bool, request exclusive consumer access, meaning only this consumer can access the queue no_wait: bool, if set, the server will not respond to the method arguments: dict, AMQP arguments to be passed to the server """ # If a consumer tag was not passed, create one consumer_tag = consumer_tag or f'ctag{self.channel_id}.{uuid.uuid4().hex}' if arguments is None: arguments = {} request = pamqp.commands.Basic.Consume( queue=queue_name, consumer_tag=consumer_tag, no_local=no_local, no_ack=no_ack, exclusive=exclusive, nowait=no_wait, arguments=arguments ) self.consumer_callbacks[consumer_tag] = callback self.last_consumer_tag = consumer_tag return_value = await self._write_frame_awaiting_response( 'basic_consume' + consumer_tag, self.channel_id, request, no_wait) if no_wait: return_value = {'consumer_tag': consumer_tag} else: self._ctag_events[consumer_tag].set() return return_value async def basic_consume_ok(self, frame): ctag = frame.consumer_tag results = { 'consumer_tag': ctag, } future = self._get_waiter('basic_consume' + ctag) future.set_result(results) self._ctag_events[ctag] = asyncio.Event() async def basic_deliver(self, frame): consumer_tag = frame.consumer_tag delivery_tag = frame.delivery_tag is_redeliver = frame.redelivered exchange_name = frame.exchange routing_key = frame.routing_key _channel, content_header_frame = await self.protocol.get_frame() buffer = io.BytesIO() while(buffer.tell() < content_header_frame.body_size): _channel, content_body_frame = await self.protocol.get_frame() buffer.write(content_body_frame.value) body = buffer.getvalue() envelope = Envelope(consumer_tag, delivery_tag, exchange_name, routing_key, is_redeliver) properties = amqp_properties.from_pamqp(content_header_frame.properties) callback = self.consumer_callbacks[consumer_tag] event = self._ctag_events.get(consumer_tag) if event: await event.wait() del self._ctag_events[consumer_tag] await 
callback(self, body, envelope, properties) async def server_basic_cancel(self, frame): # https://www.rabbitmq.com/consumer-cancel.html consumer_tag = frame.consumer_tag _no_wait = frame.nowait self.cancelled_consumers.add(consumer_tag) logger.info("consume cancelled received") for callback in self.cancellation_callbacks: try: await callback(self, consumer_tag) except Exception as error: # pylint: disable=broad-except logger.error("cancellation callback %r raised exception %r", callback, error) async def basic_cancel(self, consumer_tag, no_wait=False): request = pamqp.commands.Basic.Cancel(consumer_tag, no_wait) return (await self._write_frame_awaiting_response( 'basic_cancel' + consumer_tag, self.channel_id, request, no_wait=no_wait) ) async def basic_cancel_ok(self, frame): results = { 'consumer_tag': frame.consumer_tag, } future = self._get_waiter('basic_cancel' + frame.consumer_tag) future.set_result(results) logger.debug("Cancel ok") async def basic_get(self, queue_name='', no_ack=False): request = pamqp.commands.Basic.Get(queue=queue_name, no_ack=no_ack) return (await self._write_frame_awaiting_response( 'basic_get', self.channel_id, request, no_wait=False) ) async def basic_get_ok(self, frame): data = { 'delivery_tag': frame.delivery_tag, 'redelivered': frame.redelivered, 'exchange_name': frame.exchange, 'routing_key': frame.routing_key, 'message_count': frame.message_count, } _channel, content_header_frame = await self.protocol.get_frame() buffer = io.BytesIO() while(buffer.tell() < content_header_frame.body_size): _channel, content_body_frame = await self.protocol.get_frame() buffer.write(content_body_frame.value) data['message'] = buffer.getvalue() data['properties'] = amqp_properties.from_pamqp(content_header_frame.properties) future = self._get_waiter('basic_get') future.set_result(data) async def basic_get_empty(self, frame): future = self._get_waiter('basic_get') future.set_exception(exceptions.EmptyQueue) async def basic_client_ack(self, delivery_tag, 
multiple=False): request = pamqp.commands.Basic.Ack(delivery_tag, multiple) await self._write_frame(self.channel_id, request) async def basic_client_nack(self, delivery_tag, multiple=False, requeue=True): request = pamqp.commands.Basic.Nack(delivery_tag, multiple, requeue) await self._write_frame(self.channel_id, request) async def basic_server_ack(self, frame): delivery_tag = frame.delivery_tag fut = self._get_waiter(f'basic_server_ack_{delivery_tag}') logger.debug('Received ack for delivery tag %s', delivery_tag) fut.set_result(True) async def basic_reject(self, delivery_tag, requeue=False): request = pamqp.commands.Basic.Reject(delivery_tag, requeue) await self._write_frame(self.channel_id, request) async def basic_recover_async(self, requeue=True): request = pamqp.commands.Basic.RecoverAsync(requeue) await self._write_frame(self.channel_id, request) async def basic_recover(self, requeue=True): request = pamqp.commands.Basic.Recover(requeue) return (await self._write_frame_awaiting_response( 'basic_recover', self.channel_id, request, no_wait=False) ) async def basic_recover_ok(self, frame): future = self._get_waiter('basic_recover') future.set_result(True) logger.debug("Cancel ok") async def basic_return(self, frame): reply_code = frame.reply_code reply_text = frame.reply_text exchange_name = frame.exchange routing_key = frame.routing_key _channel, content_header_frame = await self.protocol.get_frame() buffer = io.BytesIO() while(buffer.tell() < content_header_frame.body_size): _channel, content_body_frame = await self.protocol.get_frame() buffer.write(content_body_frame.value) body = buffer.getvalue() envelope = ReturnEnvelope(reply_code, reply_text, exchange_name, routing_key) properties = amqp_properties.from_pamqp(content_header_frame.properties) callback = self.return_callback if callback is None: # they have set mandatory bit, but havent added a callback logger.warning('You have received a returned message, but dont have a callback registered for returns.' 
' Please set channel.return_callback') else: await callback(self, body, envelope, properties) # ## convenient aliases # queue = queue_declare exchange = exchange_declare async def publish(self, payload, exchange_name, routing_key, properties=None, mandatory=False, immediate=False): if isinstance(payload, str): warnings.warn("Str payload support will be removed in next release", DeprecationWarning) payload = payload.encode() if properties is None: properties = {} if self.publisher_confirms: delivery_tag = next(self.delivery_tag_iter) # pylint: disable=stop-iteration-return fut = self._set_waiter(f'basic_server_ack_{delivery_tag}') method_request = pamqp.commands.Basic.Publish( exchange=exchange_name, routing_key=routing_key, mandatory=mandatory, immediate=immediate ) await self._write_frame(self.channel_id, method_request, drain=False) properties = pamqp.commands.Basic.Properties(**properties) header_request = pamqp.header.ContentHeader( body_size=len(payload), properties=properties ) await self._write_frame(self.channel_id, header_request, drain=False) # split the payload frame_max = self.protocol.server_frame_max or len(payload) for chunk in (payload[0+i:frame_max+i] for i in range(0, len(payload), frame_max)): content_request = pamqp.body.ContentBody(chunk) await self._write_frame(self.channel_id, content_request, drain=False) await self.protocol._drain() if self.publisher_confirms: await fut async def confirm_select(self, *, no_wait=False): if self.publisher_confirms: raise ValueError('publisher confirms already enabled') request = pamqp.commands.Confirm.Select(nowait=no_wait) return (await self._write_frame_awaiting_response( 'confirm_select', self.channel_id, request, no_wait) ) async def confirm_select_ok(self, frame): self.publisher_confirms = True self.delivery_tag_iter = count(1) fut = self._get_waiter('confirm_select') fut.set_result(True) logger.debug("Confirm selected") def add_cancellation_callback(self, callback): """Add a callback that is invoked 
when a consumer is cancelled. :param callback: function to call `callback` is called with the channel and consumer tag as positional parameters. The callback can be either a plain callable or an asynchronous co-routine. """ self.cancellation_callbacks.append(callback) aioamqp-aioamqp-0.15.0/aioamqp/constants.py000066400000000000000000000042561422314215600206750ustar00rootroot00000000000000""" Defines our constants """ PROTOCOL_DEFAULT_TIMEOUT = 60 PROTOCOL_DEFAULT_PORT = 5672 PROTOCOL_HEADER = b'AMQP\x01\x01\x00\x09' MAX_CHANNELS = 65535 # protocol TYPE_METHOD = 1 TYPE_HEADER = 2 TYPE_BODY = 3 TYPE_HEARTBEAT = 8 FRAME_END = b'\xce' # classes CLASS_CONNECTION = 10 CLASS_CHANNEL = 20 CLASS_EXCHANGE = 40 CLASS_QUEUE = 50 CLASS_BASIC = 60 CLASS_TX = 90 CLASS_CONFIRM = 85 CONNECTION_START = 10 CONNECTION_START_OK = 11 CONNECTION_SECURE = 20 CONNECTION_SECURE_OK = 21 CONNECTION_TUNE = 30 CONNECTION_TUNE_OK = 31 CONNECTION_OPEN = 40 CONNECTION_OPEN_OK = 41 CONNECTION_CLOSE = 50 CONNECTION_CLOSE_OK = 51 CHANNEL_OPEN = 10 CHANNEL_OPEN_OK = 11 CHANNEL_FLOW = 20 CHANNEL_FLOW_OK = 21 CHANNEL_CLOSE = 40 CHANNEL_CLOSE_OK = 41 EXCHANGE_DECLARE = 10 EXCHANGE_DECLARE_OK = 11 EXCHANGE_DELETE = 20 EXCHANGE_DELETE_OK = 21 EXCHANGE_BIND = 30 EXCHANGE_BIND_OK = 31 EXCHANGE_UNBIND = 40 EXCHANGE_UNBIND_OK = 51 QUEUE_DECLARE = 10 QUEUE_DECLARE_OK = 11 QUEUE_BIND = 20 QUEUE_BIND_OK = 21 QUEUE_UNBIND = 50 QUEUE_UNBIND_OK = 51 QUEUE_PURGE = 30 QUEUE_PURGE_OK = 31 QUEUE_DELETE = 40 QUEUE_DELETE_OK = 41 BASIC_QOS = 10 BASIC_QOS_OK = 11 BASIC_CONSUME = 20 BASIC_CONSUME_OK = 21 BASIC_CANCEL = 30 BASIC_CANCEL_OK = 31 BASIC_PUBLISH = 40 BASIC_RETURN = 50 BASIC_DELIVER = 60 BASIC_GET = 70 BASIC_GET_OK = 71 BASIC_GET_EMPTY = 72 BASIC_ACK = 80 BASIC_REJECT = 90 BASIC_RECOVER_ASYNC = 100 BASIC_RECOVER = 110 BASIC_RECOVER_OK = 111 BASIC_NACK = 120 TX_SELECT = 10 TX_SELECT_OK = 11 TX_COMMIT = 20 TX_COMMIT_OK = 21 TX_ROLLBACK = 30 TX_ROLLBACK_OK = 31 CONFIRM_SELECT = 10 CONFIRM_SELECT_OK = 11 
MESSAGE_PROPERTIES = ( 'content_type', 'content_encoding', 'headers', 'delivery_mode', 'priority', 'correlation_id', 'reply_to', 'expiration', 'message_id', 'timestamp', 'message_type', 'user_id', 'app_id', 'cluster_id', ) FLAG_CONTENT_TYPE = (1 << 15) FLAG_CONTENT_ENCODING = (1 << 14) FLAG_HEADERS = (1 << 13) FLAG_DELIVERY_MODE = (1 << 12) FLAG_PRIORITY = (1 << 11) FLAG_CORRELATION_ID = (1 << 10) FLAG_REPLY_TO = (1 << 9) FLAG_EXPIRATION = (1 << 8) FLAG_MESSAGE_ID = (1 << 7) FLAG_TIMESTAMP = (1 << 6) FLAG_TYPE = (1 << 5) FLAG_USER_ID = (1 << 4) FLAG_APP_ID = (1 << 3) FLAG_CLUSTER_ID = (1 << 2) aioamqp-aioamqp-0.15.0/aioamqp/envelope.py000066400000000000000000000015751422314215600204770ustar00rootroot00000000000000""" Delivery of messages to consumers """ class Envelope: """Class for basic deliver message fields""" __slots__ = ('consumer_tag', 'delivery_tag', 'exchange_name', 'routing_key', 'is_redeliver') def __init__(self, consumer_tag, delivery_tag, exchange_name, routing_key, is_redeliver): self.consumer_tag = consumer_tag self.delivery_tag = delivery_tag self.exchange_name = exchange_name self.routing_key = routing_key self.is_redeliver = is_redeliver class ReturnEnvelope: """ Class for basic return message fields""" __slots__ = ('reply_code', 'reply_text', 'exchange_name', 'routing_key') def __init__(self, reply_code, reply_text, exchange_name, routing_key): self.reply_code = reply_code self.reply_text = reply_text self.exchange_name = exchange_name self.routing_key = routing_key aioamqp-aioamqp-0.15.0/aioamqp/exceptions.py000066400000000000000000000023251422314215600210350ustar00rootroot00000000000000""" aioamqp exceptions """ class AioamqpException(Exception): pass class AmqpClosedConnection(AioamqpException): pass class SynchronizationError(AioamqpException): pass class EmptyQueue(AioamqpException): pass class NoChannelAvailable(AioamqpException): """There is no room left for more channels""" class ChannelClosed(AioamqpException): def __init__(self, code=0, 
message='Channel is closed'): super().__init__(code, message) self.code = code self.message = message class DuplicateConsumerTag(AioamqpException): def __repr__(self): # pylint: disable=unsubscriptable-object return (f'The consumer tag specified already exists for this ' f'channel: {self.args[0]}') class ConsumerCancelled(AioamqpException): def __repr__(self): # pylint: disable=unsubscriptable-object return (f'The consumer {self.args[0]} has been cancelled') class PublishFailed(AioamqpException): def __init__(self, delivery_tag): super().__init__(delivery_tag) self.delivery_tag = delivery_tag def __repr__(self): return f'Publish failed because a nack was received for delivery_tag {self.delivery_tag}' aioamqp-aioamqp-0.15.0/aioamqp/frame.py000066400000000000000000000054771422314215600177610ustar00rootroot00000000000000""" Helper class to decode AMQP responses AMQP Frame implementations 0 1 3 7 size+7 size+8 +------+---------+---------+ +-------------+ +-----------+ | type | channel | size | | payload | | frame-end | +------+---------+---------+ +-------------+ +-----------+ octets short long 'size' octets octet The frame-end octet MUST always be the hexadecimal value %xCE type: Type = 1, "METHOD": method frame. Type = 2, "HEADER": content header frame. Type = 3, "BODY": content body frame. Type = 4, "HEARTBEAT": heartbeat frame. Method Payload 0 2 4 +----------+-----------+-------------- - - | class-id | method-id | arguments... +----------+-----------+-------------- - - short short ... Content Payload 0 2 4 12 14 +----------+--------+-----------+----------------+------------- - - | class-id | weight | body size | property flags | property list... +----------+--------+-----------+----------------+------------- - - short short long long short remainder... """ import asyncio import pamqp.encode import pamqp.frame from . import exceptions from . 
import constants as amqp_constants DUMP_FRAMES = False def write(writer, channel, encoder): """Writes the built frame from the encoder writer: asyncio StreamWriter channel: amqp Channel identifier encoder: frame encoder from pamqp which can be marshalled Returns int, the number of bytes written. """ return writer.write(pamqp.frame.marshal(encoder, channel)) async def read(reader): """Read a new frame from the wire reader: asyncio StreamReader Returns (channel, frame) a tuple containing both channel and the pamqp frame, the object describing the frame """ if not reader: raise exceptions.AmqpClosedConnection() try: data = await reader.readexactly(7) except (asyncio.IncompleteReadError, OSError) as ex: raise exceptions.AmqpClosedConnection() from ex frame_type, channel, frame_length = pamqp.frame.frame_parts(data) payload_data = await reader.readexactly(frame_length) frame = None if frame_type == amqp_constants.TYPE_METHOD: frame = pamqp.frame._unmarshal_method_frame(payload_data) elif frame_type == amqp_constants.TYPE_HEADER: frame = pamqp.frame._unmarshal_header_frame(payload_data) elif frame_type == amqp_constants.TYPE_BODY: frame = pamqp.frame._unmarshal_body_frame(payload_data) elif frame_type == amqp_constants.TYPE_HEARTBEAT: frame = pamqp.heartbeat.Heartbeat() frame_end = await reader.readexactly(1) assert frame_end == amqp_constants.FRAME_END return channel, frame aioamqp-aioamqp-0.15.0/aioamqp/properties.py000066400000000000000000000035131422314215600210500ustar00rootroot00000000000000# pylint: disable=redefined-builtin import datetime from .constants import MESSAGE_PROPERTIES class Properties: """Class for basic message properties""" __slots__ = tuple(MESSAGE_PROPERTIES) def __init__( self, content_type=None, content_encoding=None, headers=None, delivery_mode=None, priority=None, correlation_id=None, reply_to=None, expiration=None, message_id=None, timestamp=None, message_type=None, user_id=None, app_id=None, cluster_id=None): self.content_type = 
content_type self.content_encoding = content_encoding self.headers = headers self.delivery_mode = delivery_mode self.priority = priority self.correlation_id = correlation_id self.reply_to = reply_to self.expiration = expiration self.message_id = message_id self.timestamp = timestamp self.message_type = message_type self.user_id = user_id self.app_id = app_id self.cluster_id = cluster_id def from_pamqp(instance): props = Properties() props.content_type = instance.content_type props.content_encoding = instance.content_encoding props.headers = instance.headers props.delivery_mode = instance.delivery_mode props.priority = instance.priority props.correlation_id = instance.correlation_id props.reply_to = instance.reply_to props.expiration = instance.expiration props.message_id = instance.message_id if instance.timestamp is not None: # pamqp uses naive datetimes representing UTC, let's use TZ-aware datetimes props.timestamp = instance.timestamp.replace(tzinfo=datetime.timezone.utc) props.message_type = instance.message_type props.user_id = instance.user_id props.app_id = instance.app_id props.cluster_id = instance.cluster_id return props aioamqp-aioamqp-0.15.0/aioamqp/protocol.py000066400000000000000000000430521422314215600205170ustar00rootroot00000000000000""" Amqp Protocol """ import asyncio import logging import pamqp.commands import pamqp.frame import pamqp.heartbeat from . import channel as amqp_channel from . import constants as amqp_constants from . import frame as amqp_frame from . import exceptions from . 
import version logger = logging.getLogger(__name__) CONNECTING, OPEN, CLOSING, CLOSED = range(4) class _StreamWriter(asyncio.StreamWriter): def write(self, data): super().write(data) self._protocol._heartbeat_timer_send_reset() def writelines(self, data): super().writelines(data) self._protocol._heartbeat_timer_send_reset() def write_eof(self): ret = super().write_eof() self._protocol._heartbeat_timer_send_reset() return ret class AmqpProtocol(asyncio.StreamReaderProtocol): """The AMQP protocol for asyncio. See http://docs.python.org/3.4/library/asyncio-protocol.html#protocols for more information on asyncio's protocol API. """ CHANNEL_FACTORY = amqp_channel.Channel def __init__(self, *args, **kwargs): """Defines our new protocol instance Args: channel_max: int, specifies highest channel number that the server permits. Usable channel numbers are in the range 1..channel-max. Zero indicates no specified limit. frame_max: int, the largest frame size that the server proposes for the connection, including frame header and end-byte. The client can negotiate a lower value. Zero means that the server does not impose any specific limit but may reject very large frames if it cannot allocate resources for them. heartbeat: int, the delay, in seconds, of the connection heartbeat that the server wants. Zero means the server does not want a heartbeat. 
client_properties: dict, client-props to tune the client identification """ self._reader = asyncio.StreamReader() super().__init__(self._reader) self._on_error_callback = kwargs.get('on_error') self.client_properties = kwargs.get('client_properties', {}) self.connection_tunning = {} if 'channel_max' in kwargs: self.connection_tunning['channel_max'] = kwargs.get('channel_max') if 'frame_max' in kwargs: self.connection_tunning['frame_max'] = kwargs.get('frame_max') if 'heartbeat' in kwargs: self.connection_tunning['heartbeat'] = kwargs.get('heartbeat') self.connecting = asyncio.Future() self.connection_closed = asyncio.Event() self.stop_now = asyncio.Event() self.state = CONNECTING self.version_major = None self.version_minor = None self.server_properties = None self.server_mechanisms = None self.server_locales = None self.worker = None self.server_heartbeat = None self._heartbeat_last_recv = None self._heartbeat_last_send = None self._heartbeat_recv_worker = None self._heartbeat_send_worker = None self.channels = {} self.server_frame_max = None self.server_channel_max = None self.channels_ids_ceil = 0 self.channels_ids_free = set() self._drain_lock = asyncio.Lock() def connection_made(self, transport): super().connection_made(transport) self._stream_writer = _StreamWriter(transport, self, self._stream_reader, loop=asyncio.get_running_loop()) def eof_received(self): super().eof_received() # Python 3.5+ started returning True here to keep the transport open. # We really couldn't care less so keep the behavior from 3.4 to make # sure connection_lost() is called. 
return False def connection_lost(self, exc): if exc is not None: logger.warning("Connection lost exc=%r", exc) self.connection_closed.set() self.state = CLOSED self._close_channels(exception=exc) self._heartbeat_stop() super().connection_lost(exc) def data_received(self, data): self._heartbeat_timer_recv_reset() super().data_received(data) async def ensure_open(self): # Raise a suitable exception if the connection isn't open. # Handle cases from the most common to the least common. if self.state == OPEN: return if self.state == CLOSED: raise exceptions.AmqpClosedConnection() # If the closing handshake is in progress, let it complete. if self.state == CLOSING: await self.wait_closed() raise exceptions.AmqpClosedConnection() # Control may only reach this point in buggy third-party subclasses. assert self.state == CONNECTING raise exceptions.AioamqpException("connection isn't established yet.") async def _drain(self): async with self._drain_lock: # drain() cannot be called concurrently by multiple coroutines: # http://bugs.python.org/issue29930. Remove this lock when no # version of Python where this bugs exists is supported anymore. 
await self._stream_writer.drain() async def _write_frame(self, channel_id, request, drain=True): amqp_frame.write(self._stream_writer, channel_id, request) if drain: await self._drain() async def close(self, no_wait=False, timeout=None): """Close connection (and all channels)""" await self.ensure_open() self.state = CLOSING request = pamqp.commands.Connection.Close( reply_code=0, reply_text='', class_id=0, method_id=0 ) await self._write_frame(0, request) if not no_wait: await self.wait_closed(timeout=timeout) async def wait_closed(self, timeout=None): await asyncio.wait_for(self.connection_closed.wait(), timeout=timeout) if self._heartbeat_send_worker is not None: try: await asyncio.wait_for(self._heartbeat_send_worker, timeout=timeout) except asyncio.CancelledError: pass async def start_connection(self, host, port, login, password, virtualhost, ssl=False, login_method='PLAIN', insist=False): """Initiate a connection at the protocol level We send `PROTOCOL_HEADER' """ if login_method != 'PLAIN': logger.warning('login_method %s is not supported, falling back to PLAIN', login_method) self._stream_writer.write(amqp_constants.PROTOCOL_HEADER) # Wait 'start' method from the server await self.dispatch_frame() client_properties = { 'capabilities': { 'consumer_cancel_notify': True, 'connection.blocked': False, }, 'copyright': 'BSD', 'product': version.__package__, 'product_version': version.__version__, } client_properties.update(self.client_properties) auth = { 'LOGIN': login, 'PASSWORD': password, } # waiting reply start with credentions and co await self.start_ok(client_properties, 'PLAIN', auth, self.server_locales) # wait for a "tune" reponse await self.dispatch_frame() tune_ok = { 'channel_max': self.connection_tunning.get('channel_max', self.server_channel_max), 'frame_max': self.connection_tunning.get('frame_max', self.server_frame_max), 'heartbeat': self.connection_tunning.get('heartbeat', self.server_heartbeat), } # "tune" the connexion with max channel, max 
frame, heartbeat await self.tune_ok(**tune_ok) # update connection tunning values self.server_frame_max = tune_ok['frame_max'] self.server_channel_max = tune_ok['channel_max'] self.server_heartbeat = tune_ok['heartbeat'] if self.server_heartbeat > 0: self._heartbeat_timer_recv_reset() self._heartbeat_timer_send_reset() # open a virtualhost await self.open(virtualhost, capabilities='', insist=insist) # wait for open-ok channel, frame = await self.get_frame() await self.dispatch_frame(channel, frame) await self.ensure_open() # for now, we read server's responses asynchronously self.worker = asyncio.ensure_future(self.run()) async def get_frame(self): """Read the frame, and only decode its header """ return await amqp_frame.read(self._stream_reader) async def dispatch_frame(self, frame_channel=None, frame=None): """Dispatch the received frame to the corresponding handler""" method_dispatch = { pamqp.commands.Connection.Close.name: self.server_close, pamqp.commands.Connection.CloseOk.name: self.close_ok, pamqp.commands.Connection.Tune.name: self.tune, pamqp.commands.Connection.Start.name: self.start, pamqp.commands.Connection.OpenOk.name: self.open_ok, } if frame_channel is None and frame is None: frame_channel, frame = await self.get_frame() if isinstance(frame, pamqp.heartbeat.Heartbeat): return if frame_channel != 0: channel = self.channels.get(frame_channel) if channel is not None: await channel.dispatch_frame(frame) else: logger.info("Unknown channel %s", frame_channel) return if frame.name not in method_dispatch: logger.info("frame %s is not handled", frame.name) return await method_dispatch[frame.name](frame) def release_channel_id(self, channel_id): """Called from the channel instance, it relase a previously used channel_id """ self.channels_ids_free.add(channel_id) @property def channels_ids_count(self): return self.channels_ids_ceil - len(self.channels_ids_free) def _close_channels(self, reply_code=None, reply_text=None, exception=None): """Cleanly close 
channels Args: reply_code: int, the amqp error code reply_text: str, the text associated to the error_code exc: the exception responsible of this error """ if exception is None: exception = exceptions.ChannelClosed(reply_code, reply_text) if self._on_error_callback: if asyncio.iscoroutinefunction(self._on_error_callback): asyncio.ensure_future(self._on_error_callback(exception)) else: self._on_error_callback(exceptions.ChannelClosed(exception)) for channel in self.channels.values(): channel.connection_closed(reply_code, reply_text, exception) async def run(self): while not self.stop_now.is_set(): try: await self.dispatch_frame() except exceptions.AmqpClosedConnection as exc: logger.info("Close connection") self.stop_now.set() self._close_channels(exception=exc) except Exception: # pylint: disable=broad-except logger.exception('error on dispatch') async def heartbeat(self): """ deprecated heartbeat coroutine This coroutine is now a no-op as the heartbeat is handled directly by the rest of the AmqpProtocol class. This is kept around for backwards compatibility purposes only. """ await self.stop_now.wait() async def send_heartbeat(self): """Sends an heartbeat message. 
It can be an ack for the server or the client willing to check for the connexion timeout """ request = pamqp.heartbeat.Heartbeat() await self._write_frame(0, request) def _heartbeat_timer_recv_reset(self): if self.server_heartbeat is None or self.server_heartbeat == 0: return self._heartbeat_last_recv = asyncio.get_running_loop().time() if self._heartbeat_recv_worker is None: self._heartbeat_recv_worker = asyncio.ensure_future(self._heartbeat_recv()) def _heartbeat_timer_send_reset(self): if self.server_heartbeat is None or self.server_heartbeat == 0: return self._heartbeat_last_send = asyncio.get_running_loop().time() if self._heartbeat_send_worker is None: self._heartbeat_send_worker = asyncio.ensure_future(self._heartbeat_send()) def _heartbeat_stop(self): self.server_heartbeat = None if self._heartbeat_recv_worker is not None: self._heartbeat_recv_worker.cancel() if self._heartbeat_send_worker is not None: self._heartbeat_send_worker.cancel() async def _heartbeat_recv(self): # 4.2.7 If a peer detects no incoming traffic (i.e. received octets) for # two heartbeat intervals or longer, it should close the connection # without following the Connection.Close/Close-Ok handshaking, and log # an error. 
# TODO(rcardona) raise a "timeout" exception somewhere now = asyncio.get_running_loop().time() time_since_last_recv = now - self._heartbeat_last_recv while self.state != CLOSED: sleep_for = self.server_heartbeat * 2 - time_since_last_recv await asyncio.sleep(sleep_for) now = asyncio.get_running_loop().time() time_since_last_recv = now - self._heartbeat_last_recv if time_since_last_recv >= self.server_heartbeat * 2: self._stream_writer.close() async def _heartbeat_send(self): now = asyncio.get_running_loop().time() time_since_last_send = now - self._heartbeat_last_send while self.state != CLOSED: sleep_for = self.server_heartbeat - time_since_last_send await asyncio.sleep(sleep_for) now = asyncio.get_running_loop().time() time_since_last_send = now - self._heartbeat_last_send if time_since_last_send >= self.server_heartbeat: await self.send_heartbeat() time_since_last_send = now - self._heartbeat_last_send # Amqp specific methods async def start(self, frame): """Method sent from the server to begin a new connection""" self.version_major = frame.version_major self.version_minor = frame.version_minor self.server_properties = frame.server_properties self.server_mechanisms = frame.mechanisms self.server_locales = frame.locales async def start_ok(self, client_properties, mechanism, auth, locale): def credentials(): return f'\0{auth["LOGIN"]}\0{auth["PASSWORD"]}' request = pamqp.commands.Connection.StartOk( client_properties=client_properties, mechanism=mechanism, locale=locale, response=credentials() ) await self._write_frame(0, request) async def close_ok(self, frame): """In response to server close confirmation""" self.stop_now.set() self._stream_writer.close() async def server_close(self, frame): """The server is closing the connection""" self.state = CLOSING reply_code = frame.reply_code reply_text = frame.reply_text class_id = frame.class_id method_id = frame.method_id logger.warning("Server closed connection: %s, code=%s, class_id=%s, method_id=%s", reply_text, 
reply_code, class_id, method_id) self._close_channels(reply_code, reply_text) await self._close_ok() self.stop_now.set() self._stream_writer.close() async def _close_ok(self): """Send client close confirmation""" request = pamqp.commands.Connection.CloseOk() await self._write_frame(0, request) async def tune(self, frame): self.server_channel_max = frame.channel_max self.server_frame_max = frame.frame_max self.server_heartbeat = frame.heartbeat async def tune_ok(self, channel_max, frame_max, heartbeat): request = pamqp.commands.Connection.TuneOk( channel_max, frame_max, heartbeat ) await self._write_frame(0, request) async def secure_ok(self, login_response): pass async def open(self, virtual_host, capabilities='', insist=False): """Open connection to virtual host.""" request = pamqp.commands.Connection.Open( virtual_host, capabilities, insist ) await self._write_frame(0, request) async def open_ok(self, frame): self.state = OPEN logger.info("Recv open ok") # ## aioamqp public methods # async def channel(self, **kwargs): """Factory to create a new channel """ await self.ensure_open() try: channel_id = self.channels_ids_free.pop() except KeyError as ex: assert self.server_channel_max is not None, 'connection channel-max tuning not performed' # channel-max = 0 means no limit if self.server_channel_max and self.channels_ids_ceil > self.server_channel_max: raise exceptions.NoChannelAvailable() from ex self.channels_ids_ceil += 1 channel_id = self.channels_ids_ceil channel = self.CHANNEL_FACTORY(self, channel_id, **kwargs) self.channels[channel_id] = channel await channel.open() return channel aioamqp-aioamqp-0.15.0/aioamqp/tests/000077500000000000000000000000001422314215600174425ustar00rootroot00000000000000aioamqp-aioamqp-0.15.0/aioamqp/tests/__init__.py000066400000000000000000000000001422314215600215410ustar00rootroot00000000000000aioamqp-aioamqp-0.15.0/aioamqp/tests/test_basic.py000066400000000000000000000161211422314215600221350ustar00rootroot00000000000000""" Amqp 
basic class tests """ import asyncio import asynctest from . import testcase from .. import exceptions from .. import properties class QosTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def test_basic_qos_default_args(self): result = await self.channel.basic_qos() self.assertTrue(result) async def test_basic_qos(self): result = await self.channel.basic_qos( prefetch_size=0, prefetch_count=100, connection_global=False) self.assertTrue(result) async def test_basic_qos_prefetch_size(self): with self.assertRaises(exceptions.ChannelClosed) as cm: await self.channel.basic_qos( prefetch_size=10, prefetch_count=100, connection_global=False) self.assertEqual(cm.exception.code, 540) async def test_basic_qos_wrong_values(self): with self.assertRaises(TypeError): await self.channel.basic_qos( prefetch_size=100000, prefetch_count=1000000000, connection_global=False) class BasicCancelTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def test_basic_cancel(self): async def callback(channel, body, envelope, _properties): pass queue_name = 'queue_name' exchange_name = 'exchange_name' await self.channel.queue_declare(queue_name) await self.channel.exchange_declare(exchange_name, type_name='direct') await self.channel.queue_bind(queue_name, exchange_name, routing_key='') result = await self.channel.basic_consume(callback, queue_name=queue_name) result = await self.channel.basic_cancel(result['consumer_tag']) result = await self.channel.publish("payload", exchange_name, routing_key='') await asyncio.sleep(5) result = await self.channel.queue_declare(queue_name, passive=True) self.assertEqual(result['message_count'], 1) self.assertEqual(result['consumer_count'], 0) async def test_basic_cancel_unknown_ctag(self): result = await self.channel.basic_cancel("unknown_ctag") self.assertTrue(result) class BasicGetTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def test_basic_get(self): queue_name = 'queue_name' exchange_name = 'exchange_name' 
routing_key = '' await self.channel.queue_declare(queue_name) await self.channel.exchange_declare(exchange_name, type_name='direct') await self.channel.queue_bind(queue_name, exchange_name, routing_key=routing_key) await self.channel.publish("payload", exchange_name, routing_key=routing_key) result = await self.channel.basic_get(queue_name) self.assertEqual(result['routing_key'], routing_key) self.assertFalse(result['redelivered']) self.assertIn('delivery_tag', result) self.assertEqual(result['exchange_name'].split('.')[-1], exchange_name) self.assertEqual(result['message'], b'payload') self.assertIsInstance(result['properties'], properties.Properties) async def test_basic_get_empty(self): queue_name = 'queue_name' exchange_name = 'exchange_name' routing_key = '' await self.channel.queue_declare(queue_name) await self.channel.exchange_declare(exchange_name, type_name='direct') await self.channel.queue_bind(queue_name, exchange_name, routing_key=routing_key) with self.assertRaises(exceptions.EmptyQueue): await self.channel.basic_get(queue_name) class BasicDeliveryTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def publish(self, queue_name, exchange_name, routing_key, payload): await self.channel.queue_declare(queue_name, exclusive=False, no_wait=False) await self.channel.exchange_declare(exchange_name, type_name='fanout') await self.channel.queue_bind(queue_name, exchange_name, routing_key=routing_key) await self.channel.publish(payload, exchange_name, queue_name) async def test_ack_message(self): queue_name = 'queue_name' exchange_name = 'exchange_name' routing_key = '' await self.publish( queue_name, exchange_name, routing_key, "payload" ) qfuture = asyncio.Future() async def qcallback(channel, body, envelope, _properties): qfuture.set_result(envelope) await self.channel.basic_consume(qcallback, queue_name=queue_name) envelope = await qfuture await qfuture await self.channel.basic_client_ack(envelope.delivery_tag) async def test_basic_nack(self): 
queue_name = 'queue_name' exchange_name = 'exchange_name' routing_key = '' await self.publish( queue_name, exchange_name, routing_key, "payload" ) qfuture = asyncio.Future() async def qcallback(channel, body, envelope, _properties): await self.channel.basic_client_nack( envelope.delivery_tag, multiple=True, requeue=False ) qfuture.set_result(True) await self.channel.basic_consume(qcallback, queue_name=queue_name) await qfuture async def test_basic_nack_norequeue(self): queue_name = 'queue_name' exchange_name = 'exchange_name' routing_key = '' await self.publish( queue_name, exchange_name, routing_key, "payload" ) qfuture = asyncio.Future() async def qcallback(channel, body, envelope, _properties): await self.channel.basic_client_nack(envelope.delivery_tag, requeue=False) qfuture.set_result(True) await self.channel.basic_consume(qcallback, queue_name=queue_name) await qfuture async def test_basic_nack_requeue(self): queue_name = 'queue_name' exchange_name = 'exchange_name' routing_key = '' await self.publish( queue_name, exchange_name, routing_key, "payload" ) qfuture = asyncio.Future() called = False async def qcallback(channel, body, envelope, _properties): nonlocal called if not called: called = True await self.channel.basic_client_nack(envelope.delivery_tag, requeue=True) else: await self.channel.basic_client_ack(envelope.delivery_tag) qfuture.set_result(True) await self.channel.basic_consume(qcallback, queue_name=queue_name) await qfuture async def test_basic_reject(self): queue_name = 'queue_name' exchange_name = 'exchange_name' routing_key = '' await self.publish( queue_name, exchange_name, routing_key, "payload" ) qfuture = asyncio.Future() async def qcallback(channel, body, envelope, _properties): qfuture.set_result(envelope) await self.channel.basic_consume(qcallback, queue_name=queue_name) envelope = await qfuture await self.channel.basic_reject(envelope.delivery_tag) 
aioamqp-aioamqp-0.15.0/aioamqp/tests/test_channel.py000066400000000000000000000061441422314215600224700ustar00rootroot00000000000000""" Tests the "Channel" amqp class implementation """ import os import unittest import asynctest from . import testcase from .. import exceptions IMPLEMENT_CHANNEL_FLOW = os.environ.get('IMPLEMENT_CHANNEL_FLOW', False) class ChannelTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): _multiprocess_can_split_ = True async def test_open(self): channel = await self.amqp.channel() self.assertNotEqual(channel.channel_id, 0) self.assertTrue(channel.is_open) async def test_close(self): channel = await self.amqp.channel() result = await channel.close() self.assertEqual(result, True) self.assertFalse(channel.is_open) async def test_server_initiated_close(self): channel = await self.amqp.channel() try: await channel.basic_get(queue_name='non-existant') except exceptions.ChannelClosed as e: self.assertEqual(e.code, 404) self.assertFalse(channel.is_open) channel = await self.amqp.channel() async def test_alreadyclosed_channel(self): channel = await self.amqp.channel() result = await channel.close() self.assertEqual(result, True) with self.assertRaises(exceptions.ChannelClosed): result = await channel.close() async def test_multiple_open(self): channel1 = await self.amqp.channel() channel2 = await self.amqp.channel() self.assertNotEqual(channel1.channel_id, channel2.channel_id) async def test_channel_active_flow(self): channel = await self.amqp.channel() result = await channel.flow(active=True) self.assertTrue(result['active']) @unittest.skipIf(IMPLEMENT_CHANNEL_FLOW is False, "active=false is not implemented in RabbitMQ") async def test_channel_inactive_flow(self): channel = await self.amqp.channel() result = await channel.flow(active=False) self.assertFalse(result['active']) result = await channel.flow(active=True) async def test_channel_active_flow_twice(self): channel = await self.amqp.channel() result = await channel.flow(active=True) 
self.assertTrue(result['active']) result = await channel.flow(active=True) @unittest.skipIf(IMPLEMENT_CHANNEL_FLOW is False, "active=false is not implemented in RabbitMQ") async def test_channel_active_inactive_flow(self): channel = await self.amqp.channel() result = await channel.flow(active=True) self.assertTrue(result['active']) result = await channel.flow(active=False) self.assertFalse(result['active']) class ChannelIdTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def test_channel_id_release_close(self): channels_count_start = self.amqp.channels_ids_count channel = await self.amqp.channel() self.assertEqual(self.amqp.channels_ids_count, channels_count_start + 1) result = await channel.close() self.assertEqual(result, True) self.assertFalse(channel.is_open) self.assertEqual(self.amqp.channels_ids_count, channels_count_start) aioamqp-aioamqp-0.15.0/aioamqp/tests/test_close.py000066400000000000000000000033251422314215600221630ustar00rootroot00000000000000import asyncio import asynctest from . import testcase from .. 
import exceptions class CloseTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): def setUp(self): super().setUp() self.consume_future = asyncio.Future() async def callback(self, body, envelope, properties): self.consume_future.set_result((body, envelope, properties)) async def get_callback_result(self): await self.consume_future result = self.consume_future.result() self.consume_future = asyncio.Future() return result async def test_close(self): channel = await self.create_channel() self.assertTrue(channel.is_open) await channel.close() self.assertFalse(channel.is_open) async def test_multiple_close(self): channel = await self.create_channel() await channel.close() self.assertFalse(channel.is_open) with self.assertRaises(exceptions.ChannelClosed): await channel.close() async def test_cannot_publish_after_close(self): channel = self.channel await channel.close() with self.assertRaises(exceptions.ChannelClosed): await self.channel.publish("coucou", "my_e", "") async def test_cannot_declare_queue_after_close(self): channel = self.channel await channel.close() with self.assertRaises(exceptions.ChannelClosed): await self.channel.queue_declare("qq") async def test_cannot_consume_after_close(self): channel = self.channel await self.channel.queue_declare("q") await channel.close() with self.assertRaises(exceptions.ChannelClosed): await channel.basic_consume(self.callback) aioamqp-aioamqp-0.15.0/aioamqp/tests/test_connect.py000066400000000000000000000032661422314215600225130ustar00rootroot00000000000000"""Aioamqp tests""" import asynctest import socket from aioamqp import connect from aioamqp.protocol import OPEN from . 
import testcase class AmqpConnectionTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def test_connect(self): _transport, proto = await connect(host=self.host, port=self.port, virtualhost=self.vhost) self.assertEqual(proto.state, OPEN) self.assertIsNotNone(proto.server_properties) await proto.close() async def test_connect_tuning(self): # frame_max should be higher than 131072 frame_max = 131072 channel_max = 10 heartbeat = 100 _transport, proto = await connect( host=self.host, port=self.port, virtualhost=self.vhost, channel_max=channel_max, frame_max=frame_max, heartbeat=heartbeat, ) self.assertEqual(proto.state, OPEN) self.assertIsNotNone(proto.server_properties) self.assertDictEqual(proto.connection_tunning, { 'frame_max': frame_max, 'channel_max': channel_max, 'heartbeat': heartbeat }) self.assertEqual(proto.server_channel_max, channel_max) self.assertEqual(proto.server_frame_max, frame_max) self.assertEqual(proto.server_heartbeat, heartbeat) await proto.close() async def test_socket_nodelay(self): transport, proto = await connect(host=self.host, port=self.port, virtualhost=self.vhost) sock = transport.get_extra_info('socket') opt_val = sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) self.assertNotEqual(opt_val, 0) await proto.close() aioamqp-aioamqp-0.15.0/aioamqp/tests/test_connection_close.py000066400000000000000000000021411422314215600243750ustar00rootroot00000000000000import asynctest from aioamqp.protocol import OPEN, CLOSED from aioamqp.exceptions import AmqpClosedConnection from . 
import testcase class CloseTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def test_close(self): amqp = self.amqp self.assertEqual(amqp.state, OPEN) # grab a ref here because py36 sets _stream_reader to None in # StreamReaderProtocol.connection_lost() transport = amqp._stream_reader._transport await amqp.close() self.assertEqual(amqp.state, CLOSED) if hasattr(transport, 'is_closing'): self.assertTrue(transport.is_closing()) else: # TODO: remove with python <3.4.4 support self.assertTrue(transport._closing) # make sure those 2 tasks/futures are properly set as finished await amqp.stop_now.wait() await amqp.worker async def test_multiple_close(self): amqp = self.amqp await amqp.close() self.assertEqual(amqp.state, CLOSED) with self.assertRaises(AmqpClosedConnection): await amqp.close() aioamqp-aioamqp-0.15.0/aioamqp/tests/test_connection_lost.py000066400000000000000000000015611422314215600242560ustar00rootroot00000000000000import asynctest import asynctest.mock import asyncio from aioamqp.protocol import OPEN, CLOSED from . import testcase class ConnectionLostTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): _multiprocess_can_split_ = True async def test_connection_lost(self): self.callback_called = False def callback(*args, **kwargs): self.callback_called = True amqp = self.amqp amqp._on_error_callback = callback channel = self.channel self.assertEqual(amqp.state, OPEN) self.assertTrue(channel.is_open) amqp._stream_reader._transport.close() # this should have the same effect as the tcp connection being lost await asyncio.wait_for(amqp.worker, 1) self.assertEqual(amqp.state, CLOSED) self.assertFalse(channel.is_open) self.assertTrue(self.callback_called) aioamqp-aioamqp-0.15.0/aioamqp/tests/test_consume.py000066400000000000000000000162571422314215600225370ustar00rootroot00000000000000import asyncio import asynctest from . import testcase from .. 
import exceptions from ..properties import Properties class ConsumeTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): _multiprocess_can_split_ = True def setUp(self): super().setUp() self.consume_future = asyncio.Future() async def callback(self, channel, body, envelope, properties): self.consume_future.set_result((body, envelope, properties)) async def get_callback_result(self): await self.consume_future result = self.consume_future.result() self.consume_future = asyncio.Future() return result async def test_consume(self): # declare await self.channel.queue_declare("q", exclusive=True, no_wait=False) await self.channel.exchange_declare("e", "fanout") await self.channel.queue_bind("q", "e", routing_key='') # get a different channel channel = await self.create_channel() # publish await channel.publish("coucou", "e", routing_key='',) # start consume await channel.basic_consume(self.callback, queue_name="q") # get one body, envelope, properties = await self.get_callback_result() self.assertIsNotNone(envelope.consumer_tag) self.assertIsNotNone(envelope.delivery_tag) self.assertEqual(b"coucou", body) self.assertIsInstance(properties, Properties) async def test_big_consume(self): # declare await self.channel.queue_declare("q", exclusive=True, no_wait=False) await self.channel.exchange_declare("e", "fanout") await self.channel.queue_bind("q", "e", routing_key='') # get a different channel channel = await self.create_channel() # publish await channel.publish("a"*1000000, "e", routing_key='',) # start consume await channel.basic_consume(self.callback, queue_name="q") # get one body, envelope, properties = await self.get_callback_result() self.assertIsNotNone(envelope.consumer_tag) self.assertIsNotNone(envelope.delivery_tag) self.assertEqual(b"a"*1000000, body) self.assertIsInstance(properties, Properties) async def test_consume_multiple_queues(self): await self.channel.queue_declare("q1", exclusive=True, no_wait=False) await self.channel.queue_declare("q2", 
exclusive=True, no_wait=False) await self.channel.exchange_declare("e", "direct") await self.channel.queue_bind("q1", "e", routing_key="q1") await self.channel.queue_bind("q2", "e", routing_key="q2") # get a different channel channel = await self.create_channel() q1_future = asyncio.Future() async def q1_callback(channel, body, envelope, properties): q1_future.set_result((body, envelope, properties)) q2_future = asyncio.Future() async def q2_callback(channel, body, envelope, properties): q2_future.set_result((body, envelope, properties)) # start consumers result = await channel.basic_consume(q1_callback, queue_name="q1") ctag_q1 = result['consumer_tag'] result = await channel.basic_consume(q2_callback, queue_name="q2") ctag_q2 = result['consumer_tag'] # put message in q1 await channel.publish("coucou1", "e", "q1") # get it body1, envelope1, properties1 = await q1_future self.assertEqual(ctag_q1, envelope1.consumer_tag) self.assertIsNotNone(envelope1.delivery_tag) self.assertEqual(b"coucou1", body1) self.assertIsInstance(properties1, Properties) # put message in q2 await channel.publish("coucou2", "e", "q2") # get it body2, envelope2, properties2 = await q2_future self.assertEqual(ctag_q2, envelope2.consumer_tag) self.assertEqual(b"coucou2", body2) self.assertIsInstance(properties2, Properties) # close consuming await asyncio.gather(self.channel.basic_cancel(ctag_q1), self.channel.basic_cancel(ctag_q2)) async def test_duplicate_consumer_tag(self): await self.channel.queue_declare("q1", exclusive=True, no_wait=False) await self.channel.queue_declare("q2", exclusive=True, no_wait=False) await self.channel.basic_consume(self.callback, queue_name="q1", consumer_tag='tag') with self.assertRaises(exceptions.ChannelClosed) as cm: await self.channel.basic_consume(self.callback, queue_name="q2", consumer_tag='tag') self.assertEqual(cm.exception.code, 530) async def test_consume_callaback_synced(self): # declare await self.channel.queue_declare("q", exclusive=True, 
no_wait=False) await self.channel.exchange_declare("e", "fanout") await self.channel.queue_bind("q", "e", routing_key='') # get a different channel channel = await self.create_channel() # publish await channel.publish("coucou", "e", routing_key='',) sync_future = asyncio.Future() async def callback(channel, body, envelope, properties): self.assertTrue(sync_future.done()) await channel.basic_consume(callback, queue_name="q") sync_future.set_result(True) async def test_consume_multiple_queues_using_gather(self): await asyncio.gather(self.channel.queue_declare("q1", exclusive=True, no_wait=False), self.channel.queue_declare("q2", exclusive=True, no_wait=False)) await asyncio.gather(self.channel.exchange_declare("e", "direct"), self.channel.exchange_declare("f", "direct")) await asyncio.gather(self.channel.queue_bind("q1", "e", routing_key="q1"), self.channel.queue_bind("q2", "e", routing_key="q2")) # get a different channel channel = await self.create_channel() q1_future = asyncio.Future() async def q1_callback(channel, body, envelope, properties): q1_future.set_result((body, envelope, properties)) q2_future = asyncio.Future() async def q2_callback(channel, body, envelope, properties): q2_future.set_result((body, envelope, properties)) # start consumers results = await asyncio.gather(channel.basic_consume(q1_callback, queue_name="q1"), channel.basic_consume(q2_callback, queue_name="q2")) ctag_q1 = results[0]['consumer_tag'] ctag_q2 = results[1]['consumer_tag'] # put message in q1 and q2 await asyncio.gather(channel.publish("coucou1", "e", "q1"), channel.publish("coucou2", "e", "q2")) # get it body1, envelope1, properties1 = await q1_future self.assertEqual(ctag_q1, envelope1.consumer_tag) self.assertIsNotNone(envelope1.delivery_tag) self.assertEqual(b"coucou1", body1) self.assertIsInstance(properties1, Properties) # get it body2, envelope2, properties2 = await q2_future self.assertEqual(ctag_q2, envelope2.consumer_tag) self.assertEqual(b"coucou2", body2) 
self.assertIsInstance(properties2, Properties) aioamqp-aioamqp-0.15.0/aioamqp/tests/test_exchange.py000066400000000000000000000137571422314215600226520ustar00rootroot00000000000000""" Amqp exchange class tests """ import asynctest from . import testcase from .. import exceptions class ExchangeDeclareTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): _multiprocess_can_split_ = True async def test_exchange_direct_declare(self): result = await self.channel.exchange_declare( 'exchange_name', type_name='direct') self.assertTrue(result) async def test_exchange_fanout_declare(self): result = await self.channel.exchange_declare( 'exchange_name', type_name='fanout') self.assertTrue(result) async def test_exchange_topic_declare(self): result = await self.channel.exchange_declare( 'exchange_name', type_name='topic') self.assertTrue(result) async def test_exchange_headers_declare(self): result = await self.channel.exchange_declare( 'exchange_name', type_name='headers') self.assertTrue(result) async def test_exchange_declare_wrong_types(self): result = await self.channel.exchange_declare( 'exchange_name', type_name='headers', auto_delete=True, durable=True) self.assertTrue(result) with self.assertRaises(exceptions.ChannelClosed): result = await self.channel.exchange_declare( 'exchange_name', type_name='fanout', auto_delete=False, durable=False) async def test_exchange_declare_passive(self): result = await self.channel.exchange_declare( 'exchange_name', type_name='headers', auto_delete=True, durable=True) self.assertTrue(result) result = await self.channel.exchange_declare( 'exchange_name', type_name='headers', auto_delete=True, durable=True, passive=True) self.assertTrue(result) result = await self.channel.exchange_declare( 'exchange_name', type_name='headers', auto_delete=False, durable=False, passive=True) self.assertTrue(result) async def test_exchange_declare_passive_does_not_exists(self): with self.assertRaises(exceptions.ChannelClosed) as cm: await 
self.channel.exchange_declare( 'non_existant_exchange', type_name='headers', auto_delete=False, durable=False, passive=True) self.assertEqual(cm.exception.code, 404) async def test_exchange_declare_unknown_type(self): with self.assertRaises(exceptions.ChannelClosed): await self.channel.exchange_declare( 'non_existant_exchange', type_name='unknown_type', auto_delete=False, durable=False, passive=True) class ExchangeDelete(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def test_delete(self): exchange_name = 'exchange_name' await self.channel.exchange_declare(exchange_name, type_name='direct') result = await self.channel.exchange_delete(exchange_name) self.assertTrue(result) with self.assertRaises(exceptions.ChannelClosed) as cm: await self.channel.exchange_declare( exchange_name, type_name='direct', passive=True ) self.assertEqual(cm.exception.code, 404) async def test_double_delete(self): exchange_name = 'exchange_name' await self.channel.exchange_declare(exchange_name, type_name='direct') result = await self.channel.exchange_delete(exchange_name) self.assertTrue(result) if self.server_version() < (3, 3, 5): with self.assertRaises(exceptions.ChannelClosed) as cm: await self.channel.exchange_delete(exchange_name) self.assertEqual(cm.exception.code, 404) else: # weird result from rabbitmq 3.3.5 result = await self.channel.exchange_delete(exchange_name) self.assertTrue(result) class ExchangeBind(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def test_exchange_bind(self): await self.channel.exchange_declare('exchange_destination', type_name='direct') await self.channel.exchange_declare('exchange_source', type_name='direct') result = await self.channel.exchange_bind( 'exchange_destination', 'exchange_source', routing_key='') self.assertTrue(result) async def test_inexistant_exchange_bind(self): with self.assertRaises(exceptions.ChannelClosed) as cm: await self.channel.exchange_bind( 'exchange_destination', 'exchange_source', routing_key='') 
self.assertEqual(cm.exception.code, 404) class ExchangeUnbind(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def test_exchange_unbind(self): ex_source = 'exchange_source' ex_destination = 'exchange_destination' await self.channel.exchange_declare(ex_destination, type_name='direct') await self.channel.exchange_declare(ex_source, type_name='direct') await self.channel.exchange_bind( ex_destination, ex_source, routing_key='') await self.channel.exchange_unbind( ex_destination, ex_source, routing_key='') async def test_exchange_unbind_reversed(self): ex_source = 'exchange_source' ex_destination = 'exchange_destination' await self.channel.exchange_declare(ex_destination, type_name='direct') await self.channel.exchange_declare(ex_source, type_name='direct') await self.channel.exchange_bind( ex_destination, ex_source, routing_key='') if self.server_version() < (3, 3, 5): with self.assertRaises(exceptions.ChannelClosed) as cm: result = await self.channel.exchange_unbind( ex_source, ex_destination, routing_key='') self.assertEqual(cm.exception.code, 404) else: # weird result from rabbitmq 3.3.5 result = await self.channel.exchange_unbind(ex_source, ex_destination, routing_key='') self.assertTrue(result) aioamqp-aioamqp-0.15.0/aioamqp/tests/test_heartbeat.py000066400000000000000000000021301422314215600230060ustar00rootroot00000000000000""" Tests the heartbeat methods """ import asyncio import asynctest from unittest import mock from aioamqp.protocol import CLOSED from . 
import testcase class HeartbeatTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def test_heartbeat(self): with mock.patch.object( self.amqp, 'send_heartbeat', wraps=self.amqp.send_heartbeat ) as send_heartbeat: # reset both timer/task to 1) make them 'see' the new heartbeat value # 2) so that the mock is actually called back from the main loop self.amqp.server_heartbeat = 1 self.amqp._heartbeat_send_worker.cancel() self.amqp._heartbeat_send_worker = asyncio.ensure_future(self.amqp._heartbeat_send()) self.amqp._heartbeat_recv_worker.cancel() self.amqp._heartbeat_recv_worker = asyncio.ensure_future(self.amqp._heartbeat_recv()) await asyncio.sleep(1.001) send_heartbeat.assert_called_once_with() await asyncio.sleep(1.001) self.assertEqual(self.amqp.state, CLOSED) aioamqp-aioamqp-0.15.0/aioamqp/tests/test_properties.py000066400000000000000000000075151422314215600232570ustar00rootroot00000000000000""" Tests for message properties for basic deliver """ import asyncio import asynctest import logging from . 
import testcase logger = logging.getLogger(__name__) class ReplyTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def _server(self, server_future, exchange_name, routing_key): """Consume messages and reply to them by publishing messages back to the client using routing key set to the reply_to property""" server_queue_name = 'server_queue' await self.channel.queue_declare(server_queue_name, exclusive=False, no_wait=False) await self.channel.exchange_declare(exchange_name, type_name='direct') await self.channel.queue_bind( server_queue_name, exchange_name, routing_key=routing_key) async def server_callback(channel, body, envelope, properties): logger.debug('Server received message') server_future.set_result((body, envelope, properties)) publish_properties = {'correlation_id': properties.correlation_id} logger.debug('Replying to %r', properties.reply_to) await self.channel.publish( b'reply message', exchange_name, properties.reply_to, publish_properties) logger.debug('Server replied') await self.channel.basic_consume(server_callback, queue_name=server_queue_name) logger.debug('Server consuming messages') async def _client( self, client_future, exchange_name, server_routing_key, correlation_id, client_routing_key): """Declare a queue, bind client_routing_key to it and publish a message to the server with the reply_to property set to that routing key""" client_queue_name = 'client_reply_queue' client_channel = await self.create_channel() await client_channel.queue_declare( client_queue_name, exclusive=True, no_wait=False) await client_channel.queue_bind( client_queue_name, exchange_name, routing_key=client_routing_key) async def client_callback(channel, body, envelope, properties): logger.debug('Client received message') client_future.set_result((body, envelope, properties)) await client_channel.basic_consume(client_callback, queue_name=client_queue_name) logger.debug('Client consuming messages') await client_channel.publish( b'client message', 
exchange_name, server_routing_key, {'correlation_id': correlation_id, 'reply_to': client_routing_key}) logger.debug('Client published message') async def test_reply_to(self): exchange_name = 'exchange_name' server_routing_key = 'reply_test' server_future = asyncio.Future() await self._server(server_future, exchange_name, server_routing_key) correlation_id = 'secret correlation id' client_routing_key = 'secret_client_key' client_future = asyncio.Future() await self._client( client_future, exchange_name, server_routing_key, correlation_id, client_routing_key) logger.debug('Waiting for server to receive message') server_body, server_envelope, server_properties = await server_future self.assertEqual(server_body, b'client message') self.assertEqual(server_properties.correlation_id, correlation_id) self.assertEqual(server_properties.reply_to, client_routing_key) self.assertEqual(server_envelope.routing_key, server_routing_key) logger.debug('Waiting for client to receive message') client_body, client_envelope, client_properties = await client_future self.assertEqual(client_body, b'reply message') self.assertEqual(client_properties.correlation_id, correlation_id) self.assertEqual(client_envelope.routing_key, client_routing_key) aioamqp-aioamqp-0.15.0/aioamqp/tests/test_protocol.py000066400000000000000000000075101422314215600227170ustar00rootroot00000000000000""" Test our Protocol class """ import asynctest from unittest import mock from . import testcase from .. import exceptions from .. import connect as amqp_connect from .. 
import from_url as amqp_from_url from ..protocol import AmqpProtocol, OPEN class ProtocolTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def test_connect(self): _transport, protocol = await amqp_connect( host=self.host, port=self.port, virtualhost=self.vhost ) self.assertEqual(protocol.state, OPEN) await protocol.close() async def test_connect_products_info(self): client_properties = { 'program': 'aioamqp-tests', 'program_version': '0.1.1', } _transport, protocol = await amqp_connect( host=self.host, port=self.port, virtualhost=self.vhost, client_properties=client_properties, ) self.assertEqual(protocol.client_properties, client_properties) await protocol.close() async def test_connection_unexistant_vhost(self): with self.assertRaises(exceptions.AmqpClosedConnection): await amqp_connect(host=self.host, port=self.port, virtualhost='/unexistant') def test_connection_wrong_login_password(self): with self.assertRaises(exceptions.AmqpClosedConnection): self.loop.run_until_complete( amqp_connect(host=self.host, port=self.port, login='wrong', password='wrong') ) async def test_connection_from_url(self): with mock.patch('aioamqp.connect') as connect: async def func(*x, **y): return 1, 2 connect.side_effect = func await amqp_from_url('amqp://tom:pass@example.com:7777/myvhost') connect.assert_called_once_with( insist=False, password='pass', login_method='PLAIN', login='tom', host='example.com', protocol_factory=AmqpProtocol, virtualhost='myvhost', port=7777, ) async def test_ssl_context_connection_from_url(self): ssl_context = mock.Mock() with mock.patch('aioamqp.connect') as connect: async def func(*x, **y): return 1, 2 connect.side_effect = func await amqp_from_url('amqps://tom:pass@example.com:7777/myvhost', ssl=ssl_context) connect.assert_called_once_with( insist=False, password='pass', login_method='PLAIN', ssl=ssl_context, login='tom', host='example.com', protocol_factory=AmqpProtocol, virtualhost='myvhost', port=7777, ) async def 
test_amqps_connection_from_url(self): ssl_context = mock.Mock() with mock.patch('ssl.create_default_context') as create_default_context: with mock.patch('aioamqp.connect') as connect: create_default_context.return_value = ssl_context async def func(*x, **y): return 1, 2 connect.side_effect = func await amqp_from_url('amqps://tom:pass@example.com:7777/myvhost') connect.assert_called_once_with( insist=False, password='pass', login_method='PLAIN', ssl=ssl_context, login='tom', host='example.com', protocol_factory=AmqpProtocol, virtualhost='myvhost', port=7777, ) async def test_from_url_raises_on_wrong_scheme(self): with self.assertRaises(ValueError): await amqp_from_url('invalid://') aioamqp-aioamqp-0.15.0/aioamqp/tests/test_publish.py000066400000000000000000000064401422314215600225250ustar00rootroot00000000000000import asynctest import asyncio from . import testcase class PublishTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): _multiprocess_can_split_ = True async def test_publish(self): # declare await self.channel.queue_declare("q", exclusive=True, no_wait=False) await self.channel.exchange_declare("e", "fanout") await self.channel.queue_bind("q", "e", routing_key='') # publish await self.channel.publish("coucou", "e", routing_key='') queues = self.list_queues() self.assertIn("q", queues) self.assertEqual(1, queues["q"]['messages']) async def test_empty_publish(self): # declare await self.channel.queue_declare("q", exclusive=True, no_wait=False) await self.channel.exchange_declare("e", "fanout") await self.channel.queue_bind("q", "e", routing_key='') # publish await self.channel.publish("", "e", routing_key='') queues = self.list_queues() self.assertIn("q", queues) self.assertEqual(1, queues["q"]["messages"]) self.assertEqual(0, queues["q"]["message_bytes"]) async def test_big_publish(self): # declare await self.channel.queue_declare("q", exclusive=True, no_wait=False) await self.channel.exchange_declare("e", "fanout") await self.channel.queue_bind("q", 
"e", routing_key='') # publish await self.channel.publish("a"*1000000, "e", routing_key='') queues = self.list_queues() self.assertIn("q", queues) self.assertEqual(1, queues["q"]['messages']) async def test_big_unicode_publish(self): # declare await self.channel.queue_declare("q", exclusive=True, no_wait=False) await self.channel.exchange_declare("e", "fanout") await self.channel.queue_bind("q", "e", routing_key='') # publish await self.channel.publish("Ы"*1000000, "e", routing_key='') await self.channel.publish("Ы"*1000000, "e", routing_key='') queues = self.list_queues() self.assertIn("q", queues) self.assertEqual(2, queues["q"]['messages']) async def test_confirmed_publish(self): # declare await self.channel.confirm_select() self.assertTrue(self.channel.publisher_confirms) await self.channel.queue_declare("q", exclusive=True, no_wait=False) await self.channel.exchange_declare("e", "fanout") await self.channel.queue_bind("q", "e", routing_key='') # publish await self.channel.publish("coucou", "e", routing_key='') queues = self.list_queues() self.assertIn("q", queues) self.assertEqual(1, queues["q"]['messages']) async def test_return_from_publish(self): called = False async def callback(channel, body, envelope, properties): nonlocal called called = True channel = await self.amqp.channel(return_callback=callback) # declare await channel.exchange_declare("e", "topic") # publish await channel.publish("coucou", "e", routing_key="not.found", mandatory=True) for _i in range(10): if called: break await asyncio.sleep(0.1) self.assertTrue(called) aioamqp-aioamqp-0.15.0/aioamqp/tests/test_queue.py000066400000000000000000000223571422314215600222100ustar00rootroot00000000000000""" Amqp queue class tests """ import asyncio import asynctest from . import testcase from .. 
import exceptions class QueueDeclareTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): def setUp(self): super().setUp() self.consume_future = asyncio.Future() async def callback(self, body, envelope, properties): self.consume_future.set_result((body, envelope, properties)) async def get_callback_result(self): await self.consume_future result = self.consume_future.result() self.consume_future = asyncio.Future() return result async def test_queue_declare_no_name(self): result = await self.channel.queue_declare() self.assertIsNotNone(result['queue']) async def test_queue_declare(self): queue_name = 'queue_name' result = await self.channel.queue_declare('queue_name') self.assertEqual(result['message_count'], 0) self.assertEqual(result['consumer_count'], 0) self.assertEqual(result['queue'].split('.')[-1], queue_name) self.assertTrue(result) async def test_queue_declare_passive(self): queue_name = 'queue_name' await self.channel.queue_declare('queue_name') result = await self.channel.queue_declare(queue_name, passive=True) self.assertEqual(result['message_count'], 0) self.assertEqual(result['consumer_count'], 0) self.assertEqual(result['queue'].split('.')[-1], queue_name) async def test_queue_declare_custom_x_message_ttl_32_bits(self): queue_name = 'queue_name' # 2147483648 == 10000000000000000000000000000000 # in binary, meaning it is 32 bit long x_message_ttl = 2147483648 result = await self.channel.queue_declare('queue_name', arguments={ 'x-message-ttl': x_message_ttl }) self.assertEqual(result['message_count'], 0) self.assertEqual(result['consumer_count'], 0) self.assertEqual(result['queue'].split('.')[-1], queue_name) self.assertTrue(result) async def test_queue_declare_passive_nonexistant_queue(self): queue_name = 'queue_name' with self.assertRaises(exceptions.ChannelClosed) as cm: await self.channel.queue_declare(queue_name, passive=True) self.assertEqual(cm.exception.code, 404) async def test_wrong_parameter_queue(self): queue_name = 'queue_name' await 
self.channel.queue_declare(queue_name, exclusive=False, auto_delete=False) with self.assertRaises(exceptions.ChannelClosed) as cm: await self.channel.queue_declare(queue_name, passive=False, exclusive=True, auto_delete=True) self.assertIn(cm.exception.code, [405, 406]) async def test_multiple_channel_same_queue(self): queue_name = 'queue_name' channel1 = await self.amqp.channel() channel2 = await self.amqp.channel() result = await channel1.queue_declare(queue_name, passive=False) self.assertEqual(result['message_count'], 0) self.assertEqual(result['consumer_count'], 0) self.assertEqual(result['queue'].split('.')[-1], queue_name) result = await channel2.queue_declare(queue_name, passive=False) self.assertEqual(result['message_count'], 0) self.assertEqual(result['consumer_count'], 0) self.assertEqual(result['queue'].split('.')[-1], queue_name) async def _test_queue_declare(self, queue_name, exclusive=False, durable=False, auto_delete=False): # declare queue result = await self.channel.queue_declare( queue_name, no_wait=False, exclusive=exclusive, durable=durable, auto_delete=auto_delete) # assert returned results has the good arguments # in test the channel declared queues with prefixed names, to get the full name of the # declared queue we have to use self.full_name function self.assertEqual(self.full_name(queue_name), result['queue']) queues = self.list_queues() queue = queues[queue_name] # assert queue has been declared witht the good arguments self.assertEqual(queue_name, queue['name']) self.assertEqual(0, queue['consumers']) self.assertEqual(0, queue['messages_ready']) self.assertEqual(auto_delete, queue['auto_delete']) self.assertEqual(durable, queue['durable']) # delete queue await self.safe_queue_delete(queue_name) def test_durable_and_auto_deleted(self): self.loop.run_until_complete( self._test_queue_declare('q', exclusive=False, durable=True, auto_delete=True)) def test_durable_and_not_auto_deleted(self): self.loop.run_until_complete( 
self._test_queue_declare('q', exclusive=False, durable=True, auto_delete=False)) def test_not_durable_and_auto_deleted(self): self.loop.run_until_complete( self._test_queue_declare('q', exclusive=False, durable=False, auto_delete=True)) def test_not_durable_and_not_auto_deleted(self): self.loop.run_until_complete( self._test_queue_declare('q', exclusive=False, durable=False, auto_delete=False)) async def test_exclusive(self): # create an exclusive queue await self.channel.queue_declare("q", exclusive=True) # consume it await self.channel.basic_consume(self.callback, queue_name="q", no_wait=False) # create an other amqp connection _transport, protocol = await self.create_amqp() channel = await self.create_channel(amqp=protocol) # assert that this connection cannot connect to the queue with self.assertRaises(exceptions.ChannelClosed): await channel.basic_consume(self.callback, queue_name="q", no_wait=False) # amqp and channels are auto deleted by test case async def test_not_exclusive(self): # create a non-exclusive queue await self.channel.queue_declare('q', exclusive=False) # consume it await self.channel.basic_consume(self.callback, queue_name='q', no_wait=False) # create an other amqp connection _transport, protocol = await self.create_amqp() channel = await self.create_channel(amqp=protocol) # assert that this connection can connect to the queue await channel.basic_consume(self.callback, queue_name='q', no_wait=False) class QueueDeleteTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def test_delete_queue(self): queue_name = 'queue_name' await self.channel.queue_declare(queue_name) result = await self.channel.queue_delete(queue_name) self.assertTrue(result) async def test_delete_inexistant_queue(self): queue_name = 'queue_name' if self.server_version() < (3, 3, 5): with self.assertRaises(exceptions.ChannelClosed) as cm: result = await self.channel.queue_delete(queue_name) self.assertEqual(cm.exception.code, 404) else: result = await 
self.channel.queue_delete(queue_name) self.assertTrue(result) class QueueBindTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def test_bind_queue(self): queue_name = 'queue_name' exchange_name = 'exchange_name' await self.channel.queue_declare(queue_name) await self.channel.exchange_declare(exchange_name, type_name='direct') result = await self.channel.queue_bind(queue_name, exchange_name, routing_key='') self.assertTrue(result) async def test_bind_unexistant_exchange(self): queue_name = 'queue_name' exchange_name = 'exchange_name' await self.channel.queue_declare(queue_name) with self.assertRaises(exceptions.ChannelClosed) as cm: await self.channel.queue_bind(queue_name, exchange_name, routing_key='') self.assertEqual(cm.exception.code, 404) async def test_bind_unexistant_queue(self): queue_name = 'queue_name' exchange_name = 'exchange_name' await self.channel.exchange_declare(exchange_name, type_name='direct') with self.assertRaises(exceptions.ChannelClosed) as cm: await self.channel.queue_bind(queue_name, exchange_name, routing_key='') self.assertEqual(cm.exception.code, 404) async def test_unbind_queue(self): queue_name = 'queue_name' exchange_name = 'exchange_name' await self.channel.queue_declare(queue_name) await self.channel.exchange_declare(exchange_name, type_name='direct') await self.channel.queue_bind(queue_name, exchange_name, routing_key='') result = await self.channel.queue_unbind(queue_name, exchange_name, routing_key='') self.assertTrue(result) class QueuePurgeTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def test_purge_queue(self): queue_name = 'queue_name' await self.channel.queue_declare(queue_name) result = await self.channel.queue_purge(queue_name) self.assertEqual(result['message_count'], 0) async def test_purge_queue_inexistant_queue(self): queue_name = 'queue_name' with self.assertRaises(exceptions.ChannelClosed) as cm: await self.channel.queue_purge(queue_name) self.assertEqual(cm.exception.code, 404) 
aioamqp-aioamqp-0.15.0/aioamqp/tests/test_recover.py000066400000000000000000000010231422314215600225140ustar00rootroot00000000000000""" Amqp basic tests for recover methods """ import asynctest from . import testcase class RecoverTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): async def test_basic_recover_async(self): await self.channel.basic_recover_async(requeue=True) async def test_basic_recover_async_no_requeue(self): await self.channel.basic_recover_async(requeue=False) async def test_basic_recover(self): result = await self.channel.basic_recover(requeue=True) self.assertTrue(result) aioamqp-aioamqp-0.15.0/aioamqp/tests/test_server_basic_cancel.py000066400000000000000000000045531422314215600250360ustar00rootroot00000000000000""" Server received requests handling tests """ import asyncio import asynctest import asynctest.mock import uuid from . import testcase async def consumer(channel, body, envelope, properties): await channel.basic_client_ack(envelope.delivery_tag) class ServerBasicCancelTestCase(testcase.RabbitTestCaseMixin, asynctest.TestCase): _multiprocess_can_split_ = True def setUp(self): super().setUp() self.queue_name = str(uuid.uuid4()) async def test_cancel_whilst_consuming(self): await self.channel.queue_declare(self.queue_name) # None is non-callable. We want to make sure the callback is # unregistered and never called. 
await self.channel.basic_consume(None) await self.channel.queue_delete(self.queue_name) async def test_cancel_callbacks(self): callback_calls = [] callback_event = asyncio.Event() async def coroutine_callback(*args, **kwargs): callback_calls.append((args, kwargs)) def function_callback(*args, **kwargs): callback_calls.append((args, kwargs)) callback_event.set() self.channel.add_cancellation_callback(coroutine_callback) self.channel.add_cancellation_callback(function_callback) await self.channel.queue_declare(self.queue_name) rv = await self.channel.basic_consume(consumer) await self.channel.queue_delete(self.queue_name) await callback_event.wait() self.assertEqual(2, len(callback_calls)) for args, _kwargs in callback_calls: self.assertIs(self.channel, args[0]) self.assertEqual(rv['consumer_tag'], args[1]) async def test_cancel_callback_exceptions(self): callback_calls = [] callback_event = asyncio.Event() def function_callback(*args, **kwargs): callback_calls.append((args, kwargs)) callback_event.set() raise RuntimeError self.channel.add_cancellation_callback(function_callback) self.channel.add_cancellation_callback(function_callback) await self.channel.queue_declare(self.queue_name) await self.channel.basic_consume(consumer) await self.channel.queue_delete(self.queue_name) await callback_event.wait() self.assertEqual(2, len(callback_calls)) self.assertTrue(self.channel.is_open) aioamqp-aioamqp-0.15.0/aioamqp/tests/testcase.py000066400000000000000000000234011422314215600216270ustar00rootroot00000000000000"""Aioamqp tests utilities Provides the test case to simplify testing """ import asyncio import inspect import logging import os import time import uuid import pyrabbit2.api from .. import connect as aioamqp_connect from .. 
import exceptions from ..channel import Channel from ..protocol import AmqpProtocol, OPEN logger = logging.getLogger(__name__) def use_full_name(f, arg_names): sig = inspect.signature(f) for arg_name in arg_names: if arg_name not in sig.parameters: raise ValueError(f'{arg_name} is not a valid argument name for function {f.__qualname__}') def wrapper(self, *args, **kw): ba = sig.bind_partial(self, *args, **kw) for param in sig.parameters.values(): if param.name in arg_names and param.name in ba.arguments: ba.arguments[param.name] = self.full_name(ba.arguments[param.name]) return f(*(ba.args), **(ba.kwargs)) return wrapper class ProxyChannel(Channel): def __init__(self, test_case, *args, **kw): super().__init__(*args, **kw) self.test_case = test_case self.test_case.register_channel(self) exchange_declare = use_full_name(Channel.exchange_declare, ['exchange_name']) exchange_delete = use_full_name(Channel.exchange_delete, ['exchange_name']) queue_declare = use_full_name(Channel.queue_declare, ['queue_name']) queue_delete = use_full_name(Channel.queue_delete, ['queue_name']) queue_bind = use_full_name(Channel.queue_bind, ['queue_name', 'exchange_name']) queue_unbind = use_full_name(Channel.queue_unbind, ['queue_name', 'exchange_name']) queue_purge = use_full_name(Channel.queue_purge, ['queue_name']) exchange_bind = use_full_name(Channel.exchange_bind, ['exchange_source', 'exchange_destination']) exchange_unbind = use_full_name(Channel.exchange_unbind, ['exchange_source', 'exchange_destination']) publish = use_full_name(Channel.publish, ['exchange_name']) basic_get = use_full_name(Channel.basic_get, ['queue_name']) basic_consume = use_full_name(Channel.basic_consume, ['queue_name']) def full_name(self, name): return self.test_case.full_name(name) class ProxyAmqpProtocol(AmqpProtocol): def __init__(self, test_case, *args, **kw): super().__init__(*args, **kw) self.test_case = test_case def channel_factory(self, protocol, channel_id, return_callback=None): return 
ProxyChannel(self.test_case, protocol, channel_id, return_callback=return_callback) CHANNEL_FACTORY = channel_factory class RabbitTestCaseMixin: """TestCase with a rabbit running in background""" RABBIT_TIMEOUT = 1.0 VHOST = 'test-aioamqp' def setUp(self): super().setUp() self.host = os.environ.get('AMQP_HOST', 'localhost') self.port = os.environ.get('AMQP_PORT', 5672) self.vhost = os.environ.get('AMQP_VHOST', self.VHOST + str(uuid.uuid4())) self.http_client = pyrabbit2.api.Client( f'{self.host}:15672', 'guest', 'guest', timeout=None, ) self.amqps = [] self.channels = [] self.exchanges = {} self.queues = {} self.transports = [] self.reset_vhost() def reset_vhost(self): try: self.http_client.delete_vhost(self.vhost) except Exception: # pylint: disable=broad-except pass self.http_client.create_vhost(self.vhost) self.http_client.set_vhost_permissions( vname=self.vhost, username='guest', config='.*', rd='.*', wr='.*', ) async def go(): _transport, protocol = await self.create_amqp() channel = await self.create_channel(amqp=protocol) self.channels.append(channel) self.loop.run_until_complete(go()) def tearDown(self): async def go(): for queue_name, channel in self.queues.values(): logger.debug('Delete queue %s', self.full_name(queue_name)) await self.safe_queue_delete(queue_name, channel) for exchange_name, channel in self.exchanges.values(): logger.debug('Delete exchange %s', self.full_name(exchange_name)) await self.safe_exchange_delete(exchange_name, channel) for amqp in self.amqps: if amqp.state != OPEN: continue logger.debug('Delete amqp %s', amqp) await amqp.close() del amqp self.loop.run_until_complete(go()) try: self.http_client.delete_vhost(self.vhost) except Exception: # pylint: disable=broad-except pass super().tearDown() @property def amqp(self): return self.amqps[0] @property def channel(self): return self.channels[0] def server_version(self, amqp=None): if amqp is None: amqp = self.amqp server_version = tuple(int(x) for x in 
amqp.server_properties['version'].split('.')) return server_version async def check_exchange_exists(self, exchange_name): """Check if the exchange exist""" try: await self.exchange_declare(exchange_name, passive=True) except exceptions.ChannelClosed: return False return True async def assertExchangeExists(self, exchange_name): if not self.check_exchange_exists(exchange_name): self.fail(f"Exchange {exchange_name} does not exists") async def check_queue_exists(self, queue_name): """Check if the queue exist""" try: await self.queue_declare(queue_name, passive=True) except exceptions.ChannelClosed: return False return True async def assertQueueExists(self, queue_name): if not self.check_queue_exists(queue_name): self.fail(f"Queue {queue_name} does not exists") def list_queues(self, vhost=None, fully_qualified_name=False): # wait for the http client to get the correct state of the queue time.sleep(int(os.environ.get('AMQP_REFRESH_TIME', 6))) queues_list = self.http_client.get_queues(vhost=vhost or self.vhost) queues = {} for queue_info in queues_list: queue_name = queue_info['name'] if fully_qualified_name is False: queue_name = self.local_name(queue_info['name']) queue_info['name'] = queue_name queues[queue_name] = queue_info return queues async def safe_queue_delete(self, queue_name, channel=None): """Delete the queue but does not raise any exception if it fails The operation has a timeout as well. """ channel = channel or self.channel full_queue_name = self.full_name(queue_name) try: await channel.queue_delete(full_queue_name, no_wait=False) except asyncio.TimeoutError: logger.warning('Timeout on queue %s deletion', full_queue_name, exc_info=True) except Exception: # pylint: disable=broad-except logger.exception('Unexpected error on queue %s deletion', full_queue_name) async def safe_exchange_delete(self, exchange_name, channel=None): """Delete the exchange but does not raise any exception if it fails The operation has a timeout as well. 
""" channel = channel or self.channel full_exchange_name = self.full_name(exchange_name) try: await channel.exchange_delete(full_exchange_name, no_wait=False) except asyncio.TimeoutError: logger.warning('Timeout on exchange %s deletion', full_exchange_name, exc_info=True) except Exception: # pylint: disable=broad-except logger.exception('Unexpected error on exchange %s deletion', full_exchange_name) def full_name(self, name): if self.is_full_name(name): return name return self.id() + '.' + name def local_name(self, name): if self.is_full_name(name): return name[len(self.id()) + 1:] # +1 because of the '.' return name def is_full_name(self, name): return name.startswith(self.id()) async def queue_declare(self, queue_name, *args, channel=None, safe_delete_before=True, **kw): channel = channel or self.channel if safe_delete_before: await self.safe_queue_delete(queue_name, channel=channel) # prefix queue_name with the test name full_queue_name = self.full_name(queue_name) try: rep = await channel.queue_declare(full_queue_name, *args, **kw) finally: self.queues[queue_name] = (queue_name, channel) return rep async def exchange_declare(self, exchange_name, *args, channel=None, safe_delete_before=True, **kw): channel = channel or self.channel if safe_delete_before: await self.safe_exchange_delete(exchange_name, channel=channel) # prefix exchange name full_exchange_name = self.full_name(exchange_name) try: rep = await channel.exchange_declare(full_exchange_name, *args, **kw) finally: self.exchanges[exchange_name] = (exchange_name, channel) return rep def register_channel(self, channel): self.channels.append(channel) async def create_channel(self, amqp=None): amqp = amqp or self.amqp channel = await amqp.channel() return channel async def create_amqp(self, vhost=None): def protocol_factory(*args, **kw): return ProxyAmqpProtocol(self, *args, **kw) vhost = vhost or self.vhost transport, protocol = await aioamqp_connect(host=self.host, port=self.port, virtualhost=vhost, 
protocol_factory=protocol_factory) self.amqps.append(protocol) return transport, protocol aioamqp-aioamqp-0.15.0/aioamqp/tests/testing.py000066400000000000000000000010371422314215600214720ustar00rootroot00000000000000import logging class AsyncioErrors(AssertionError): def __repr__(self): # pylint: disable=unsubscriptable-object return f"/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." 
json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/aioamqp.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/aioamqp.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/aioamqp" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/aioamqp" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." 
man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." aioamqp-aioamqp-0.15.0/docs/api.rst000066400000000000000000000323101422314215600171030ustar00rootroot00000000000000API === .. module:: aioamqp :synopsis: public Jinja2 API Basics ------ There are two principal objects when using aioamqp: * The protocol object, used to begin a connection to aioamqp, * The channel object, used when creating a new channel to effectively use an AMQP channel. 
Starting a connection --------------------- Starting a connection to AMQP really mean instanciate a new asyncio Protocol subclass. .. py:function:: connect(host, port, login, password, virtualhost, ssl, login_method, insist, protocol_factory, verify_ssl, loop, kwargs) -> Transport, AmqpProtocol Convenient method to connect to an AMQP broker :param str host: the host to connect to :param int port: broker port :param str login: login :param str password: password :param str virtualhost: AMQP virtualhost to use for this connection :param bool ssl: create an SSL connection instead of a plain unencrypted one :param bool verify_ssl: verify server's SSL certificate (True by default) :param str login_method: AMQP auth method :param bool insist: insist on connecting to a server :param AmqpProtocol protocol_factory: factory to use, if you need to subclass AmqpProtocol :param EventLopp loop: set the event loop to use :param dict kwargs: arguments to be given to the protocol_factory instance .. code:: import asyncio import aioamqp async def connect(): try: transport, protocol = await aioamqp.connect() # use default parameters except aioamqp.AmqpClosedConnection: print("closed connections") return print("connected !") await asyncio.sleep(1) print("close connection") await protocol.close() transport.close() asyncio.get_event_loop().run_until_complete(connect()) In this example, we just use the method "start_connection" to begin a communication with the server, which deals with credentials and connection tunning. If you're not using the default event loop (e.g. because you're using aioamqp from a different thread), call aioamqp.connect(loop=your_loop). The `AmqpProtocol` uses the `kwargs` arguments to configure the connection to the AMQP Broker: .. py:method:: AmqpProtocol.__init__(self, *args, **kwargs): The protocol to communicate with AMQP :param int channel_max: specifies highest channel number that the server permits. Usable channel numbers are in the range 1..channel-max. 
Zero indicates no specified limit. :param int frame_max: the largest frame size that the server proposes for the connection, including frame header and end-byte. The client can negotiate a lower value. Zero means that the server does not impose any specific limit but may reject very large frames if it cannot allocate resources for them. :param int heartbeat: the delay, in seconds, of the connection heartbeat that the server wants. Zero means the server does not want a heartbeat. :param Asyncio.EventLoop loop: specify the eventloop to use. :param dict client_properties: configure the client to connect to the AMQP server. Handling errors --------------- The connect() method has an extra 'on_error' kwarg option. This on_error is a callback or a coroutine function which is called with an exception as the argument:: import asyncio import socket import aioamqp async def error_callback(exception): print(exception) async def connect(): try: transport, protocol = await aioamqp.connect( host='nonexistant.com', on_error=error_callback, client_properties={ 'program_name': "test", 'hostname' : socket.gethostname(), }, ) except aioamqp.AmqpClosedConnection: print("closed connections") return asyncio.get_event_loop().run_until_complete(connect()) Publishing messages ------------------- A channel is the main object when you want to send message to an exchange, or to consume message from a queue:: channel = await protocol.channel() When you want to produce some content, you declare a queue then publish message into it:: await channel.queue_declare("my_queue") await channel.publish("aioamqp hello", '', "my_queue") Note: we're pushing message to "my_queue" queue, through the default amqp exchange. 
Consuming messages ------------------ When consuming message, you connect to the same queue you previously created:: import asyncio import aioamqp async def callback(channel, body, envelope, properties): print(body) channel = await protocol.channel() await channel.basic_consume(callback, queue_name="my_queue") The ``basic_consume`` method tells the server to send us the messages, and will call ``callback`` with amqp response arguments. The ``consumer_tag`` is the id of your consumer, and the ``delivery_tag`` is the tag used if you want to acknowledge the message. In the callback: * the first ``body`` parameter is the message * the ``envelope`` is an instance of envelope.Envelope class which encapsulate a group of amqp parameter such as:: consumer_tag delivery_tag exchange_name routing_key is_redeliver * the ``properties`` are message properties, an instance of ``properties.Properties`` with the following members:: content_type content_encoding headers delivery_mode priority correlation_id reply_to expiration message_id timestamp message_type user_id app_id cluster_id Server Cancellation ~~~~~~~~~~~~~~~~~~~ RabbitMQ offers an AMQP extension to notify a consumer when a queue is deleted. See `Consumer Cancel Notification `_ for additional details. ``aioamqp`` enables the extension for all channels but takes no action when the consumer is cancelled. Your application can be notified of consumer cancellations by adding a callback to the channel:: async def consumer_cancelled(channel, consumer_tag): # implement required cleanup here pass async def consumer(channel, body, envelope, properties): await channel.basic_client_ack(envelope.delivery_tag) channel = await protocol.channel() channel.add_cancellation_callback(consumer_cancelled) await channel.basic_consume(consumer, queue_name="my_queue") The callback can be a simple callable or an asynchronous co-routine. 
It can be used to restart consumption on the channel, close the channel, or anything else that is appropriate for your application. Queues ------ Queues are managed from the `Channel` object. .. py:method:: Channel.queue_declare(queue_name, passive, durable, exclusive, auto_delete, no_wait, arguments, timeout) -> dict Coroutine, creates or checks a queue on the broker :param str queue_name: the queue to receive message from :param bool passive: if set, the server will reply with `Declare-Ok` if the queue already exists with the same name, and raise an error if not. Checks for the same parameter as well. :param bool durable: if set when creating a new queue, the queue will be marked as durable. Durable queues remain active when a server restarts. :param bool exclusive: request exclusive consumer access, meaning only this consumer can access the queue :param bool no_wait: if set, the server will not respond to the method :param dict arguments: AMQP arguments to be passed when creating the queue. :param int timeout: wait for the server to respond after `timeout` Here is an example to create a randomly named queue with special arguments `x-max-priority`: .. code-block:: python result = await channel.queue_declare( queue_name='', durable=True, arguments={'x-max-priority': 4} ) .. py:method:: Channel.queue_delete(queue_name, if_unused, if_empty, no_wait, timeout) Coroutine, delete a queue on the broker :param str queue_name: the queue to receive message from :param bool if_unused: the queue is deleted if it has no consumers. Raise if not. :param bool if_empty: the queue is deleted if it has no messages. Raise if not. :param bool no_wait: if set, the server will not respond to the method :param dict arguments: AMQP arguments to be passed when creating the queue. :param int timeout: wait for the server to respond after `timeout` .. 
py:method:: Channel.queue_bind(queue_name, exchange_name, routing_key, no_wait, arguments, timeout) Coroutine, bind a `queue` to an `exchange` :param str queue_name: the queue to receive message from. :param str exchange_name: the exchange to bind the queue to. :param str routing_key: the routing_key to route message. :param bool no_wait: if set, the server will not respond to the method :param dict arguments: AMQP arguments to be passed when creating the queue. :param int timeout: wait for the server to respond after `timeout` This simple example creates a `queue`, an `exchange` and binds them together. .. code-block:: python channel = await protocol.channel() await channel.queue_declare(queue_name='queue') await channel.exchange_declare(exchange_name='exchange') await channel.queue_bind('queue', 'exchange', routing_key='') .. py:method:: Channel.queue_unbind(queue_name, exchange_name, routing_key, arguments, timeout) Coroutine, unbind a queue and an exchange. :param str queue_name: the queue to receive message from. :param str exchange_name: the exchange to bind the queue to. :param str routing_key: the routing_key to route message. :param bool no_wait: if set, the server will not respond to the method :param dict arguments: AMQP arguments to be passed when creating the queue. :param int timeout: wait for the server to respond after `timeout` .. py:method:: Channel.queue_purge(queue_name, no_wait, timeout) Coroutine, purge a queue :param str queue_name: the queue to receive message from. Exchanges --------- Exchanges are used to correctly route messages to queues: a `publisher` publishes a message into an exchange, which routes the message to the corresponding queue. ..
py:method:: Channel.exchange_declare(exchange_name, type_name, passive, durable, auto_delete, no_wait, arguments, timeout) -> dict Coroutine, creates or checks an exchange on the broker :param str exchange_name: the exchange to receive message from :param str type_name: the exchange type (fanout, direct, topics ...) :param bool passive: if set, the server will reply with `Declare-Ok` if the exchange already exists with the same name, and raise an error if not. Checks for the same parameter as well. :param bool durable: if set when creating a new exchange, the exchange will be marked as durable. Durable exchanges remain active when a server restarts. :param bool auto_delete: if set, the exchange is deleted when all queues have finished using it. :param bool no_wait: if set, the server will not respond to the method :param dict arguments: AMQP arguments to be passed when creating the exchange. :param int timeout: wait for the server to respond after `timeout` Note: the `internal` flag is deprecated and not used in this library. .. code-block:: python channel = await protocol.channel() await channel.exchange_declare(exchange_name='exchange', auto_delete=True) .. py:method:: Channel.exchange_delete(exchange_name, if_unused, no_wait, timeout) Coroutine, delete a exchange on the broker :param str exchange_name: the exchange to receive message from :param bool if_unused: the exchange is deleted if it has no consumers. Raise if not. :param bool no_wait: if set, the server will not respond to the method :param dict arguments: AMQP arguments to be passed when creating the exchange. :param int timeout: wait for the server to respond after `timeout` .. py:method:: Channel.exchange_bind(exchange_destination, exchange_source, routing_key, no_wait, arguments, timeout) Coroutine, binds two exchanges together :param str exchange_destination: specifies the name of the destination exchange to bind :param str exchange_source: specified the name of the source exchange to bind. 
:param str routing_key: the routing_key to route message. :param bool no_wait: if set, the server will not respond to the method :param dict arguments: AMQP arguments to be passed when creating the exchange. :param int timeout: wait for the server to respond after `timeout` .. py:method:: Channel.exchange_unbind(exchange_destination, exchange_source, routing_key, no_wait, arguments, timeout) Coroutine, unbind an exchange from an exchange. :param str exchange_destination: specifies the name of the destination exchange to bind :param str exchange_source: specifies the name of the source exchange to bind. :param str routing_key: the routing_key to route message. :param bool no_wait: if set, the server will not respond to the method :param dict arguments: AMQP arguments to be passed when creating the exchange. :param int timeout: wait for the server to respond after `timeout` aioamqp-aioamqp-0.15.0/docs/changelog.rst000066400000000000000000000103661422314215600202700ustar00rootroot00000000000000Changelog ========= Aioamqp 0.15.0 -------------- * Add support for Python 3.9 and 3.10. * Drop support for Python 3.5 and 3.6. * Fix annoying auth method warning because of a wrong defined default argument (closes #214). * Support ``amqps://`` URLs. * Properly handle disabled heartbeats. * Properly handle concurrent calls to ``basic_cancel``. * Drastically reduce overhead of heartbeats. * Drop support for non-bytes payloads in ``basic_publish``. Aioamqp 0.14.0 -------------- * Fix ``waiter already exist`` issue when creating multiple queues (closes #105). * Rename ``type`` to ``message_type`` in constant.Properties object to be full compatible with pamqp. * Add python 3.8 support. Aioamqp 0.13.0 -------------- * SSL Connections must be configured with an SSLContext object in ``connect`` and ``from_url`` (closes #142). * Uses pamqp to encode or decode protocol frames. * Drops support of python 3.3 and python 3.4.
* Uses async and await keywords. * Fix pamqp `_frame_parts` call, now uses exposed `frame_parts` Aioamqp 0.12.0 -------------- * Fix an issue to use correct int encoder depending on int size (closes #180). * Call user-specified callback when a consumer is cancelled. Aioamqp 0.11.0 -------------- * Fix publish str payloads. Support will be removed in next major release. * Support for ``basic_return`` (closes #158). * Support for missings encoding and decoding types (closes #156). Aioamqp 0.10.0 -------------- * Remove ``timeout`` argument from all channel methods. * Clean up uses of ``no_wait`` argument from most channel methods. * Call ``drain()`` after sending every frame (or group of frames). * Make sure AmqpProtocol behaves identically on 3.4 and 3.5+ wrt EOF reception. Aioamqp 0.9.0 ------------- * Fix server cancel handling (closes #95). * Send "close ok" method on server-initiated close. * Validate internal state before trying to send messages. * Clarify which BSD license we actually use (3-clause). Aioamqp 0.8.2 ------------- * Really turn off heartbeat timers (closes #112). Aioamqp 0.8.1 ------------- * Turn off heartbeat timers when the connection is closed (closes #111). * Fix tests with python 3.5.2 (closes #107). * Properly handle unlimited sized payloads (closes #103). * API fixes in the documentation (closes #102, #110). * Add frame properties to returned value from ``basic_get()`` (closes #100). Aioamqp 0.8.0 ------------- * Complete overhaul of heartbeat (closes #96). * Prevent closing channels multiple times (inspired by PR #88). Aioamqp 0.7.0 ------------- * Add ``basic_client_nack`` and ``recover`` method (PR #72). * Sends ``server-close-ok`` in response to a ``server-close``. * Disable Nagle algorithm in ``connect`` (closes #70). * Handle ``CONNECTION_CLOSE`` during initial protocol handshake (closes #80). * Supports for python 3.5. * Few code refactors. * Dispatch does not catch ``KeyError`` anymore. 
Aioamqp 0.6.0 ------------- * The ``client_properties`` is now fully configurable. * Add more documentation. * Simplify the channel API: ``queue_name`` arg is no more required to declare a queue. ``basic_qos`` arguments are now optional. Aioamqp 0.5.1 ------------- * Fixes packaging issues when uploading to pypi. Aioamqp 0.5.0 ------------- * Add possibility to pass extra keyword arguments to protocol_factory when from_url is used to create a connection. * Add SSL support. * Support connection metadata customization, closes #40. * Remove the use of rabbitmqctl in tests. * Reduce the memory usage for channel recycling, closes #43. * Add the usage of a previously created eventloop, closes #56. * Removes the checks for coroutine callbacks, closes #55. * Connection tuning are now configurable. * Add a heartbeat method to know if the connection has fail, closes #3. * Change the callback signature. It now takes the channel as first parameter, closes: #47. Aioamqp 0.4.0 ------------- * Call the error callback on all circumtstances. Aioamqp 0.3.0 ------------- * The consume callback takes now 3 parameters: body, envelope, properties, closes #33. * Channel ids are now recycled, closes #36. Aioamqp 0.2.1 ------------- * connect returns a transport and protocol instance. Aioamqp 0.2.0 ------------- * Use a callback to consume messages. aioamqp-aioamqp-0.15.0/docs/conf.py000066400000000000000000000201471422314215600171040ustar00rootroot00000000000000#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # aioamqp documentation build configuration file, created by # sphinx-quickstart on Wed Mar 19 15:35:53 2014. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. 
import sys import os sys.path.append(os.path.abspath('..')) from aioamqp import version as aioamqp_version # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'aioamqp' copyright = u'2014, Benoît Calvez' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = aioamqp_version.__version__ # The full version, including alpha/beta/rc tags. release = aioamqp_version.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. 
#default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
html_static_path = [] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'aioamqpdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. 
List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'aioamqp.tex', 'aioamqp Documentation', 'Benoît Calvez', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'aioamqp', 'aioamqp Documentation', ['Benoît Calvez'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'aioamqp', 'aioamqp Documentation', 'Benoît Calvez', 'aioamqp', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. 
#texinfo_no_detailmenu = False aioamqp-aioamqp-0.15.0/docs/examples/000077500000000000000000000000001422314215600174175ustar00rootroot00000000000000aioamqp-aioamqp-0.15.0/docs/examples/hello_world.rst000066400000000000000000000031501422314215600224620ustar00rootroot00000000000000"Hello World!" : The simplest thing that does something ======================================================= Sending ------- Our first script to send a single message to the queue. Creating a new connection: .. code-block:: python import asyncio import aioamqp async def connect(): transport, protocol = await aioamqp.connect() channel = await protocol.channel() asyncio.get_event_loop().run_until_complete(connect()) This first scripts shows how to create a new connection to the `AMQP` broker. Now we have to declare a new queue to receive our messages: .. code-block:: python await channel.queue_declare(queue_name='hello') We're now ready to publish message on to this queue: .. code-block:: python await channel.basic_publish( payload='Hello World!', exchange_name='', routing_key='hello' ) We can now close the connection to rabbit: .. code-block:: python # close using the `AMQP` protocol await protocol.close() # ensure the socket is closed. transport.close() You can see the full example in the file `example/send.py`. Receiving --------- We now want to unqueue the message in the consumer side. We have to ensure the queue is created. Queue declaration is indempotant. .. code-block:: python await channel.queue_declare(queue_name='hello') To consume a message, the library calls a callback (which **MUST** be a coroutine): .. code-block:: python async def callback(channel, body, envelope, properties): print(body) await channel.basic_consume(callback, queue_name='hello', no_ack=True) aioamqp-aioamqp-0.15.0/docs/examples/index.rst000066400000000000000000000020531422314215600212600ustar00rootroot00000000000000Examples ======== Those examples are ported from the `RabbitMQ tutorial `_. 
They are specific to `aioamqp` and use `coroutines` exclusively. Please read both documentation together, as the official documentation explains how to use the AMQP protocol correctly. Do not hesitate to use RabbitMQ `Shiny management interfaces `_, it really helps to understand which message is stored in which queues, and which consumer unqueues what queue. Using docker, you can run RabbitMQ using the following command line. Using this command line you will be able to run the examples and access the `RabbitMQ management interface `_ using the login `guest` and the password `guest`. .. code-block:: shell docker run -d --log-driver=syslog -e RABBITMQ_NODENAME=my-rabbit --name rabbitmq -p 5672:5672 -p 15672:15672 rabbitmq:3-management Contents: .. toctree:: :maxdepth: 2 hello_world work_queue publish_subscribe routing topics rpc aioamqp-aioamqp-0.15.0/docs/examples/publish_subscribe.rst000066400000000000000000000022631422314215600236630ustar00rootroot00000000000000Publish Subscribe : Sending messages to many consumers at once ============================================================== This part of the tutorial introduces `exchange`. The `emit_log.py` script publishes messages into a `fanout` exchange. Then the `receive_log.py` script creates a temporary queue (which is deleted on the disconnection). If the script `receive_log.py` is run multiple times, all the instances will receive the messages emitted by `emit_log`. Publisher --------- The publisher creates a new `fanout` exchange: .. code-block:: python await channel.exchange_declare(exchange_name='logs', type_name='fanout') And publishes messages into that exchange: .. code-block:: python await channel.basic_publish(message, exchange_name='logs', routing_key='') Consumer -------- The consumer creates a temporary queue and binds it to the exchange. ..
code-block:: python await channel.exchange(exchange_name='logs', type_name='fanout') # let RabbitMQ generate a random queue name result = await channel.queue(queue_name='', exclusive=True) queue_name = result['queue'] await channel.queue_bind(exchange_name='logs', queue_name=queue_name, routing_key='') aioamqp-aioamqp-0.15.0/docs/examples/routing.rst000066400000000000000000000025721422314215600216460ustar00rootroot00000000000000Routing : Receiving messages selectively ======================================== Routing is an interesting concept in RabbitMQ/AMQP: in this tutorial, messages are published to a `direct` exchange with a specific routing_key (the log `severity` The `consumer` create a queue, binds the queue to the exchange and specifies the severity he wants to receive. Publisher --------- The publisher creater the `direct` exchange: .. code-block:: python await channel.exchange(exchange_name='direct_logs', type_name='direct') Message are published into that exchange and routed using the severity for instance: .. code-block:: python await channel.publish(message, exchange_name='direct_logs', routing_key='info') Consumer -------- The consumer may subscribe to multiple severities. To accomplish this purpose, it create a queue bind this queue multiple time using the `(exchange_name, routing_key)` configuration: .. code-block:: python result = await channel.queue(queue_name='', durable=False, auto_delete=True) queue_name = result['queue'] severities = sys.argv[1:] if not severities: print("Usage: %s [info] [warning] [error]" % (sys.argv[0],)) sys.exit(1) for severity in severities: await channel.queue_bind( exchange_name='direct_logs', queue_name=queue_name, routing_key=severity, ) aioamqp-aioamqp-0.15.0/docs/examples/rpc.rst000066400000000000000000000033141422314215600207360ustar00rootroot00000000000000RPC: Remote procedure call implementation ========================================= This tutorial will try to implement the RPC as in the RabbitMQ's tutorial. 
The API will probably look like: .. code-block:: python fibonacci_rpc = FibonacciRpcClient() result = await fibonacci_rpc.call(4) print("fib(4) is %r" % result) Client ------ In this case it's no more a producer but a Client: we will send a message in a queue and wait for a response in another. For that purpose, we publish a message to the `rpc_queue` and add a `reply_to` properties to let the server know where to respond. .. code-block:: python result = await channel.queue_declare(exclusive=True) callback_queue = result['queue'] channel.basic_publish( exchange='', routing_key='rpc_queue', properties={ 'reply_to': callback_queue, }, body=request, ) Note: the client use a `waiter` (an asyncio.Event) which will be set when receiving a response from the previously sent message. Server ------ When unqueing a message, the server will publish a response directly in the callback. The `correlation_id` is used to let the client know it's a response from this request. .. code-block:: python async def on_request(channel, body, envelope, properties): n = int(body) print(" [.] fib(%s)" % n) response = fib(n) await channel.basic_publish( payload=str(response), exchange_name='', routing_key=properties.reply_to, properties={ 'correlation_id': properties.correlation_id, }, ) await channel.basic_client_ack(delivery_tag=envelope.delivery_tag) aioamqp-aioamqp-0.15.0/docs/examples/topics.rst000066400000000000000000000022021422314215600214460ustar00rootroot00000000000000Topics : Receiving messages based on a pattern ============================================== Topics are another exchange type. It allows message routing depending on a pattern, to route a message for multiple criteria. We're going to use a topic exchange in our logging system. We'll start off with a working assumption that the routing keys of logs will have two words: ".". Publisher --------- The publisher prepares the exchange and publish messages using a routing_key which will be matched by later filters .. 
code-block:: python await channel.exchange('topic_logs', 'topic') await channel.publish(message, exchange_name=exchange_name, routing_key='anonymous.info') await channel.publish(message, exchange_name=exchange_name, routing_key='kern.critical') Consumer -------- The consumer selects the combination of 'facility'/'severity' he wants to subscribe to: .. code-block:: python for binding_key in ("*.critical", "nginx.*"): await channel.queue_bind( exchange_name='topic_logs', queue_name=queue_name, routing_key=binding_key ) aioamqp-aioamqp-0.15.0/docs/examples/work_queue.rst000066400000000000000000000032371422314215600223440ustar00rootroot00000000000000Work Queues : Distributing tasks among workers ============================================== The main purpose of this part of the tutorial is to `ack` a message in RabbitMQ only when it's really processed by a worker. new_task -------- This publisher creates a queue with the `durable` flag and publishes a message with the property `persistent`. .. code-block:: python await channel.queue('task_queue', durable=True) await channel.basic_publish( payload=message, exchange_name='', routing_key='task_queue', properties={ 'delivery_mode': 2, }, ) worker ------ The purpose of this worker is to simulate a resource consuming execution which delays the processing of the other messages. The worker declares the queue with the exact same arguments as the `new_task` producer. .. code-block:: python await channel.queue('task_queue', durable=True) Then, the worker configures the `QOS`: it specifies how the worker unqueues messages. .. code-block:: python await channel.basic_qos(prefetch_count=1, prefetch_size=0, connection_global=False) Finally we have to create a callback that will `ack` the message to mark it as `processed`. Note: the code in the callback calls `asyncio.sleep` to simulate an asyncio compatible task that takes time. To simulate a CPU intensive task that blocks the eventloop, use `time.sleep` instead. ..
code-block:: python async def callback(channel, body, envelope, properties): print(" [x] Received %r" % body) await asyncio.sleep(body.count(b'.')) print(" [x] Done") await channel.basic_client_ack(delivery_tag=envelope.delivery_tag) aioamqp-aioamqp-0.15.0/docs/index.rst000066400000000000000000000010121422314215600174360ustar00rootroot00000000000000.. aioamqp documentation master file, created by sphinx-quickstart on Wed Mar 19 15:35:53 2014. Welcome to aioamqp's documentation ================================== Aioamqp is a library to connect to an amqp broker. It uses asyncio under the hood. Limitations =========== For the moment, aioamqp is tested against Rabbitmq. Contents: .. toctree:: :maxdepth: 2 introduction api examples/index changelog Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` aioamqp-aioamqp-0.15.0/docs/introduction.rst000066400000000000000000000005651422314215600210600ustar00rootroot00000000000000Introduction ============ Aioamqp library is a pure-Python implementation of the AMQP 0.9.1 protocol using `asyncio`. Prerequisites ------------- Aioamqp works only with python >= 3.7 using asyncio library. Installation ------------ You can install the most recent aioamqp release from pypi using pip or easy_install: ..
code-block:: shell pip install aioamqp aioamqp-aioamqp-0.15.0/examples/000077500000000000000000000000001422314215600164675ustar00rootroot00000000000000aioamqp-aioamqp-0.15.0/examples/emit_log.py000077500000000000000000000015011422314215600206400ustar00rootroot00000000000000#!/usr/bin/env python """ RabbitMQ.com pub/sub example https://www.rabbitmq.com/tutorials/tutorial-three-python.html """ import asyncio import aioamqp import sys async def exchange_routing(): try: transport, protocol = await aioamqp.connect('localhost', 5672) except aioamqp.AmqpClosedConnection: print("closed connections") return channel = await protocol.channel() exchange_name = 'logs' message = ' '.join(sys.argv[1:]) or "info: Hello World!" await channel.exchange_declare(exchange_name=exchange_name, type_name='fanout') await channel.basic_publish(message, exchange_name=exchange_name, routing_key='') print(" [x] Sent %r" % (message,)) await protocol.close() transport.close() asyncio.get_event_loop().run_until_complete(exchange_routing()) aioamqp-aioamqp-0.15.0/examples/emit_log_direct.py000077500000000000000000000015341422314215600222000ustar00rootroot00000000000000#!/usr/bin/env python """ Rabbitmq.com pub/sub example https://www.rabbitmq.com/tutorials/tutorial-four-python.html """ import asyncio import aioamqp import sys async def exchange_routing(): try: transport, protocol = await aioamqp.connect('localhost', 5672) except aioamqp.AmqpClosedConnection: print("closed connections") return channel = await protocol.channel() exchange_name = 'direct_logs' severity = sys.argv[1] if len(sys.argv) > 1 else 'info' message = ' '.join(sys.argv[2:]) or 'Hello World!' 
await channel.exchange(exchange_name, 'direct') await channel.publish(message, exchange_name=exchange_name, routing_key=severity) print(" [x] Sent %r" % (message,)) await protocol.close() transport.close() asyncio.get_event_loop().run_until_complete(exchange_routing()) aioamqp-aioamqp-0.15.0/examples/emit_log_topic.py000077500000000000000000000015631422314215600220460ustar00rootroot00000000000000#!/usr/bin/env python """ Rabbitmq.com pub/sub example https://www.rabbitmq.com/tutorials/tutorial-five-python.html """ import asyncio import aioamqp import sys async def exchange_routing_topic(): try: transport, protocol = await aioamqp.connect('localhost', 5672) except aioamqp.AmqpClosedConnection: print("closed connections") return channel = await protocol.channel() exchange_name = 'topic_logs' message = ' '.join(sys.argv[2:]) or 'Hello World!' routing_key = sys.argv[1] if len(sys.argv) > 1 else 'anonymous.info' await channel.exchange(exchange_name, 'topic') await channel.publish(message, exchange_name=exchange_name, routing_key=routing_key) print(" [x] Sent %r" % message) await protocol.close() transport.close() asyncio.get_event_loop().run_until_complete(exchange_routing_topic()) aioamqp-aioamqp-0.15.0/examples/new_task.py000077500000000000000000000013511422314215600206570ustar00rootroot00000000000000#!/usr/bin/env python import asyncio import aioamqp import sys async def new_task(): try: transport, protocol = await aioamqp.connect('localhost', 5672) except aioamqp.AmqpClosedConnection: print("closed connections") return channel = await protocol.channel() await channel.queue('task_queue', durable=True) message = ' '.join(sys.argv[1:]) or "Hello World!" 
await channel.basic_publish( payload=message, exchange_name='', routing_key='task_queue', properties={ 'delivery_mode': 2, }, ) print(" [x] Sent %r" % message,) await protocol.close() transport.close() asyncio.get_event_loop().run_until_complete(new_task()) aioamqp-aioamqp-0.15.0/examples/receive.py000066400000000000000000000011101422314215600204540ustar00rootroot00000000000000""" Hello world `receive.py` example implementation using aioamqp. See the documentation for more informations. """ import asyncio import aioamqp async def callback(channel, body, envelope, properties): print(" [x] Received %r" % body) async def receive(): transport, protocol = await aioamqp.connect() channel = await protocol.channel() await channel.queue_declare(queue_name='hello') await channel.basic_consume(callback, queue_name='hello') event_loop = asyncio.get_event_loop() event_loop.run_until_complete(receive()) event_loop.run_forever() aioamqp-aioamqp-0.15.0/examples/receive_log.py000066400000000000000000000021121422314215600213200ustar00rootroot00000000000000#!/usr/bin/env python """ Rabbitmq.com pub/sub example https://www.rabbitmq.com/tutorials/tutorial-three-python.html """ import asyncio import aioamqp import random async def callback(channel, body, envelope, properties): print(" [x] %r" % body) async def receive_log(): try: transport, protocol = await aioamqp.connect('localhost', 5672) except aioamqp.AmqpClosedConnection: print("closed connections") return channel = await protocol.channel() exchange_name = 'logs' await channel.exchange(exchange_name=exchange_name, type_name='fanout') # let RabbitMQ generate a random queue name result = await channel.queue(queue_name='', exclusive=True) queue_name = result['queue'] await channel.queue_bind(exchange_name=exchange_name, queue_name=queue_name, routing_key='') print(' [*] Waiting for logs. 
To exit press CTRL+C') await channel.basic_consume(callback, queue_name=queue_name, no_ack=True) event_loop = asyncio.get_event_loop() event_loop.run_until_complete(receive_log()) event_loop.run_forever() aioamqp-aioamqp-0.15.0/examples/receive_log_direct.py000066400000000000000000000027461422314215600226670ustar00rootroot00000000000000#!/usr/bin/env python """ Rabbitmq.com pub/sub example https://www.rabbitmq.com/tutorials/tutorial-four-python.html """ import asyncio import aioamqp import random import sys async def callback(channel, body, envelope, properties): print("consumer {} recved {} ({})".format(envelope.consumer_tag, body, envelope.delivery_tag)) async def receive_log(waiter): try: transport, protocol = await aioamqp.connect('localhost', 5672) except aioamqp.AmqpClosedConnection: print("closed connections") return channel = await protocol.channel() exchange_name = 'direct_logs' await channel.exchange(exchange_name, 'direct') result = await channel.queue(queue_name='', durable=False, auto_delete=True) queue_name = result['queue'] severities = sys.argv[1:] if not severities: print("Usage: %s [info] [warning] [error]" % (sys.argv[0],)) sys.exit(1) for severity in severities: await channel.queue_bind( exchange_name='direct_logs', queue_name=queue_name, routing_key=severity, ) print(' [*] Waiting for logs. 
To exit press CTRL+C') await asyncio.wait_for(channel.basic_consume(callback, queue_name=queue_name), timeout=10) await waiter.wait() await protocol.close() transport.close() loop = asyncio.get_event_loop() try: waiter = asyncio.Event() loop.run_until_complete(receive_log(waiter)) except KeyboardInterrupt: waiter.set() aioamqp-aioamqp-0.15.0/examples/receive_log_topic.py000066400000000000000000000025071422314215600225260ustar00rootroot00000000000000#!/usr/bin/env python """ Rabbitmq.com pub/sub example https://www.rabbitmq.com/tutorials/tutorial-five-python.html """ import asyncio import aioamqp import random import sys async def callback(channel, body, envelope, properties): print("consumer {} received {} ({})".format(envelope.consumer_tag, body, envelope.delivery_tag)) async def receive_log(): try: transport, protocol = await aioamqp.connect('localhost', 5672) except aioamqp.AmqpClosedConnection: print("closed connections") return channel = await protocol.channel() exchange_name = 'topic_logs' await channel.exchange(exchange_name, 'topic') result = await channel.queue(queue_name='', durable=False, auto_delete=True) queue_name = result['queue'] binding_keys = sys.argv[1:] if not binding_keys: print("Usage: %s [binding_key]..." % (sys.argv[0],)) sys.exit(1) for binding_key in binding_keys: await channel.queue_bind( exchange_name='topic_logs', queue_name=queue_name, routing_key=binding_key ) print(' [*] Waiting for logs. 
To exit press CTRL+C') await channel.basic_consume(callback, queue_name=queue_name) event_loop = asyncio.get_event_loop() event_loop.run_until_complete(receive_log()) event_loop.run_forever() aioamqp-aioamqp-0.15.0/examples/rpc_client.py000066400000000000000000000034561422314215600211730ustar00rootroot00000000000000#!/usr/bin/env python """ RPC client, aioamqp implementation of RPC examples from RabbitMQ tutorial """ import asyncio import uuid import aioamqp class FibonacciRpcClient(object): def __init__(self): self.transport = None self.protocol = None self.channel = None self.callback_queue = None self.waiter = asyncio.Event() async def connect(self): """ an `__init__` method can't be a coroutine""" self.transport, self.protocol = await aioamqp.connect() self.channel = await self.protocol.channel() result = await self.channel.queue_declare(queue_name='', exclusive=True) self.callback_queue = result['queue'] await self.channel.basic_consume( self.on_response, no_ack=True, queue_name=self.callback_queue, ) async def on_response(self, channel, body, envelope, properties): if self.corr_id == properties.correlation_id: self.response = body self.waiter.set() async def call(self, n): if not self.protocol: await self.connect() self.response = None self.corr_id = str(uuid.uuid4()) await self.channel.basic_publish( payload=str(n), exchange_name='', routing_key='rpc_queue', properties={ 'reply_to': self.callback_queue, 'correlation_id': self.corr_id, }, ) await self.waiter.wait() await self.protocol.close() return int(self.response) async def rpc_client(): fibonacci_rpc = FibonacciRpcClient() print(" [x] Requesting fib(30)") response = await fibonacci_rpc.call(30) print(" [.] 
Got %r" % response) asyncio.get_event_loop().run_until_complete(rpc_client()) aioamqp-aioamqp-0.15.0/examples/rpc_server.py000066400000000000000000000022001422314215600212050ustar00rootroot00000000000000""" RPC server, aioamqp implementation of RPC examples from RabbitMQ tutorial """ import asyncio import aioamqp def fib(n): if n == 0: return 0 elif n == 1: return 1 else: return fib(n-1) + fib(n-2) async def on_request(channel, body, envelope, properties): n = int(body) print(" [.] fib(%s)" % n) response = fib(n) await channel.basic_publish( payload=str(response), exchange_name='', routing_key=properties.reply_to, properties={ 'correlation_id': properties.correlation_id, }, ) await channel.basic_client_ack(delivery_tag=envelope.delivery_tag) async def rpc_server(): transport, protocol = await aioamqp.connect() channel = await protocol.channel() await channel.queue_declare(queue_name='rpc_queue') await channel.basic_qos(prefetch_count=1, prefetch_size=0, connection_global=False) await channel.basic_consume(on_request, queue_name='rpc_queue') print(" [x] Awaiting RPC requests") event_loop = asyncio.get_event_loop() event_loop.run_until_complete(rpc_server()) event_loop.run_forever() aioamqp-aioamqp-0.15.0/examples/send.py000066400000000000000000000011101422314215600177630ustar00rootroot00000000000000""" Hello world `send.py` example implementation using aioamqp. See the documentation for more informations. 
""" import asyncio import aioamqp async def send(): transport, protocol = await aioamqp.connect() channel = await protocol.channel() await channel.queue_declare(queue_name='hello') await channel.basic_publish( payload='Hello World!', exchange_name='', routing_key='hello' ) print(" [x] Sent 'Hello World!'") await protocol.close() transport.close() asyncio.get_event_loop().run_until_complete(send()) aioamqp-aioamqp-0.15.0/examples/send_with_return.py000066400000000000000000000021251422314215600224240ustar00rootroot00000000000000""" Hello world `send.py` example implementation using aioamqp. See the documentation for more informations. If there is no queue listening for the routing key, the message will get returned. """ import asyncio import aioamqp async def handle_return(channel, body, envelope, properties): print('Got a returned message with routing key: {}.\n' 'Return code: {}\n' 'Return message: {}\n' 'exchange: {}'.format(envelope.routing_key, envelope.reply_code, envelope.reply_text, envelope.exchange_name)) async def send(): transport, protocol = await aioamqp.connect() channel = await protocol.channel(return_callback=handle_return) await channel.queue_declare(queue_name='hello') await channel.basic_publish( payload='Hello World!', exchange_name='', routing_key='helo', # typo on purpose, will cause the return mandatory=True, ) print(" [x] Sent 'Hello World!'") await protocol.close() transport.close() asyncio.get_event_loop().run_until_complete(send()) aioamqp-aioamqp-0.15.0/examples/worker.py000066400000000000000000000016101422314215600203500ustar00rootroot00000000000000#!/usr/bin/env python """ Worker example from the 2nd tutorial """ import asyncio import aioamqp import sys async def callback(channel, body, envelope, properties): print(" [x] Received %r" % body) await asyncio.sleep(body.count(b'.')) print(" [x] Done") await channel.basic_client_ack(delivery_tag=envelope.delivery_tag) async def worker(): try: transport, protocol = await 
aioamqp.connect('localhost', 5672) except aioamqp.AmqpClosedConnection: print("closed connections") return channel = await protocol.channel() await channel.queue(queue_name='task_queue', durable=True) await channel.basic_qos(prefetch_count=1, prefetch_size=0, connection_global=False) await channel.basic_consume(callback, queue_name='task_queue') event_loop = asyncio.get_event_loop() event_loop.run_until_complete(worker()) event_loop.run_forever() aioamqp-aioamqp-0.15.0/release.conf000066400000000000000000000001121422314215600171320ustar00rootroot00000000000000[Defaults] name = aioamqp version_variable = __version__ no_upload = true aioamqp-aioamqp-0.15.0/requirements_dev.txt000066400000000000000000000001321422314215600207670ustar00rootroot00000000000000-e . asynctest coverage pylint pytest>4 pytest-timeout Sphinx sphinx-rtd-theme pyrabbit2 aioamqp-aioamqp-0.15.0/setup.cfg000066400000000000000000000000601422314215600164660ustar00rootroot00000000000000[bdist_wheel] python-tag = py37.py38.py39.py310 aioamqp-aioamqp-0.15.0/setup.py000066400000000000000000000026301422314215600163640ustar00rootroot00000000000000import os import re import setuptools import sys py_version = sys.version_info[:2] _file_content = open(os.path.join(os.path.dirname(__file__), 'aioamqp', 'version.py')).read() rex = re.compile(r"""version__ = '(.*)'.*__packagename__ = '(.*)'""", re.MULTILINE | re.DOTALL) VERSION, PACKAGE_NAME = rex.search(_file_content).groups() description = 'AMQP implementation using asyncio' setuptools.setup( name=PACKAGE_NAME, version=VERSION, author="Polyconseil dev' team", author_email='opensource+aioamqp@polyconseil.fr', url='https://github.com/polyconseil/aioamqp', description=description, long_description=open('README.rst').read(), keywords=['asyncio', 'amqp', 'rabbitmq', 'aio'], download_url='https://pypi.python.org/pypi/aioamqp', packages=[ 'aioamqp', ], install_requires=[ 'pamqp>=3.1.0', ], python_requires=">=3.7", classifiers=[ "Development Status :: 4 - Beta", 
"Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", ], platforms='all', license='BSD' ) aioamqp-aioamqp-0.15.0/tox.ini000066400000000000000000000002761422314215600161710ustar00rootroot00000000000000[tox] envlist = py37, py38, py39, py310 skipsdist = true skip_missing_interpreters = true [testenv] whitelist_externals = bash deps = -rrequirements_dev.txt commands = pytest -v -s