pax_global_header00006660000000000000000000000064143226016670014521gustar00rootroot0000000000000052 comment=42760f9be5f66fd2dbf9b2e77fc84800a481b905 channels-4.0.0/000077500000000000000000000000001432260166700133155ustar00rootroot00000000000000channels-4.0.0/.coveragerc000066400000000000000000000002301432260166700154310ustar00rootroot00000000000000[run] branch = True source = channels omit = tests/* [report] show_missing = True skip_covered = True omit = tests/* [html] directory = coverage_html channels-4.0.0/.github/000077500000000000000000000000001432260166700146555ustar00rootroot00000000000000channels-4.0.0/.github/CODE_OF_CONDUCT.md000066400000000000000000000001311432260166700174470ustar00rootroot00000000000000This project follows [Django's Code of Conduct](https://www.djangoproject.com/conduct/). channels-4.0.0/.github/FUNDING.yml000066400000000000000000000001061432260166700164670ustar00rootroot00000000000000 custom: https://www.djangoproject.com/fundraising/ github: [django] channels-4.0.0/.github/ISSUE_TEMPLATE.md000066400000000000000000000014251432260166700173640ustar00rootroot00000000000000Issues are for **concrete, actionable bugs and feature requests** only - if you're just asking for debugging help or technical support we have to direct you elsewhere. If you just have questions or support requests please use: - Stack Overflow - The Django Users mailing list django-users@googlegroups.com (https://groups.google.com/forum/#!forum/django-users) We have to limit this because of limited volunteer time to respond to issues! Please also try and include, if you can: - Your OS and runtime environment, and browser if applicable - A `pip freeze` output showing your package versions - What you expected to happen vs. what actually happened - How you're running Channels (runserver? daphne/runworker? Nginx/Apache in front?) 
- Console logs and full tracebacks of any errors channels-4.0.0/.github/SECURITY.md000066400000000000000000000001201432260166700164370ustar00rootroot00000000000000# Django Security Policies Please see https://www.djangoproject.com/security/. channels-4.0.0/.github/workflows/000077500000000000000000000000001432260166700167125ustar00rootroot00000000000000channels-4.0.0/.github/workflows/tests.yml000066400000000000000000000021371432260166700206020ustar00rootroot00000000000000name: Tests on: push: branches: - main pull_request: jobs: tests: name: Python ${{ matrix.python-version }} runs-on: ubuntu-latest strategy: fail-fast: false matrix: python-version: - "3.7" - "3.8" - "3.9" - "3.10" steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip wheel setuptools python -m pip install --upgrade tox tox-py - name: Run tox targets for ${{ matrix.python-version }} run: tox --py current lint: name: Lint runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Set up Python uses: actions/setup-python@v2 with: python-version: 3.9 - name: Install dependencies run: | python -m pip install --upgrade pip tox - name: Run lint run: tox -e qa channels-4.0.0/.gitignore000066400000000000000000000003351432260166700153060ustar00rootroot00000000000000*.egg-info dist/ build/ docs/_build __pycache__/ .cache *.sqlite3 .tox/ *.swp *.pyc .coverage* .pytest_cache TODO node_modules # Pipenv Pipfile Pipfile.lock # IDE and Tooling files .idea/* *~ .vscode # macOS .DS_Store channels-4.0.0/CHANGELOG.txt000066400000000000000000000420151432260166700153470ustar00rootroot00000000000000Full release notes, with more details and upgrade information, are available at: https://channels.readthedocs.io/en/latest/releases 4.0.0 (2022-10-15) ------------------ Channels 4 is the next major version of the Channels package. 
Together with the matching Daphne v4 and channels-redis v4 releases, it updates dependencies, fixes issues, and removes outdated code. It so provides the foundation for Channels development going forward. In most cases, you can update now by updating ``channels``, ``daphne``, and ``channels-redis`` as appropriate, with ``pip``, and by adding ``daphne`` at the top of your ``INSTALLED_APPS`` setting. First ``pip``:: pip install -U 'channels[dapne]' channels-redis Then in your Django settings file:: INSTALLED_APPS = [ "daphne", ... ] Again, this is a major version change. Amongst other changes, large amounts of the Django-wrapping code deprecated in Channels v3 has now been removed, in favour of Django's own ASGI handling, and the ``runserver`` command has been moved into the Daphne package. Please ensure to review the `Version 4.0.0 release notes `_ for full details. 3.0.5 (2022-06-24) ------------------ Channels 3.0.5 is a bugfix release in the 3.0 series. Please see the `Version 3.0.5 release notes `_ for full details. 3.0.4 (2021-07-11) ------------------ Channels 3.0.4 is a bugfix release in the 3.0 series. Please see the `Version 3.0.4 release notes `_ for full details. 3.0.3 (2020-12-28) ------------------ * Fixed a bug in Channels 3.0 where the legacy ``channels.http.AsgiHandler`` would not correctly isolate per-request scopes. This is a security release for CVE-2020-35681. Please see the `Version 3.0.3 release notes `_ for full details. 3.0.2 (2020-11-9) ----------------- * Fixes a bug in Channels 3.0 where ``StaticFilesWrapper`` was not updated to the ASGI 3 single-callable interface. * Users of the ``runworker`` command should ensure to update ``asgiref`` to version 3.3.1 or later. 3.0.1 (2020-11-4) ----------------- * Fixes a bug in Channels 3.0 where ``SessionMiddleware`` would not correctly isolate per-instance scopes. 3.0.0 (2020-10-30) ------------------ Updated to ASGI v3, and added support for Django 3.0+. 
This is a major version change requiring updates to consumers and middleware. Please see the full `Version 3.0.0 release notes `_ for details. 2.4.0 (2019-12-18) ------------------ * Wraps session save calls in ``database_sync_to_async()``, for compatibility with Django 3.0's ``async_unsafe()`` checks. * Drops compatibility with all Django versions lower than 2.2. 2.3.1 (2019-10-23) ------------------ * Adds compatibility with Python 3.8. 2.3.0 (2019-09-18) ------------------ * Adjusted ``AsgiHandler`` HTTP body handling to use a spooled temporary file, rather than reading the whole request body into memory. As a result, ``AsgiRequest.__init__()`` is adjusted to expect a file-like ``stream``, rather than the whole ``body`` as bytes. Test cases instantiating requests directly will likely need to be updated to wrap the provided body in, e.g., `io.BytesIO`. 2.2.0 (2019-04-14) ------------------ * Updated requirements for ASGI v3 and Daphne 2.3. 2.1.7 (2019-01-31) ------------------ * HTTP request body size limit is now enforced * database_sync_to_async now closes old connections before it runs code * Auth middleware closes old connections before it runs 2.1.6 (2018-12-08) ------------------ * HttpCommunicator now extracts query strings correctly * AsyncHttpConsumer provides channel layer attributes * Prevent late-Daphne import errors 2.1.5 (2018-10-22) ------------------ * Django middleware caching now works on Django 1.11 and Django 2.0. The previous release only ran on 2.1. 
2.1.4 (2018-10-19) ------------------ * Django middleware is now cached rather than instantiated per request resulting in a significant speed improvement * ChannelServerLiveTestCase now serves static files again * Improved error message resulting from bad Origin headers * runserver logging now goes through the Django logging framework * Generic consumers can now have non-default channel layers * Improved error when accessing scope['user'] before it's ready 2.1.3 (2018-08-16) ------------------ * An ALLOWED_ORIGINS value of "*" will now also allow requests without a Host header at all (especially important for tests) * The request.path value is now correct in cases when a server has SCRIPT_NAME set * Errors that happen inside channel listeners inside a runworker or Worker class are now raised rather than suppressed 2.1.2 (2018-06-13) ------------------ * AsyncHttpConsumer now has a disconnect() method you can override * Session and authentication middleware is now non-blocking. * URL routing context now includes default arguments from the URLconf. * The FORCE_SCRIPT_NAME setting is now respected in ASGI mode. * ALLOWED_HOSTS is now set correctly during LiveServerTests. 2.1.1 (2018-04-18) ------------------ * The scope["user"] object is no longer a lazy object, as this conflicts with any async-based consumers. 2.1.0 (2018-04-11) ------------------ * Async HTTP Consumers and WebSocket Consumers both gained new functionality (groups, subprotocols, and an async HTTP variant) * URLRouters now allow nesting * Async login and logout functions for sessions * Expiry and groups in the in-memory channel layer * Improved Live Server test case * More powerful OriginValidator * Other small changes and fixes in the full release notes. 2.0.2 (2018-02-08) ------------------ * SyncConsumer now terminates old database connections, and there is a new database_sync_to_async wrapper to allow async connections to do the same. 
2.0.1 (2018-02-05) ------------------ * AsyncWebsocketConsumer and AsyncJsonWebsocketConsumer classes added * OriginValidator and AllowedHostsOriginValidator ASGI middleware is now available * URLRouter now correctly resolves long lists of URLs 2.0.0 (2018-02-01) ------------------ * Major backwards-incompatible rewrite to move to an asyncio base and remove the requirement to transport data over the network, as well as overhauled generic consumers, test helpers, routing and more. 1.1.6 (2017-06-28) ------------------ * The ``runserver`` ``server_cls`` override no longer fails with more modern Django versions that pass an ``ipv6`` parameter. 1.1.5 (2017-06-16) ------------------ * The Daphne dependency requirement was bumped to 1.3.0. 1.1.4 (2017-06-15) ------------------ * Pending messages correctly handle retries in backlog situations * Workers in threading mode now respond to ctrl-C and gracefully exit. * ``request.meta['QUERY_STRING']`` is now correctly encoded at all times. * Test client improvements * ``ChannelServerLiveTestCase`` added, allows an equivalent of the Django ``LiveTestCase``. * Decorator added to check ``Origin`` headers (``allowed_hosts_only``) * New ``TEST_CONFIG`` setting in ``CHANNEL_LAYERS`` that allows varying of the channel layer for tests (e.g. using a different Redis install) 1.1.3 (2017-04-05) ------------------ * ``enforce_ordering`` now works correctly with the new-style process-specific channels * ASGI channel layer versions are now explicitly checked for version compatibility 1.1.2 (2017-04-01) ------------------ * Session name hash changed to SHA-1 to satisfy FIPS-140-2. Due to this, please force all WebSockets to reconnect after the upgrade. * `scheme` key in ASGI-HTTP messages now translates into `request.is_secure()` correctly. 
* WebsocketBridge now exposes the underlying WebSocket as `.socket` 1.1.1 (2017-03-19) ------------------ * Fixed JS packaging issue 1.1.0 (2017-03-18) ------------------ * Channels now includes a JavaScript wrapper that wraps reconnection and multiplexing for you on the client side. * Test classes have been moved from ``channels.tests`` to ``channels.test``. * Bindings now support non-integer fields for primary keys on models. * The ``enforce_ordering`` decorator no longer suffers a race condition where it would drop messages under high load. * ``runserver`` no longer errors if the ``staticfiles`` app is not enabled in Django. 1.0.3 (2017-02-01) ------------------ * Database connections are no longer force-closed after each test is run. * Channel sessions are not re-saved if they're empty even if they're marked as modified, allowing logout to work correctly. * WebsocketDemultiplexer now correctly does sessions for the second/third/etc. connect and disconnect handlers. * Request reading timeouts now correctly return 408 rather than erroring out. * The ``rundelay`` delay server now only polls the database once per second, and this interval is configurable with the ``--sleep`` option. 1.0.2 (2017-01-12) ------------------ * Websockets can now be closed from anywhere using the new ``WebsocketCloseException``. There is also a generic ``ChannelSocketException`` so you can do custom behaviours. * Calling ``Channel.send`` or ``Group.send`` from outside a consumer context (i.e. in tests or management commands) will once again send the message immediately. * The base implementation of databinding now correctly only calls ``group_names(instance)``, as documented. 1.0.1 (2017-01-09) ------------------ * WebSocket generic views now accept connections by default in their connect handler for better backwards compatibility. 1.0.0 (2017-01-08) ------------------ * BREAKING CHANGE: WebSockets must now be explicitly accepted or denied. 
See https://channels.readthedocs.io/en/latest/releases/1.0.0.html for more. * BREAKING CHANGE: Demultiplexers have been overhauled to directly dispatch messages rather than using channels to new consumers. Consult the docs on generic consumers for more: https://channels.readthedocs.io/en/latest/generics.html * BREAKING CHANGE: Databinding now operates from implicit group membership, where your code just has to say what groups would be used and Channels will work out if it's a creation, modification or removal from a client's perspective, including with permissions. * Delay protocol server ships with Channels providing a specification on how to delay jobs until later and a reference implementation. * Serializers can now specify fields as `__all__` to auto-include all fields. * Various other small fixes. 0.17.3 (2016-10-12) ------------------- * channel_session now also rehydrates the http session with an option * request.META['PATH_INFO'] is now present * runserver shows Daphne log messages * runserver --nothreading only starts a single worker thread * Databinding changed to call group_names dynamically and imply changed/created from that; other small changes to databinding, and more changes likely. 0.17.2 (2016-08-04) ------------------- * New CHANNELS_WS_PROTOCOLS setting if you want Daphne to accept certain subprotocols * WebsocketBindingWithMembers allows serialization of non-fields on instances * Class-based consumers have an .as_route() method that lets you skip using route_class * Bindings now work if loaded after app ready state 0.17.1 (2016-07-22) ------------------- * Bindings now require that `fields` is defined on the class body so all fields are not sent by default. To restore old behaviour, set it to ['__all__'] * Bindings can now be declared after app.ready() has been called and still work. * Binding payloads now include the model name as `appname.modelname`. * A worker_ready signal now gets triggered when `runworker` starts consuming messages. 
It does not fire from within `runserver`. 0.17.0 (2016-07-19) ------------------- * Data Binding framework is added, which allows easy tying of model changes to WebSockets (and other protocols) and vice-versa. * Standardised WebSocket/JSON multiplexing introduced * WebSocket generic consumers now have a 'close' argument on send/group_send 0.16.1 (2016-07-12) ------------------- * WebsocketConsumer now has a http_user option for auto user sessions. * consumer_started and consumer_finished signals are now available under channels.signals. * Database connections are closed whenever a consumer finishes. 0.16.0 (2016-07-06) ------------------- * websocket.connect and websocket.receive are now consumed by a no-op consumer by default if you don't specify anything to consume it, to bring Channels in line with the ASGI rules on WebSocket backpressure. * You no longer need to call super's setUp in ChannelTestCase. 0.15.1 (2016-06-29) ------------------- * Class based consumers now have a self.kwargs * Fixed bug where empty streaming responses did not send headers or status code 0.15.0 (2016-06-22) ------------------- * Query strings are now decoded entirely by Django. Must be used with Daphne 0.13 or higher. 0.14.3 (2016-06-21) ------------------- * + signs in query strings are no longer double-decoded * Message now has .values(), .keys() and .items() to match dict 0.14.2 (2016-06-16) ------------------- * Class based consumers now have built-in channel_session and channel_session_user support 0.14.1 (2016-06-09) ------------------- * Fix unicode issues with test client under Python 2.7 0.14.0 (2016-05-25) ------------------- * Class-based consumer pattern and WebSocket consumer now come with Channels (see docs for more details) * Better testing utilities including a higher-level Client abstraction with optional HTTP/WebSocket HttpClient variant. 
0.13.1 (2016-05-13) ------------------- * enforce_ordering now queues future messages in a channel rather than spinlocking worker processes to achieve delays. * ConsumeLater no longer duplicates messages when they're requeued below the limit. 0.13.0 (2016-05-07) ------------------- * Backpressure is now implemented, meaning responses will pause sending if the client does not read them fast enough. * DatabaseChannelLayer has been removed; it was not sensible. 0.12.0 (2016-04-26) ------------------- * HTTP paths and query strings are now expected to be sent over ASGI as unescaped unicode. Daphne 0.11.0 is updated to send things in this format. * request.FILES reading bug fixed 0.11.0 (2016-04-05) ------------------- * ChannelTestCase base testing class for easier testing of consumers * Routing rewrite to improve speed with nested includes and remove need for ^ operator * Timeouts reading very slow request bodies 0.10.3 (2016-03-29) ------------------- * Better error messages for wrongly-constructed routing lists * Error when trying to use signed cookie backend with channel_session * ASGI group_expiry implemented on database channel backend 0.10.2 (2016-03-23) ------------------- * Regular expressions for routing include() can now be Unicode under Python 3 * Last-resort error handling for HTTP request exceptions inside Django's core code. If DEBUG is on, shows plain text tracebacks; if it is off, shows "Internal Server Error". 
0.10.1 (2016-03-22) ------------------- * Regular expressions for HTTP paths can now be Unicode under Python 3 * route() and include() now importable directly from `channels` * FileResponse send speed improved for all code (previously just for staticfiles) 0.10.0 (2016-03-21) ------------------- * New routing system * Updated to match new ASGI single-reader-channel name spec * Updated to match new ASGI HTTP header encoding spec 0.9.5 (2016-03-10) ------------------ * `runworker` now has an --alias option to specify a different channel layer * `runserver` correctly falls back to WSGI mode if no channel layers configured 0.9.4 (2016-03-08) ------------------ * Worker processes now exit gracefully (finish their current processing) when sent SIGTERM or SIGINT. * `runserver` now has a shorter than standard HTTP timeout configured of 60 seconds. 0.9.3 (2016-02-28) ------------------ * Static file serving is significantly faster thanks to larger chunk size * `runworker` now refuses to start if an in memory layer is configured 0.9.2 (2016-02-28) ------------------ * ASGI spec updated to include `order` field for WebSocket messages * `enforce_ordering` decorator introduced * DatabaseChannelLayer now uses transactions to stop duplicated messages 0.9.1 (2016-02-21) ------------------ * Fix packaging issues with previous release 0.9 (2016-02-21) ---------------- * Staticfiles support in runserver * Runserver logs requests and WebSocket connections to console * Runserver autoreloads correctly * --noasgi option on runserver to use the old WSGI-based server * --noworker option on runserver to make it not launch worker threads * Streaming responses work correctly * Authentication decorators work again with new ASGI spec * channel_session_user_from_http decorator introduced * HTTP Long Poll support (raise ResponseLater) * Handle non-latin1 request body encoding * ASGI conformance tests for built-in database backend * Moved some imports around for more sensible layout 
channels-4.0.0/CONTRIBUTING.rst000066400000000000000000000020601432260166700157540ustar00rootroot00000000000000Contributing to Channels ======================== As an open source project, Channels welcomes contributions of many forms. By participating in this project, you agree to abide by the Django `code of conduct `_. Examples of contributions include: * Code patches * Documentation improvements * Bug reports and patch reviews For more information, please see our `contribution guide `_. Quick Setup ----------- Fork, then clone the repo: .. code-block:: sh git clone git@github.com:your-username/channels.git Make sure the tests pass: .. code-block:: sh python -m pip install -e .[tests] pytest Make your change. Add tests for your change. Make the tests pass: .. code-block:: sh pytest Make sure your code conforms to the coding style: .. code-block:: sh black ./channels ./tests isort --check-only --diff --recursive ./channels ./tests Push to your fork and `submit a pull request `_. channels-4.0.0/LICENSE000066400000000000000000000030201432260166700143150ustar00rootroot00000000000000Copyright (c) Django Software Foundation and individual contributors. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of Django nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. channels-4.0.0/MANIFEST.in000066400000000000000000000000521432260166700150500ustar00rootroot00000000000000recursive-exclude tests * include LICENSE channels-4.0.0/README.rst000066400000000000000000000057341432260166700150150ustar00rootroot00000000000000Django Channels =============== .. image:: https://github.com/django/channels/workflows/Tests/badge.svg?branch=master :target: https://github.com/django/channels/actions .. image:: https://readthedocs.org/projects/channels/badge/?version=latest :target: https://channels.readthedocs.io/en/latest/?badge=latest .. image:: https://img.shields.io/pypi/v/channels.svg :target: https://pypi.python.org/pypi/channels .. image:: https://img.shields.io/pypi/l/channels.svg :target: https://pypi.python.org/pypi/channels Channels augments Django to bring WebSocket, long-poll HTTP, task offloading and other async support to your code, using familiar Django design patterns and a flexible underlying framework that lets you not only customize behaviours but also write support for your own protocols and needs. Documentation, installation and getting started instructions are at https://channels.readthedocs.io Channels is an official Django Project and as such has a deprecation policy. 
Details about what's deprecated or pending deprecation for each release is in the `release notes `_. Support can be obtained through several locations - see our `support docs `_ for more. You can install channels from PyPI as the ``channels`` package. See our `installation `_ and `tutorial `_ docs for more. Dependencies ------------ All Channels projects currently support Python 3.7 and up. ``channels`` is compatible with Django 2.2, 3.2, 4.0 and 4.1. Contributing ------------ To learn more about contributing, please `read our contributing docs `_. Maintenance and Security ------------------------ To report security issues, please contact security@djangoproject.com. For GPG signatures and more security process information, see https://docs.djangoproject.com/en/dev/internals/security/. To report bugs or request new features, please open a new GitHub issue. For larger discussions, please post to the `django-developers mailing list `_. Maintenance is overseen by Carlton Gibson with help from others. It is a best-effort basis - we unfortunately can only dedicate guaranteed time to fixing security holes. If you are interested in joining the maintenance team, please `read more about contributing `_ and get in touch! 
Other Projects -------------- The Channels project is made up of several packages; the others are: * `Daphne `_, the HTTP and Websocket termination server * `channels_redis `_, the Redis channel backend * `asgiref `_, the base ASGI library/memory backend channels-4.0.0/channels/000077500000000000000000000000001432260166700151105ustar00rootroot00000000000000channels-4.0.0/channels/__init__.py000066400000000000000000000000721432260166700172200ustar00rootroot00000000000000__version__ = "4.0.0" DEFAULT_CHANNEL_LAYER = "default" channels-4.0.0/channels/apps.py000066400000000000000000000001721432260166700164250ustar00rootroot00000000000000from django.apps import AppConfig class ChannelsConfig(AppConfig): name = "channels" verbose_name = "Channels" channels-4.0.0/channels/auth.py000066400000000000000000000146231432260166700164310ustar00rootroot00000000000000from django.conf import settings from django.contrib.auth import ( BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY, _get_backends, get_user_model, load_backend, user_logged_in, user_logged_out, ) from django.utils.crypto import constant_time_compare from django.utils.functional import LazyObject from channels.db import database_sync_to_async from channels.middleware import BaseMiddleware from channels.sessions import CookieMiddleware, SessionMiddleware @database_sync_to_async def get_user(scope): """ Return the user model instance associated with the given scope. If no user is retrieved, return an instance of `AnonymousUser`. """ # postpone model import to avoid ImproperlyConfigured error before Django # setup is complete. from django.contrib.auth.models import AnonymousUser if "session" not in scope: raise ValueError( "Cannot find session in scope. You should wrap your consumer in " "SessionMiddleware." 
) session = scope["session"] user = None try: user_id = _get_user_session_key(session) backend_path = session[BACKEND_SESSION_KEY] except KeyError: pass else: if backend_path in settings.AUTHENTICATION_BACKENDS: backend = load_backend(backend_path) user = backend.get_user(user_id) # Verify the session if hasattr(user, "get_session_auth_hash"): session_hash = session.get(HASH_SESSION_KEY) session_hash_verified = session_hash and constant_time_compare( session_hash, user.get_session_auth_hash() ) if not session_hash_verified: session.flush() user = None return user or AnonymousUser() @database_sync_to_async def login(scope, user, backend=None): """ Persist a user id and a backend in the request. This way a user doesn't have to re-authenticate on every request. Note that data set during the anonymous session is retained when the user logs in. """ if "session" not in scope: raise ValueError( "Cannot find session in scope. You should wrap your consumer in " "SessionMiddleware." ) session = scope["session"] session_auth_hash = "" if user is None: user = scope.get("user", None) if user is None: raise ValueError( "User must be passed as an argument or must be present in the scope." ) if hasattr(user, "get_session_auth_hash"): session_auth_hash = user.get_session_auth_hash() if SESSION_KEY in session: if _get_user_session_key(session) != user.pk or ( session_auth_hash and not constant_time_compare( session.get(HASH_SESSION_KEY, ""), session_auth_hash ) ): # To avoid reusing another user's session, create a new, empty # session if the existing session corresponds to a different # authenticated user. session.flush() else: session.cycle_key() try: backend = backend or user.backend except AttributeError: backends = _get_backends(return_tuples=True) if len(backends) == 1: _, backend = backends[0] else: raise ValueError( "You have multiple authentication backends configured and " "therefore must provide the `backend` " "argument or set the `backend` attribute on the user." 
) session[SESSION_KEY] = user._meta.pk.value_to_string(user) session[BACKEND_SESSION_KEY] = backend session[HASH_SESSION_KEY] = session_auth_hash scope["user"] = user # note this does not reset the CSRF_COOKIE/Token user_logged_in.send(sender=user.__class__, request=None, user=user) @database_sync_to_async def logout(scope): """ Remove the authenticated user's ID from the request and flush their session data. """ # postpone model import to avoid ImproperlyConfigured error before Django # setup is complete. from django.contrib.auth.models import AnonymousUser if "session" not in scope: raise ValueError( "Login cannot find session in scope. You should wrap your " "consumer in SessionMiddleware." ) session = scope["session"] # Dispatch the signal before the user is logged out so the receivers have a # chance to find out *who* logged out. user = scope.get("user", None) if hasattr(user, "is_authenticated") and not user.is_authenticated: user = None if user is not None: user_logged_out.send(sender=user.__class__, request=None, user=user) session.flush() if "user" in scope: scope["user"] = AnonymousUser() def _get_user_session_key(session): # This value in the session is always serialized to a string, so we need # to convert it back to Python whenever we access it. return get_user_model()._meta.pk.to_python(session[SESSION_KEY]) class UserLazyObject(LazyObject): """ Throw a more useful error message when scope['user'] is accessed before it's resolved """ def _setup(self): raise ValueError("Accessing scope user before it is ready.") class AuthMiddleware(BaseMiddleware): """ Middleware which populates scope["user"] from a Django session. Requires SessionMiddleware to function. """ def populate_scope(self, scope): # Make sure we have a session if "session" not in scope: raise ValueError( "AuthMiddleware cannot find session in scope. " "SessionMiddleware must be above it." 
) # Add it to the scope if it's not there already if "user" not in scope: scope["user"] = UserLazyObject() async def resolve_scope(self, scope): scope["user"]._wrapped = await get_user(scope) async def __call__(self, scope, receive, send): scope = dict(scope) # Scope injection/mutation per this middleware's needs. self.populate_scope(scope) # Grab the finalized/resolved scope await self.resolve_scope(scope) return await super().__call__(scope, receive, send) # Handy shortcut for applying all three layers at once def AuthMiddlewareStack(inner): return CookieMiddleware(SessionMiddleware(AuthMiddleware(inner))) channels-4.0.0/channels/consumer.py000066400000000000000000000103341432260166700173160ustar00rootroot00000000000000import functools from asgiref.sync import async_to_sync from . import DEFAULT_CHANNEL_LAYER from .db import database_sync_to_async from .exceptions import StopConsumer from .layers import get_channel_layer from .utils import await_many_dispatch def get_handler_name(message): """ Looks at a message, checks it has a sensible type, and returns the handler name for that type. """ # Check message looks OK if "type" not in message: raise ValueError("Incoming message has no 'type' attribute") # Extract type and replace . with _ handler_name = message["type"].replace(".", "_") if handler_name.startswith("_"): raise ValueError("Malformed type in message (leading underscore)") return handler_name class AsyncConsumer: """ Base consumer class. Implements the ASGI application spec, and adds on channel layer management and routing of events to named methods based on their type. """ _sync = False channel_layer_alias = DEFAULT_CHANNEL_LAYER async def __call__(self, scope, receive, send): """ Dispatches incoming messages to type-based handlers asynchronously. 
""" self.scope = scope # Initialize channel layer self.channel_layer = get_channel_layer(self.channel_layer_alias) if self.channel_layer is not None: self.channel_name = await self.channel_layer.new_channel() self.channel_receive = functools.partial( self.channel_layer.receive, self.channel_name ) # Store send function if self._sync: self.base_send = async_to_sync(send) else: self.base_send = send # Pass messages in from channel layer or client to dispatch method try: if self.channel_layer is not None: await await_many_dispatch( [receive, self.channel_receive], self.dispatch ) else: await await_many_dispatch([receive], self.dispatch) except StopConsumer: # Exit cleanly pass async def dispatch(self, message): """ Works out what to do with a message. """ handler = getattr(self, get_handler_name(message), None) if handler: await handler(message) else: raise ValueError("No handler for message type %s" % message["type"]) async def send(self, message): """ Overrideable/callable-by-subclasses send method. """ await self.base_send(message) @classmethod def as_asgi(cls, **initkwargs): """ Return an ASGI v3 single callable that instantiates a consumer instance per scope. Similar in purpose to Django's as_view(). initkwargs will be used to instantiate the consumer instance. """ async def app(scope, receive, send): consumer = cls(**initkwargs) return await consumer(scope, receive, send) app.consumer_class = cls app.consumer_initkwargs = initkwargs # take name and docstring from class functools.update_wrapper(app, cls, updated=()) return app class SyncConsumer(AsyncConsumer): """ Synchronous version of the consumer, which is what we write most of the generic consumers against (for now). Calls handlers in a threadpool and uses CallBouncer to get the send method out to the main event loop. 
It would have been possible to have "mixed" consumers and auto-detect if a handler was awaitable or not, but that would have made the API for user-called methods very confusing as there'd be two types of each. """ _sync = True @database_sync_to_async def dispatch(self, message): """ Dispatches incoming messages to type-based handlers asynchronously. """ # Get and execute the handler handler = getattr(self, get_handler_name(message), None) if handler: handler(message) else: raise ValueError("No handler for message type %s" % message["type"]) def send(self, message): """ Overrideable/callable-by-subclasses send method. """ self.base_send(message) channels-4.0.0/channels/db.py000066400000000000000000000010621432260166700160460ustar00rootroot00000000000000from asgiref.sync import SyncToAsync from django.db import close_old_connections class DatabaseSyncToAsync(SyncToAsync): """ SyncToAsync version that cleans up old database connections when it exits. """ def thread_handler(self, loop, *args, **kwargs): close_old_connections() try: return super().thread_handler(loop, *args, **kwargs) finally: close_old_connections() # The class is TitleCased, but we want to encourage use as a callable/decorator database_sync_to_async = DatabaseSyncToAsync channels-4.0.0/channels/exceptions.py000066400000000000000000000021371432260166700176460ustar00rootroot00000000000000class RequestAborted(Exception): """ Raised when the incoming request tells us it's aborted partway through reading the body. """ pass class RequestTimeout(RequestAborted): """ Aborted specifically due to timeout. """ pass class InvalidChannelLayerError(ValueError): """ Raised when a channel layer is configured incorrectly. """ pass class AcceptConnection(Exception): """ Raised during a websocket.connect (or other supported connection) handler to accept the connection. """ pass class DenyConnection(Exception): """ Raised during a websocket.connect (or other supported connection) handler to deny the connection. 
""" pass class ChannelFull(Exception): """ Raised when a channel cannot be sent to as it is over capacity. """ pass class MessageTooLarge(Exception): """ Raised when a message cannot be sent as it's too big. """ pass class StopConsumer(Exception): """ Raised when a consumer wants to stop and close down its application instance. """ pass channels-4.0.0/channels/generic/000077500000000000000000000000001432260166700165245ustar00rootroot00000000000000channels-4.0.0/channels/generic/__init__.py000066400000000000000000000000001432260166700206230ustar00rootroot00000000000000channels-4.0.0/channels/generic/http.py000066400000000000000000000060211432260166700200540ustar00rootroot00000000000000from channels.consumer import AsyncConsumer from ..exceptions import StopConsumer class AsyncHttpConsumer(AsyncConsumer): """ Async HTTP consumer. Provides basic primitives for building asynchronous HTTP endpoints. """ def __init__(self, *args, **kwargs): self.body = [] async def send_headers(self, *, status=200, headers=None): """ Sets the HTTP response status and headers. Headers may be provided as a list of tuples or as a dictionary. Note that the ASGI spec requires that the protocol server only starts sending the response to the client after ``self.send_body`` has been called the first time. """ if headers is None: headers = [] elif isinstance(headers, dict): headers = list(headers.items()) await self.send( {"type": "http.response.start", "status": status, "headers": headers} ) async def send_body(self, body, *, more_body=False): """ Sends a response body to the client. The method expects a bytestring. Set ``more_body=True`` if you want to send more body content later. The default behavior closes the response, and further messages on the channel will be ignored. 
""" assert isinstance(body, bytes), "Body is not bytes" await self.send( {"type": "http.response.body", "body": body, "more_body": more_body} ) async def send_response(self, status, body, **kwargs): """ Sends a response to the client. This is a thin wrapper over ``self.send_headers`` and ``self.send_body``, and everything said above applies here as well. This method may only be called once. """ await self.send_headers(status=status, **kwargs) await self.send_body(body) async def handle(self, body): """ Receives the request body as a bytestring. Response may be composed using the ``self.send*`` methods; the return value of this method is thrown away. """ raise NotImplementedError( "Subclasses of AsyncHttpConsumer must provide a handle() method." ) async def disconnect(self): """ Overrideable place to run disconnect handling. Do not send anything from here. """ pass async def http_request(self, message): """ Async entrypoint - concatenates body fragments and hands off control to ``self.handle`` when the body has been completely received. """ if "body" in message: self.body.append(message["body"]) if not message.get("more_body"): try: await self.handle(b"".join(self.body)) finally: await self.disconnect() raise StopConsumer() async def http_disconnect(self, message): """ Let the user do their cleanup and close the consumer. """ await self.disconnect() raise StopConsumer() channels-4.0.0/channels/generic/websocket.py000066400000000000000000000205631432260166700210720ustar00rootroot00000000000000import json from asgiref.sync import async_to_sync from ..consumer import AsyncConsumer, SyncConsumer from ..exceptions import ( AcceptConnection, DenyConnection, InvalidChannelLayerError, StopConsumer, ) class WebsocketConsumer(SyncConsumer): """ Base WebSocket consumer. Provides a general encapsulation for the WebSocket handling model that other applications can build on. 
""" groups = None def __init__(self, *args, **kwargs): if self.groups is None: self.groups = [] def websocket_connect(self, message): """ Called when a WebSocket connection is opened. """ try: for group in self.groups: async_to_sync(self.channel_layer.group_add)(group, self.channel_name) except AttributeError: raise InvalidChannelLayerError( "BACKEND is unconfigured or doesn't support groups" ) try: self.connect() except AcceptConnection: self.accept() except DenyConnection: self.close() def connect(self): self.accept() def accept(self, subprotocol=None): """ Accepts an incoming socket """ super().send({"type": "websocket.accept", "subprotocol": subprotocol}) def websocket_receive(self, message): """ Called when a WebSocket frame is received. Decodes it and passes it to receive(). """ if "text" in message: self.receive(text_data=message["text"]) else: self.receive(bytes_data=message["bytes"]) def receive(self, text_data=None, bytes_data=None): """ Called with a decoded WebSocket frame. """ pass def send(self, text_data=None, bytes_data=None, close=False): """ Sends a reply back down the WebSocket """ if text_data is not None: super().send({"type": "websocket.send", "text": text_data}) elif bytes_data is not None: super().send({"type": "websocket.send", "bytes": bytes_data}) else: raise ValueError("You must pass one of bytes_data or text_data") if close: self.close(close) def close(self, code=None): """ Closes the WebSocket from the server end """ if code is not None and code is not True: super().send({"type": "websocket.close", "code": code}) else: super().send({"type": "websocket.close"}) def websocket_disconnect(self, message): """ Called when a WebSocket connection is closed. Base level so you don't need to call super() all the time. 
""" try: for group in self.groups: async_to_sync(self.channel_layer.group_discard)( group, self.channel_name ) except AttributeError: raise InvalidChannelLayerError( "BACKEND is unconfigured or doesn't support groups" ) self.disconnect(message["code"]) raise StopConsumer() def disconnect(self, code): """ Called when a WebSocket connection is closed. """ pass class JsonWebsocketConsumer(WebsocketConsumer): """ Variant of WebsocketConsumer that automatically JSON-encodes and decodes messages as they come in and go out. Expects everything to be text; will error on binary data. """ def receive(self, text_data=None, bytes_data=None, **kwargs): if text_data: self.receive_json(self.decode_json(text_data), **kwargs) else: raise ValueError("No text section for incoming WebSocket frame!") def receive_json(self, content, **kwargs): """ Called with decoded JSON content. """ pass def send_json(self, content, close=False): """ Encode the given content as JSON and send it to the client. """ super().send(text_data=self.encode_json(content), close=close) @classmethod def decode_json(cls, text_data): return json.loads(text_data) @classmethod def encode_json(cls, content): return json.dumps(content) class AsyncWebsocketConsumer(AsyncConsumer): """ Base WebSocket consumer, async version. Provides a general encapsulation for the WebSocket handling model that other applications can build on. """ groups = None def __init__(self, *args, **kwargs): if self.groups is None: self.groups = [] async def websocket_connect(self, message): """ Called when a WebSocket connection is opened. 
""" try: for group in self.groups: await self.channel_layer.group_add(group, self.channel_name) except AttributeError: raise InvalidChannelLayerError( "BACKEND is unconfigured or doesn't support groups" ) try: await self.connect() except AcceptConnection: await self.accept() except DenyConnection: await self.close() async def connect(self): await self.accept() async def accept(self, subprotocol=None): """ Accepts an incoming socket """ await super().send({"type": "websocket.accept", "subprotocol": subprotocol}) async def websocket_receive(self, message): """ Called when a WebSocket frame is received. Decodes it and passes it to receive(). """ if "text" in message: await self.receive(text_data=message["text"]) else: await self.receive(bytes_data=message["bytes"]) async def receive(self, text_data=None, bytes_data=None): """ Called with a decoded WebSocket frame. """ pass async def send(self, text_data=None, bytes_data=None, close=False): """ Sends a reply back down the WebSocket """ if text_data is not None: await super().send({"type": "websocket.send", "text": text_data}) elif bytes_data is not None: await super().send({"type": "websocket.send", "bytes": bytes_data}) else: raise ValueError("You must pass one of bytes_data or text_data") if close: await self.close(close) async def close(self, code=None): """ Closes the WebSocket from the server end """ if code is not None and code is not True: await super().send({"type": "websocket.close", "code": code}) else: await super().send({"type": "websocket.close"}) async def websocket_disconnect(self, message): """ Called when a WebSocket connection is closed. Base level so you don't need to call super() all the time. 
""" try: for group in self.groups: await self.channel_layer.group_discard(group, self.channel_name) except AttributeError: raise InvalidChannelLayerError( "BACKEND is unconfigured or doesn't support groups" ) await self.disconnect(message["code"]) raise StopConsumer() async def disconnect(self, code): """ Called when a WebSocket connection is closed. """ pass class AsyncJsonWebsocketConsumer(AsyncWebsocketConsumer): """ Variant of AsyncWebsocketConsumer that automatically JSON-encodes and decodes messages as they come in and go out. Expects everything to be text; will error on binary data. """ async def receive(self, text_data=None, bytes_data=None, **kwargs): if text_data: await self.receive_json(await self.decode_json(text_data), **kwargs) else: raise ValueError("No text section for incoming WebSocket frame!") async def receive_json(self, content, **kwargs): """ Called with decoded JSON content. """ pass async def send_json(self, content, close=False): """ Encode the given content as JSON and send it to the client. """ await super().send(text_data=await self.encode_json(content), close=close) @classmethod async def decode_json(cls, text_data): return json.loads(text_data) @classmethod async def encode_json(cls, content): return json.dumps(content) channels-4.0.0/channels/layers.py000066400000000000000000000272611432260166700167710ustar00rootroot00000000000000import asyncio import fnmatch import random import re import string import time from copy import deepcopy from django.conf import settings from django.core.signals import setting_changed from django.utils.module_loading import import_string from channels import DEFAULT_CHANNEL_LAYER from .exceptions import ChannelFull, InvalidChannelLayerError class ChannelLayerManager: """ Takes a settings dictionary of backends and initialises them on request. 
""" def __init__(self): self.backends = {} setting_changed.connect(self._reset_backends) def _reset_backends(self, setting, **kwargs): """ Removes cached channel layers when the CHANNEL_LAYERS setting changes. """ if setting == "CHANNEL_LAYERS": self.backends = {} @property def configs(self): # Lazy load settings so we can be imported return getattr(settings, "CHANNEL_LAYERS", {}) def make_backend(self, name): """ Instantiate channel layer. """ config = self.configs[name].get("CONFIG", {}) return self._make_backend(name, config) def make_test_backend(self, name): """ Instantiate channel layer using its test config. """ try: config = self.configs[name]["TEST_CONFIG"] except KeyError: raise InvalidChannelLayerError("No TEST_CONFIG specified for %s" % name) return self._make_backend(name, config) def _make_backend(self, name, config): # Check for old format config if "ROUTING" in self.configs[name]: raise InvalidChannelLayerError( "ROUTING key found for %s - this is no longer needed in Channels 2." % name ) # Load the backend class try: backend_class = import_string(self.configs[name]["BACKEND"]) except KeyError: raise InvalidChannelLayerError("No BACKEND specified for %s" % name) except ImportError: raise InvalidChannelLayerError( "Cannot import BACKEND %r specified for %s" % (self.configs[name]["BACKEND"], name) ) # Initialise and pass config return backend_class(**config) def __getitem__(self, key): if key not in self.backends: self.backends[key] = self.make_backend(key) return self.backends[key] def __contains__(self, key): return key in self.configs def set(self, key, layer): """ Sets an alias to point to a new ChannelLayerWrapper instance, and returns the old one that it replaced. Useful for swapping out the backend during tests. """ old = self.backends.get(key, None) self.backends[key] = layer return old class BaseChannelLayer: """ Base channel layer class that others can inherit from, with useful common functionality. 
""" MAX_NAME_LENGTH = 100 def __init__(self, expiry=60, capacity=100, channel_capacity=None): self.expiry = expiry self.capacity = capacity self.channel_capacity = channel_capacity or {} def compile_capacities(self, channel_capacity): """ Takes an input channel_capacity dict and returns the compiled list of regexes that get_capacity will look for as self.channel_capacity """ result = [] for pattern, value in channel_capacity.items(): # If they passed in a precompiled regex, leave it, else interpret # it as a glob. if hasattr(pattern, "match"): result.append((pattern, value)) else: result.append((re.compile(fnmatch.translate(pattern)), value)) return result def get_capacity(self, channel): """ Gets the correct capacity for the given channel; either the default, or a matching result from channel_capacity. Returns the first matching result; if you want to control the order of matches, use an ordered dict as input. """ for pattern, capacity in self.channel_capacity: if pattern.match(channel): return capacity return self.capacity def match_type_and_length(self, name): if isinstance(name, str) and (len(name) < self.MAX_NAME_LENGTH): return True return False # Name validation functions channel_name_regex = re.compile(r"^[a-zA-Z\d\-_.]+(\![\d\w\-_.]*)?$") group_name_regex = re.compile(r"^[a-zA-Z\d\-_.]+$") invalid_name_error = ( "{} name must be a valid unicode string " + "with length < {} ".format(MAX_NAME_LENGTH) + "containing only ASCII alphanumerics, hyphens, underscores, or periods, " + "not {}" ) def valid_channel_name(self, name, receive=False): if self.match_type_and_length(name): if bool(self.channel_name_regex.match(name)): # Check cases for special channels if "!" in name and not name.endswith("!") and receive: raise TypeError( "Specific channel names in receive() must end at the !" 
) return True raise TypeError(self.invalid_name_error.format("Channel", name)) def valid_group_name(self, name): if self.match_type_and_length(name): if bool(self.group_name_regex.match(name)): return True raise TypeError(self.invalid_name_error.format("Group", name)) def valid_channel_names(self, names, receive=False): _non_empty_list = True if names else False _names_type = isinstance(names, list) assert _non_empty_list and _names_type, "names must be a non-empty list" assert all( self.valid_channel_name(channel, receive=receive) for channel in names ) return True def non_local_name(self, name): """ Given a channel name, returns the "non-local" part. If the channel name is a process-specific channel (contains !) this means the part up to and including the !; if it is anything else, this means the full name. """ if "!" in name: return name[: name.find("!") + 1] else: return name class InMemoryChannelLayer(BaseChannelLayer): """ In-memory channel layer implementation """ def __init__( self, expiry=60, group_expiry=86400, capacity=100, channel_capacity=None, **kwargs ): super().__init__( expiry=expiry, capacity=capacity, channel_capacity=channel_capacity, **kwargs ) self.channels = {} self.groups = {} self.group_expiry = group_expiry # Channel layer API extensions = ["groups", "flush"] async def send(self, channel, message): """ Send a message onto a (general or specific) channel. """ # Typecheck assert isinstance(message, dict), "message is not a dict" assert self.valid_channel_name(channel), "Channel name not valid" # If it's a process-local channel, strip off local part and stick full # name in message assert "__asgi_channel__" not in message queue = self.channels.setdefault(channel, asyncio.Queue()) # Are we full if queue.qsize() >= self.capacity: raise ChannelFull(channel) # Add message await queue.put((time.time() + self.expiry, deepcopy(message))) async def receive(self, channel): """ Receive the first message that arrives on the channel. 
If more than one coroutine waits on the same channel, a random one of the waiting coroutines will get the result. """ assert self.valid_channel_name(channel) self._clean_expired() queue = self.channels.setdefault(channel, asyncio.Queue()) # Do a plain direct receive try: _, message = await queue.get() finally: if queue.empty(): del self.channels[channel] return message async def new_channel(self, prefix="specific."): """ Returns a new channel name that can be used by something in our process as a specific channel. """ return "%s.inmemory!%s" % ( prefix, "".join(random.choice(string.ascii_letters) for i in range(12)), ) # Expire cleanup def _clean_expired(self): """ Goes through all messages and groups and removes those that are expired. Any channel with an expired message is removed from all groups. """ # Channel cleanup for channel, queue in list(self.channels.items()): # See if it's expired while not queue.empty() and queue._queue[0][0] < time.time(): queue.get_nowait() # Any removal prompts group discard self._remove_from_groups(channel) # Is the channel now empty and needs deleting? if queue.empty(): del self.channels[channel] # Group Expiration timeout = int(time.time()) - self.group_expiry for group in self.groups: for channel in list(self.groups.get(group, set())): # If join time is older than group_expiry end the group membership if ( self.groups[group][channel] and int(self.groups[group][channel]) < timeout ): # Delete from group del self.groups[group][channel] # Flush extension async def flush(self): self.channels = {} self.groups = {} async def close(self): # Nothing to go pass def _remove_from_groups(self, channel): """ Removes a channel from all groups. Used when a message on it expires. """ for channels in self.groups.values(): if channel in channels: del channels[channel] # Groups extension async def group_add(self, group, channel): """ Adds the channel name to a group. 
""" # Check the inputs assert self.valid_group_name(group), "Group name not valid" assert self.valid_channel_name(channel), "Channel name not valid" # Add to group dict self.groups.setdefault(group, {}) self.groups[group][channel] = time.time() async def group_discard(self, group, channel): # Both should be text and valid assert self.valid_channel_name(channel), "Invalid channel name" assert self.valid_group_name(group), "Invalid group name" # Remove from group set if group in self.groups: if channel in self.groups[group]: del self.groups[group][channel] if not self.groups[group]: del self.groups[group] async def group_send(self, group, message): # Check types assert isinstance(message, dict), "Message is not a dict" assert self.valid_group_name(group), "Invalid group name" # Run clean self._clean_expired() # Send to each channel for channel in self.groups.get(group, set()): try: await self.send(channel, message) except ChannelFull: pass def get_channel_layer(alias=DEFAULT_CHANNEL_LAYER): """ Returns a channel layer by alias, or None if it is not configured. 
""" try: return channel_layers[alias] except KeyError: return None # Default global instance of the channel layer manager channel_layers = ChannelLayerManager() channels-4.0.0/channels/management/000077500000000000000000000000001432260166700172245ustar00rootroot00000000000000channels-4.0.0/channels/management/__init__.py000066400000000000000000000000001432260166700213230ustar00rootroot00000000000000channels-4.0.0/channels/management/commands/000077500000000000000000000000001432260166700210255ustar00rootroot00000000000000channels-4.0.0/channels/management/commands/__init__.py000066400000000000000000000000001432260166700231240ustar00rootroot00000000000000channels-4.0.0/channels/management/commands/runworker.py000066400000000000000000000030721432260166700234370ustar00rootroot00000000000000import logging from django.core.management import BaseCommand, CommandError from channels import DEFAULT_CHANNEL_LAYER from channels.layers import get_channel_layer from channels.routing import get_default_application from channels.worker import Worker logger = logging.getLogger("django.channels.worker") class Command(BaseCommand): leave_locale_alone = True worker_class = Worker def add_arguments(self, parser): super(Command, self).add_arguments(parser) parser.add_argument( "--layer", action="store", dest="layer", default=DEFAULT_CHANNEL_LAYER, help="Channel layer alias to use, if not the default.", ) parser.add_argument("channels", nargs="+", help="Channels to listen on.") def handle(self, *args, **options): # Get the backend to use self.verbosity = options.get("verbosity", 1) # Get the channel layer they asked for (or see if one isn't configured) if "layer" in options: self.channel_layer = get_channel_layer(options["layer"]) else: self.channel_layer = get_channel_layer() if self.channel_layer is None: raise CommandError("You do not have any CHANNEL_LAYERS configured.") # Run the worker logger.info("Running worker for channels %s", options["channels"]) worker = self.worker_class( 
application=get_default_application(), channels=options["channels"], channel_layer=self.channel_layer, ) worker.run() channels-4.0.0/channels/middleware.py000066400000000000000000000013641432260166700176030ustar00rootroot00000000000000class BaseMiddleware: """ Base class for implementing ASGI middleware. Note that subclasses of this are not self-safe; don't store state on the instance, as it serves multiple application instances. Instead, use scope. """ def __init__(self, inner): """ Middleware constructor - just takes inner application. """ self.inner = inner async def __call__(self, scope, receive, send): """ ASGI application; can insert things into the scope and run asynchronous code. """ # Copy scope to stop changes going upstream scope = dict(scope) # Run the inner application along with the scope return await self.inner(scope, receive, send) channels-4.0.0/channels/routing.py000066400000000000000000000132141432260166700171520ustar00rootroot00000000000000import importlib from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.urls.exceptions import Resolver404 from django.urls.resolvers import URLResolver """ All Routing instances inside this file are also valid ASGI applications - with new Channels routing, whatever you end up with as the top level object is just served up as the "ASGI application". """ def get_default_application(): """ Gets the default application, set in the ASGI_APPLICATION setting. 
""" try: path, name = settings.ASGI_APPLICATION.rsplit(".", 1) except (ValueError, AttributeError): raise ImproperlyConfigured("Cannot find ASGI_APPLICATION setting.") try: module = importlib.import_module(path) except ImportError: raise ImproperlyConfigured("Cannot import ASGI_APPLICATION module %r" % path) try: value = getattr(module, name) except AttributeError: raise ImproperlyConfigured( "Cannot find %r in ASGI_APPLICATION module %s" % (name, path) ) return value DEPRECATION_MSG = """ Using ProtocolTypeRouter without an explicit "http" key is deprecated. Given that you have not passed the "http" you likely should use Django's get_asgi_application(): from django.core.asgi import get_asgi_application application = ProtocolTypeRouter( "http": get_asgi_application() # Other protocols here. ) """ class ProtocolTypeRouter: """ Takes a mapping of protocol type names to other Application instances, and dispatches to the right one based on protocol name (or raises an error) """ def __init__(self, application_mapping): self.application_mapping = application_mapping async def __call__(self, scope, receive, send): if scope["type"] in self.application_mapping: application = self.application_mapping[scope["type"]] return await application(scope, receive, send) else: raise ValueError( "No application configured for scope type %r" % scope["type"] ) class URLRouter: """ Routes to different applications/consumers based on the URL path. Works with anything that has a ``path`` key, but intended for WebSocket and HTTP. Uses Django's django.urls objects for resolution - path() or re_path(). """ #: This router wants to do routing based on scope[path] or #: scope[path_remaining]. ``path()`` entries in URLRouter should not be #: treated as endpoints (ended with ``$``), but similar to ``include()``. 
_path_routing = True def __init__(self, routes): self.routes = routes for route in self.routes: # The inner ASGI app wants to do additional routing, route # must not be an endpoint if getattr(route.callback, "_path_routing", False) is True: route.pattern._is_endpoint = False if not route.callback and isinstance(route, URLResolver): raise ImproperlyConfigured( "%s: include() is not supported in URLRouter. Use nested" " URLRouter instances instead." % (route,) ) async def __call__(self, scope, receive, send): # Get the path path = scope.get("path_remaining", scope.get("path", None)) if path is None: raise ValueError("No 'path' key in connection scope, cannot route URLs") # Remove leading / to match Django's handling path = path.lstrip("/") # Run through the routes we have until one matches for route in self.routes: try: match = route.pattern.match(path) if match: new_path, args, kwargs = match # Add defaults to kwargs from the URL pattern. kwargs.update(route.default_args) # Add args or kwargs into the scope outer = scope.get("url_route", {}) application = route.callback return await application( dict( scope, path_remaining=new_path, url_route={ "args": outer.get("args", ()) + args, "kwargs": {**outer.get("kwargs", {}), **kwargs}, }, ), receive, send, ) except Resolver404: pass else: if "path_remaining" in scope: raise Resolver404("No route found for path %r." % path) # We are the outermost URLRouter raise ValueError("No route found for path %r." % path) class ChannelNameRouter: """ Maps to different applications based on a "channel" key in the scope (intended for the Channels worker mode) """ def __init__(self, application_mapping): self.application_mapping = application_mapping async def __call__(self, scope, receive, send): if "channel" not in scope: raise ValueError( "ChannelNameRouter got a scope without a 'channel' key. " + "Did you make sure it's only being used for 'channel' type messages?" 
) if scope["channel"] in self.application_mapping: application = self.application_mapping[scope["channel"]] return await application(scope, receive, send) else: raise ValueError( "No application configured for channel name %r" % scope["channel"] ) channels-4.0.0/channels/security/000077500000000000000000000000001432260166700167575ustar00rootroot00000000000000channels-4.0.0/channels/security/__init__.py000066400000000000000000000000001432260166700210560ustar00rootroot00000000000000channels-4.0.0/channels/security/websocket.py000066400000000000000000000132501432260166700213200ustar00rootroot00000000000000from urllib.parse import urlparse from django.conf import settings from django.http.request import is_same_domain from ..generic.websocket import AsyncWebsocketConsumer class OriginValidator: """ Validates that the incoming connection has an Origin header that is in an allowed list. """ def __init__(self, application, allowed_origins): self.application = application self.allowed_origins = allowed_origins async def __call__(self, scope, receive, send): # Make sure the scope is of type websocket if scope["type"] != "websocket": raise ValueError( "You cannot use OriginValidator on a non-WebSocket connection" ) # Extract the Origin header parsed_origin = None for header_name, header_value in scope.get("headers", []): if header_name == b"origin": try: # Set ResultParse parsed_origin = urlparse(header_value.decode("latin1")) except UnicodeDecodeError: pass # Check to see if the origin header is valid if self.valid_origin(parsed_origin): # Pass control to the application return await self.application(scope, receive, send) else: # Deny the connection denier = WebsocketDenier() return await denier(scope, receive, send) def valid_origin(self, parsed_origin): """ Checks parsed origin is None. Pass control to the validate_origin function. Returns ``True`` if validation function was successful, ``False`` otherwise. 
""" # None is not allowed unless all hosts are allowed if parsed_origin is None and "*" not in self.allowed_origins: return False return self.validate_origin(parsed_origin) def validate_origin(self, parsed_origin): """ Validate the given origin for this site. Check than the origin looks valid and matches the origin pattern in specified list ``allowed_origins``. Any pattern begins with a scheme. After the scheme there must be a domain. Any domain beginning with a period corresponds to the domain and all its subdomains (for example, ``http://.example.com``). After the domain there must be a port, but it can be omitted. ``*`` matches anything and anything else must match exactly. Note. This function assumes that the given origin has a schema, domain and port, but port is optional. Returns ``True`` for a valid host, ``False`` otherwise. """ return any( pattern == "*" or self.match_allowed_origin(parsed_origin, pattern) for pattern in self.allowed_origins ) def match_allowed_origin(self, parsed_origin, pattern): """ Returns ``True`` if the origin is either an exact match or a match to the wildcard pattern. Compares scheme, domain, port of origin and pattern. Any pattern can be begins with a scheme. After the scheme must be a domain, or just domain without scheme. Any domain beginning with a period corresponds to the domain and all its subdomains (for example, ``.example.com`` ``example.com`` and any subdomain). Also with scheme (for example, ``http://.example.com`` ``http://exapmple.com``). After the domain there must be a port, but it can be omitted. Note. 
This function assumes that the given origin is either None, a schema-domain-port string, or just a domain string """ if parsed_origin is None: return False # Get ResultParse object parsed_pattern = urlparse(pattern.lower()) if parsed_origin.hostname is None: return False if not parsed_pattern.scheme: pattern_hostname = urlparse("//" + pattern).hostname or pattern return is_same_domain(parsed_origin.hostname, pattern_hostname) # Get origin.port or default ports for origin or None origin_port = self.get_origin_port(parsed_origin) # Get pattern.port or default ports for pattern or None pattern_port = self.get_origin_port(parsed_pattern) # Compares hostname, scheme, ports of pattern and origin if ( parsed_pattern.scheme == parsed_origin.scheme and origin_port == pattern_port and is_same_domain(parsed_origin.hostname, parsed_pattern.hostname) ): return True return False def get_origin_port(self, origin): """ Returns the origin.port or port for this schema by default. Otherwise, it returns None. """ if origin.port is not None: # Return origin.port return origin.port # if origin.port doesn`t exists if origin.scheme == "http" or origin.scheme == "ws": # Default port return for http, ws return 80 elif origin.scheme == "https" or origin.scheme == "wss": # Default port return for https, wss return 443 else: return None def AllowedHostsOriginValidator(application): """ Factory function which returns an OriginValidator configured to use settings.ALLOWED_HOSTS. """ allowed_hosts = settings.ALLOWED_HOSTS if settings.DEBUG and not allowed_hosts: allowed_hosts = ["localhost", "127.0.0.1", "[::1]"] return OriginValidator(application, allowed_hosts) class WebsocketDenier(AsyncWebsocketConsumer): """ Simple application which denies all requests to it. 
""" async def connect(self): await self.close() channels-4.0.0/channels/sessions.py000066400000000000000000000235161432260166700173370ustar00rootroot00000000000000import datetime import time from importlib import import_module from django.conf import settings from django.contrib.sessions.backends.base import UpdateError from django.core.exceptions import SuspiciousOperation from django.http import parse_cookie from django.http.cookie import SimpleCookie from django.utils import timezone from django.utils.encoding import force_str from django.utils.functional import LazyObject from channels.db import database_sync_to_async try: from django.utils.http import http_date except ImportError: from django.utils.http import cookie_date as http_date class CookieMiddleware: """ Extracts cookies from HTTP or WebSocket-style scopes and adds them as a scope["cookies"] entry with the same format as Django's request.COOKIES. """ def __init__(self, inner): self.inner = inner async def __call__(self, scope, receive, send): # Check this actually has headers. They're a required scope key for HTTP and WS. if "headers" not in scope: raise ValueError( "CookieMiddleware was passed a scope that did not have a headers key " + "(make sure it is only passed HTTP or WebSocket connections)" ) # Go through headers to find the cookie one for name, value in scope.get("headers", []): if name == b"cookie": cookies = parse_cookie(value.decode("latin1")) break else: # No cookie header found - add an empty default. cookies = {} # Return inner application return await self.inner(dict(scope, cookies=cookies), receive, send) @classmethod def set_cookie( cls, message, key, value="", max_age=None, expires=None, path="/", domain=None, secure=False, httponly=False, samesite="lax", ): """ Sets a cookie in the passed HTTP response message. ``expires`` can be: - a string in the correct format, - a naive ``datetime.datetime`` object in UTC, - an aware ``datetime.datetime`` object in any time zone. 
If it is a ``datetime.datetime`` object then ``max_age`` will be calculated. """ value = force_str(value) cookies = SimpleCookie() cookies[key] = value if expires is not None: if isinstance(expires, datetime.datetime): if timezone.is_aware(expires): expires = timezone.make_naive(expires, timezone.utc) delta = expires - expires.utcnow() # Add one second so the date matches exactly (a fraction of # time gets lost between converting to a timedelta and # then the date string). delta = delta + datetime.timedelta(seconds=1) # Just set max_age - the max_age logic will set expires. expires = None max_age = max(0, delta.days * 86400 + delta.seconds) else: cookies[key]["expires"] = expires else: cookies[key]["expires"] = "" if max_age is not None: cookies[key]["max-age"] = max_age # IE requires expires, so set it if hasn't been already. if not expires: cookies[key]["expires"] = http_date(time.time() + max_age) if path is not None: cookies[key]["path"] = path if domain is not None: cookies[key]["domain"] = domain if secure: cookies[key]["secure"] = True if httponly: cookies[key]["httponly"] = True if samesite is not None: assert samesite.lower() in [ "strict", "lax", "none", ], "samesite must be either 'strict', 'lax' or 'none'" cookies[key]["samesite"] = samesite # Write out the cookies to the response for c in cookies.values(): message.setdefault("headers", []).append( (b"Set-Cookie", bytes(c.output(header=""), encoding="utf-8")) ) @classmethod def delete_cookie(cls, message, key, path="/", domain=None): """ Deletes a cookie in a response. """ return cls.set_cookie( message, key, max_age=0, path=path, domain=domain, expires="Thu, 01-Jan-1970 00:00:00 GMT", ) class InstanceSessionWrapper: """ Populates the session in application instance scope, and wraps send to save the session. 
""" # Message types that trigger a session save if it's modified save_message_types = ["http.response.start"] # Message types that can carry session cookies back cookie_response_message_types = ["http.response.start"] def __init__(self, scope, send): self.cookie_name = settings.SESSION_COOKIE_NAME self.session_store = import_module(settings.SESSION_ENGINE).SessionStore self.scope = dict(scope) if "session" in self.scope: # There's already session middleware of some kind above us, pass # that through self.activated = False else: # Make sure there are cookies in the scope if "cookies" not in self.scope: raise ValueError( "No cookies in scope - SessionMiddleware needs to run " "inside of CookieMiddleware." ) # Parse the headers in the scope into cookies self.scope["session"] = LazyObject() self.activated = True # Override send self.real_send = send async def resolve_session(self): session_key = self.scope["cookies"].get(self.cookie_name) self.scope["session"]._wrapped = await database_sync_to_async( self.session_store )(session_key) async def send(self, message): """ Overridden send that also does session saves/cookies. """ # Only save session if we're the outermost session middleware if self.activated: modified = self.scope["session"].modified empty = self.scope["session"].is_empty() # If this is a message type that we want to save on, and there's # changed data, save it. We also save if it's empty as we might # not be able to send a cookie-delete along with this message. if ( message["type"] in self.save_message_types and message.get("status", 200) != 500 and (modified or settings.SESSION_SAVE_EVERY_REQUEST) ): await database_sync_to_async(self.save_session)() # If this is a message type that can transport cookies back to the # client, then do so. 
if message["type"] in self.cookie_response_message_types: if empty: # Delete cookie if it's set if settings.SESSION_COOKIE_NAME in self.scope["cookies"]: CookieMiddleware.delete_cookie( message, settings.SESSION_COOKIE_NAME, path=settings.SESSION_COOKIE_PATH, domain=settings.SESSION_COOKIE_DOMAIN, ) else: # Get the expiry data if self.scope["session"].get_expire_at_browser_close(): max_age = None expires = None else: max_age = self.scope["session"].get_expiry_age() expires_time = time.time() + max_age expires = http_date(expires_time) # Set the cookie CookieMiddleware.set_cookie( message, self.cookie_name, self.scope["session"].session_key, max_age=max_age, expires=expires, domain=settings.SESSION_COOKIE_DOMAIN, path=settings.SESSION_COOKIE_PATH, secure=settings.SESSION_COOKIE_SECURE or None, httponly=settings.SESSION_COOKIE_HTTPONLY or None, samesite=settings.SESSION_COOKIE_SAMESITE, ) # Pass up the send return await self.real_send(message) def save_session(self): """ Saves the current session. """ try: self.scope["session"].save() except UpdateError: raise SuspiciousOperation( "The request's session was deleted before the " "request completed. The user may have logged " "out in a concurrent request, for example." ) class SessionMiddleware: """ Class that adds Django sessions (from HTTP cookies) to the scope. Works with HTTP or WebSocket protocol types (or anything that provides a "headers" entry in the scope). Requires the CookieMiddleware to be higher up in the stack. """ def __init__(self, inner): self.inner = inner async def __call__(self, scope, receive, send): """ Instantiate a session wrapper for this scope, resolve the session and call the inner application. 
""" wrapper = InstanceSessionWrapper(scope, send) await wrapper.resolve_session() return await self.inner(wrapper.scope, receive, wrapper.send) # Shortcut to include cookie middleware def SessionMiddlewareStack(inner): return CookieMiddleware(SessionMiddleware(inner)) channels-4.0.0/channels/testing/000077500000000000000000000000001432260166700165655ustar00rootroot00000000000000channels-4.0.0/channels/testing/__init__.py000066400000000000000000000005271432260166700207020ustar00rootroot00000000000000from asgiref.testing import ApplicationCommunicator # noqa from .http import HttpCommunicator # noqa from .live import ChannelsLiveServerTestCase # noqa from .websocket import WebsocketCommunicator # noqa __all__ = [ "ApplicationCommunicator", "HttpCommunicator", "ChannelsLiveServerTestCase", "WebsocketCommunicator", ] channels-4.0.0/channels/testing/http.py000066400000000000000000000037731432260166700201300ustar00rootroot00000000000000from urllib.parse import unquote, urlparse from asgiref.testing import ApplicationCommunicator class HttpCommunicator(ApplicationCommunicator): """ ApplicationCommunicator subclass that has HTTP shortcut methods. It will construct the scope for you, so you need to pass the application (uninstantiated) along with HTTP parameters. This does not support full chunking - for that, just use ApplicationCommunicator directly. """ def __init__(self, application, method, path, body=b"", headers=None): parsed = urlparse(path) self.scope = { "type": "http", "http_version": "1.1", "method": method.upper(), "path": unquote(parsed.path), "query_string": parsed.query.encode("utf-8"), "headers": headers or [], } assert isinstance(body, bytes) self.body = body self.sent_request = False super().__init__(application, self.scope) async def get_response(self, timeout=1): """ Get the application's response. Returns a dict with keys of "body", "headers" and "status". 
""" # If we've not sent the request yet, do so if not self.sent_request: self.sent_request = True await self.send_input({"type": "http.request", "body": self.body}) # Get the response start response_start = await self.receive_output(timeout) assert response_start["type"] == "http.response.start" # Get all body parts response_start["body"] = b"" while True: chunk = await self.receive_output(timeout) assert chunk["type"] == "http.response.body" assert isinstance(chunk["body"], bytes) response_start["body"] += chunk["body"] if not chunk.get("more_body", False): break # Return structured info del response_start["type"] response_start.setdefault("headers", []) return response_start channels-4.0.0/channels/testing/live.py000066400000000000000000000050501432260166700200760ustar00rootroot00000000000000from functools import partial from daphne.testing import DaphneProcess from django.contrib.staticfiles.handlers import ASGIStaticFilesHandler from django.core.exceptions import ImproperlyConfigured from django.db import connections from django.test.testcases import TransactionTestCase from django.test.utils import modify_settings from channels.routing import get_default_application def make_application(*, static_wrapper): # Module-level function for pickle-ability application = get_default_application() if static_wrapper is not None: application = static_wrapper(application) return application class ChannelsLiveServerTestCase(TransactionTestCase): """ Does basically the same as TransactionTestCase but also launches a live Daphne server in a separate process, so that the tests may use another test framework, such as Selenium, instead of the built-in dummy client. 
""" host = "localhost" ProtocolServerProcess = DaphneProcess static_wrapper = ASGIStaticFilesHandler serve_static = True @property def live_server_url(self): return "http://%s:%s" % (self.host, self._port) @property def live_server_ws_url(self): return "ws://%s:%s" % (self.host, self._port) def _pre_setup(self): for connection in connections.all(): if self._is_in_memory_db(connection): raise ImproperlyConfigured( "ChannelLiveServerTestCase can not be used with in memory databases" ) super(ChannelsLiveServerTestCase, self)._pre_setup() self._live_server_modified_settings = modify_settings( ALLOWED_HOSTS={"append": self.host} ) self._live_server_modified_settings.enable() get_application = partial( make_application, static_wrapper=self.static_wrapper if self.serve_static else None, ) self._server_process = self.ProtocolServerProcess(self.host, get_application) self._server_process.start() self._server_process.ready.wait() self._port = self._server_process.port.value def _post_teardown(self): self._server_process.terminate() self._server_process.join() self._live_server_modified_settings.disable() super(ChannelsLiveServerTestCase, self)._post_teardown() def _is_in_memory_db(self, connection): """ Check if DatabaseWrapper holds in memory database. """ if connection.vendor == "sqlite": return connection.is_in_memory_db() channels-4.0.0/channels/testing/websocket.py000066400000000000000000000074751432260166700211420ustar00rootroot00000000000000import json from urllib.parse import unquote, urlparse from asgiref.testing import ApplicationCommunicator class WebsocketCommunicator(ApplicationCommunicator): """ ApplicationCommunicator subclass that has WebSocket shortcut methods. It will construct the scope for you, so you need to pass the application (uninstantiated) along with the initial connection parameters. 
""" def __init__(self, application, path, headers=None, subprotocols=None): if not isinstance(path, str): raise TypeError("Expected str, got {}".format(type(path))) parsed = urlparse(path) self.scope = { "type": "websocket", "path": unquote(parsed.path), "query_string": parsed.query.encode("utf-8"), "headers": headers or [], "subprotocols": subprotocols or [], } super().__init__(application, self.scope) async def connect(self, timeout=1): """ Trigger the connection code. On an accepted connection, returns (True, ) On a rejected connection, returns (False, ) """ await self.send_input({"type": "websocket.connect"}) response = await self.receive_output(timeout) if response["type"] == "websocket.close": return (False, response.get("code", 1000)) else: return (True, response.get("subprotocol", None)) async def send_to(self, text_data=None, bytes_data=None): """ Sends a WebSocket frame to the application. """ # Make sure we have exactly one of the arguments assert bool(text_data) != bool( bytes_data ), "You must supply exactly one of text_data or bytes_data" # Send the right kind of event if text_data: assert isinstance(text_data, str), "The text_data argument must be a str" await self.send_input({"type": "websocket.receive", "text": text_data}) else: assert isinstance( bytes_data, bytes ), "The bytes_data argument must be bytes" await self.send_input({"type": "websocket.receive", "bytes": bytes_data}) async def send_json_to(self, data): """ Sends JSON data as a text frame """ await self.send_to(text_data=json.dumps(data)) async def receive_from(self, timeout=1): """ Receives a data frame from the view. Will fail if the connection closes instead. Returns either a bytestring or a unicode string depending on what sort of frame you got. 
""" response = await self.receive_output(timeout) # Make sure this is a send message assert response["type"] == "websocket.send" # Make sure there's exactly one key in the response assert ("text" in response) != ( "bytes" in response ), "The response needs exactly one of 'text' or 'bytes'" # Pull out the right key and typecheck it for our users if "text" in response: assert isinstance(response["text"], str), "Text frame payload is not str" return response["text"] else: assert isinstance( response["bytes"], bytes ), "Binary frame payload is not bytes" return response["bytes"] async def receive_json_from(self, timeout=1): """ Receives a JSON text frame payload and decodes it """ payload = await self.receive_from(timeout) assert isinstance(payload, str), "JSON data is not a text frame" return json.loads(payload) async def disconnect(self, code=1000, timeout=1): """ Closes the socket """ await self.send_input({"type": "websocket.disconnect", "code": code}) await self.wait(timeout) channels-4.0.0/channels/utils.py000066400000000000000000000042011432260166700166170ustar00rootroot00000000000000import asyncio import types def name_that_thing(thing): """ Returns either the function/class path or just the object's repr """ # Instance method if hasattr(thing, "im_class"): # Mocks will recurse im_class forever if hasattr(thing, "mock_calls"): return "" return name_that_thing(thing.im_class) + "." 
+ thing.im_func.func_name # Other named thing if hasattr(thing, "__name__"): if hasattr(thing, "__class__") and not isinstance( thing, (types.FunctionType, types.MethodType) ): if thing.__class__ is not type and not issubclass(thing.__class__, type): return name_that_thing(thing.__class__) if hasattr(thing, "__self__"): return "%s.%s" % (thing.__self__.__module__, thing.__self__.__name__) if hasattr(thing, "__module__"): return "%s.%s" % (thing.__module__, thing.__name__) # Generic instance of a class if hasattr(thing, "__class__"): return name_that_thing(thing.__class__) return repr(thing) async def await_many_dispatch(consumer_callables, dispatch): """ Given a set of consumer callables, awaits on them all and passes results from them to the dispatch awaitable as they come in. """ # Call all callables, and ensure all return types are Futures tasks = [ asyncio.ensure_future(consumer_callable()) for consumer_callable in consumer_callables ] try: while True: # Wait for any of them to complete await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) # Find the completed one(s), yield results, and replace them for i, task in enumerate(tasks): if task.done(): result = task.result() await dispatch(result) tasks[i] = asyncio.ensure_future(consumer_callables[i]()) finally: # Make sure we clean up tasks on exit for task in tasks: task.cancel() try: await task except asyncio.CancelledError: pass channels-4.0.0/channels/worker.py000066400000000000000000000032271432260166700167770ustar00rootroot00000000000000import asyncio from asgiref.server import StatelessServer class Worker(StatelessServer): """ ASGI protocol server that surfaces events sent to specific channels on the channel layer into a single application instance. 
""" def __init__(self, application, channels, channel_layer, max_applications=1000): super().__init__(application, max_applications) self.channels = channels self.channel_layer = channel_layer if self.channel_layer is None: raise ValueError("Channel layer is not valid") async def handle(self): """ Listens on all the provided channels and handles the messages. """ # For each channel, launch its own listening coroutine listeners = [] for channel in self.channels: listeners.append(asyncio.ensure_future(self.listener(channel))) # Wait for them all to exit await asyncio.wait(listeners) # See if any of the listeners had an error (e.g. channel layer error) [listener.result() for listener in listeners] async def listener(self, channel): """ Single-channel listener """ while True: message = await self.channel_layer.receive(channel) if not message.get("type", None): raise ValueError("Worker received message with no type.") # Make a scope and get an application instance for it scope = {"type": "channel", "channel": channel} instance_queue = self.get_or_create_application_instance(channel, scope) # Run the message into the app await instance_queue.put(message) channels-4.0.0/docs/000077500000000000000000000000001432260166700142455ustar00rootroot00000000000000channels-4.0.0/docs/Makefile000066400000000000000000000151671432260166700157170ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. 
If you don't have Sphinx installed, grab it from https://www.sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 
dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Channels.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Channels.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/Channels" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Channels" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." 
$(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. 
The pseudo-XML files are in $(BUILDDIR)/pseudoxml." channels-4.0.0/docs/asgi.rst000066400000000000000000000045471432260166700157340ustar00rootroot00000000000000ASGI ==== `ASGI `_, or the Asynchronous Server Gateway Interface, is the specification which Channels and Daphne are built upon, designed to untie Channels apps from a specific application server and provide a common way to write application and middleware code. It's a spiritual successor to WSGI, designed not only run in an asynchronous fashion via ``asyncio``, but also supporting multiple protocols. The full ASGI spec can be found at https://asgi.readthedocs.io Summary ------- ASGI is structured as a single asynchronous callable, which takes a dict ``scope`` and two callables ``receive`` and ``send``: .. code-block:: python async def application(scope, receive, send): event = await receive() ... await send({"type": "websocket.send", ...}) The ``scope`` dict defines the properties of a connection, like its remote IP (for HTTP) or username (for a chat protocol), and the lifetime of a connection. Applications are *instantiated* once per scope - so, for example, once per HTTP request, or once per open WebSocket connection. Scopes always have a ``type`` key, which tells you what kind of connection it is and what other keys to expect in the scope (and what sort of messages to expect). The ``receive`` awaitable provides events as dicts as they occur, and the ``send`` awaitable sends events back to the client in a similar dict format. A *protocol server* sits between the client and your application code, decoding the raw protocol into the scope and event dicts and encoding anything you send back down onto the protocol. Composability ------------- ASGI applications, like WSGI ones, are designed to be composable, and this includes Channels' routing and middleware components like ``ProtocolTypeRouter`` and ``SessionMiddleware``. 
These are just ASGI applications that take other ASGI applications as arguments, so you can pass around just one top-level application for a whole Django project and dispatch down to the right consumer based on what sort of connection you're handling. Protocol Specifications ----------------------- The basic ASGI spec only outlines the interface for an ASGI app - it does not specify how network protocols are encoded to and from scopes and event dicts. That's the job of protocol specifications: * HTTP and WebSocket: https://github.com/django/asgiref/blob/master/specs/www.rst channels-4.0.0/docs/channel_layer_spec.rst000066400000000000000000000312131432260166700206150ustar00rootroot00000000000000=========================== Channel Layer Specification =========================== .. note:: Channel layers are now internal only to Channels, and not used as part of ASGI. This spec defines what Channels and applications written using it expect a channel layer to provide. Abstract ======== This document outlines a set of standardized definitions for *channels* and a *channel layer* which provides a mechanism to send and receive messages over them. They allow inter-process communication between different processes to help build applications that have messaging and events between different clients. Overview ======== Messages -------- Messages must be a ``dict``. Because these messages are sometimes sent over a network, they need to be serializable, and so they are only allowed to contain the following types: * Byte strings * Unicode strings * Integers (within the signed 64 bit range) * Floating point numbers (within the IEEE 754 double precision range) * Lists (tuples should be encoded as lists) * Dicts (keys must be unicode strings) * Booleans * None Channels -------- Channels are identified by a unicode string name consisting only of ASCII letters, ASCII numerical digits, periods (``.``), dashes (``-``) and underscores (``_``), plus an optional type character (see below). 
Channels are first-in, first-out queues with at-most-once delivery semantics.
The maximum message size is 1MB if the message were encoded as JSON; if more data than this needs to be transmitted it must be chunked into smaller messages. All channel layers must support messages up to this size, but channel layer users are encouraged to keep well below it. .. _asgi_extensions: Extensions ---------- Extensions are functionality that is not required for basic application code and nearly all protocol server code, and so has been made optional in order to enable lightweight channel layers for applications that don't need the full feature set defined here. The extensions defined here are: * ``groups``: Allows grouping of channels to allow broadcast; see below for more. * ``flush``: Allows easier testing and development with channel layers. There is potential to add further extensions; these may be defined by a separate specification, or a new version of this specification. If application code requires an extension, it should check for it as soon as possible, and hard error if it is not provided. Frameworks should encourage optional use of extensions, while attempting to move any extension-not-found errors to process startup rather than message handling. Asynchronous Support -------------------- All channel layers must provide asynchronous (coroutine) methods for their primary endpoints. End-users will be able to achieve synchronous versions using the ``asgiref.sync.async_to_sync`` wrapper. Groups ------ While the basic channel model is sufficient to handle basic application needs, many more advanced uses of asynchronous messaging require notifying many users at once when an event occurs - imagine a live blog, for example, where every viewer should get a long poll response or WebSocket packet when a new entry is posted. Thus, there is an *optional* groups extension which allows easier broadcast messaging to groups of channels. 
End-users are free, of course, to use just channel names and direct sending and build their own persistence/broadcast system instead. Capacity -------- To provide backpressure, each channel in a channel layer may have a capacity, defined however the layer wishes (it is recommended that it is configurable by the user using keyword arguments to the channel layer constructor, and furthermore configurable per channel name or name prefix). When a channel is at or over capacity, trying to send() to that channel may raise ChannelFull, which indicates to the sender the channel is over capacity. How the sender wishes to deal with this will depend on context; for example, a web application trying to send a response body will likely wait until it empties out again, while a HTTP interface server trying to send in a request would drop the request and return a 503 error. Process-local channels must apply their capacity on the non-local part (that is, up to and including the ``!`` character), and so capacity is shared among all of the "virtual" channels inside it. Sending to a group never raises ChannelFull; instead, it must silently drop the message if it is over capacity, as per ASGI's at-most-once delivery policy. Specification Details ===================== A *channel layer* must provide an object with these attributes (all function arguments are positional): * ``coroutine send(channel, message)``, that takes two arguments: the channel to send on, as a unicode string, and the message to send, as a serializable ``dict``. * ``coroutine receive(channel)``, that takes a single channel name and returns the next received message on that channel. * ``coroutine new_channel()``, which returns a new process-specific channel that can be used to give to a local coroutine or receiver. * ``MessageTooLarge``, the exception raised when a send operation fails because the encoded message is over the layer's size limit. 
* ``ChannelFull``, the exception raised when a send operation fails because the destination channel is over capacity. * ``extensions``, a list of unicode string names indicating which extensions this layer provides, or an empty list if it supports none. The possible extensions can be seen in :ref:`asgi_extensions`. A channel layer implementing the ``groups`` extension must also provide: * ``coroutine group_add(group, channel)``, that takes a ``channel`` and adds it to the group given by ``group``. Both are unicode strings. If the channel is already in the group, the function should return normally. * ``coroutine group_discard(group, channel)``, that removes the ``channel`` from the ``group`` if it is in it, and does nothing otherwise. * ``coroutine group_send(group, message)``, that takes two positional arguments; the group to send to, as a unicode string, and the message to send, as a serializable ``dict``. It may raise MessageTooLarge but cannot raise ChannelFull. * ``group_expiry``, an integer number of seconds that specifies how long group membership is valid for after the most recent ``group_add`` call (see *Persistence* below) A channel layer implementing the ``flush`` extension must also provide: * ``coroutine flush()``, that resets the channel layer to a blank state, containing no messages and no groups (if the groups extension is implemented). This call must block until the system is cleared and will consistently look empty to any client, if the channel layer is distributed. Channel Semantics ----------------- Channels **must**: * Preserve ordering of messages perfectly with only a single reader and writer if the channel is a *single-reader* or *process-specific* channel. * Never deliver a message more than once. 
* Never block on message send (though they may raise ChannelFull or MessageTooLarge) * Be able to handle messages of at least 1MB in size when encoded as JSON (the implementation may use better encoding or compression, as long as it meets the equivalent size) * Have a maximum name length of at least 100 bytes. They should attempt to preserve ordering in all cases as much as possible, but perfect global ordering is obviously not possible in the distributed case. They are not expected to deliver all messages, but a success rate of at least 99.99% is expected under normal circumstances. Implementations may want to have a "resilience testing" mode where they deliberately drop more messages than usual so developers can test their code's handling of these scenarios. Persistence ----------- Channel layers do not need to persist data long-term; group memberships only need to live as long as a connection does, and messages only as long as the message expiry time, which is usually a couple of minutes. If a channel layer implements the ``groups`` extension, it must persist group membership until at least the time when the member channel has a message expire due to non-consumption, after which it may drop membership at any time. If a channel subsequently has a successful delivery, the channel layer must then not drop group membership until another message expires on that channel. Channel layers must also drop group membership after a configurable long timeout after the most recent ``group_add`` call for that membership, the default being 86,400 seconds (one day). The value of this timeout is exposed as the ``group_expiry`` property on the channel layer. Approximate Global Ordering --------------------------- While maintaining true global (across-channels) ordering of messages is entirely unreasonable to expect of many implementations, they should strive to prevent busy channels from overpowering quiet channels. 
For example, imagine two channels, ``busy``, which spikes to 1000 messages a second, and ``quiet``, which gets one message a second. There's a single consumer receiving from both ``busy`` and ``quiet`` which can handle around 200 messages a second.
Channel and group names are always unicode strings, with the additional limitation that they only use the following characters: * ASCII letters * The digits ``0`` through ``9`` * Hyphen ``-`` * Underscore ``_`` * Period ``.`` * Question mark ``?`` (only to delineate single-reader channel names, and only one per name) * Exclamation mark ``!`` (only to delineate process-specific channel names, and only one per name) Copyright ========= This document has been placed in the public domain. channels-4.0.0/docs/community.rst000066400000000000000000000034221432260166700170240ustar00rootroot00000000000000Community Projects ================== These projects from the community are developed on top of Channels: * Beatserver_, a periodic task scheduler for Django Channels. * EventStream_, a library to push data using the Server-Sent Events (SSE) protocol. * DjangoChannelsRestFramework_, a framework that provides DRF-like consumers for Channels. * ChannelsMultiplexer_, a JsonConsumer Multiplexer for Channels. * DjangoChannelsIRC_, an interface server and matching generic consumers for IRC. * Apollo_, a real-time polling application for corporate and academic environments. * DjangoChannelsJsonRpc_, a wrapper for the JSON-RPC protocol. * channels-demultiplexer_, a (de)multiplexer for ``AsyncJsonWebsocketConsumer`` consumers. * channels_postgres_, a Django Channels channel layer that uses PostgreSQL as its backing store. Community Tutorials =================== Here are some Channels tutorials from around the community: * kafka-integration_, a writeup on integrating Kafka with Channels. If you'd like to add your project, please submit a PR with a link and brief description. .. _Beatserver: https://github.com/rajasimon/beatserver .. _EventStream: https://github.com/fanout/django-eventstream .. _DjangoChannelsRestFramework: https://github.com/hishnash/djangochannelsrestframework .. _ChannelsMultiplexer: https://github.com/hishnash/channelsmultiplexer .. 
_DjangoChannelsIRC: https://github.com/AdvocatesInc/django-channels-irc .. _Apollo: https://github.com/maliesa96/apollo .. _DjangoChannelsJsonRpc: https://github.com/millerf/django-channels2-jsonrpc .. _channels-demultiplexer: https://github.com/csdenboer/channels-demultiplexer .. _kafka-integration: https://gist.github.com/aryan340/da071d027050cfe0a03df3b500f2f44b .. _channels_postgres: https://github.com/danidee10/channels_postgres channels-4.0.0/docs/conf.py000066400000000000000000000200361432260166700155450ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Channels documentation build configuration file, created by # sphinx-quickstart on Fri Jun 19 11:37:58 2015. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('..')) from channels import __version__ # noqa # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. 
project = u'Channels' copyright = u'2022, Django Software Foundation' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = __version__ # The full version, including alpha/beta/rc tags. release = __version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. 
#html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 
#html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'Channelsdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'Channels.tex', u'Channels Documentation', u'Andrew Godwin', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'channels', u'Channels Documentation', [u'Andrew Godwin'], 1) ] # If true, show URL addresses after external links. 
#man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'Channels', u'Channels Documentation', u'Andrew Godwin', 'Channels', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False channels-4.0.0/docs/contributing.rst000066400000000000000000000112601432260166700175060ustar00rootroot00000000000000Contributing ============ If you're looking to contribute to Channels, then please read on - we encourage contributions both large and small, from both novice and seasoned developers. What can I work on? ------------------- We're looking for help with the following areas: * Documentation and tutorial writing * Bugfixing and testing * Feature polish and occasional new feature design * Case studies and writeups You can find what we're looking to work on in the GitHub issues list for each of the Channels sub-projects: * `Channels issues `_, for the Django integration and overall project efforts * `Daphne issues `_, for the HTTP and Websocket termination * `asgiref issues `_, for the base ASGI library/memory backend * `channels_redis issues `_, for the Redis channel backend Issues are categorized by difficulty level: * ``exp/beginner``: Easy issues suitable for a first-time contributor. * ``exp/intermediate``: Moderate issues that need skill and a day or two to solve. * ``exp/advanced``: Difficult issues that require expertise and potentially weeks of work. 
They are also classified by type: * ``documentation``: Documentation issues. Pick these if you want to help us by writing docs. * ``bug``: A bug in existing code. Usually easier for beginners as there's a defined thing to fix. * ``enhancement``: A new feature for the code; may be a bit more open-ended. You should filter the issues list by the experience level and type of work you'd like to do, and then if you want to take something on leave a comment and assign yourself to it. If you want advice about how to take on a bug, leave a comment asking about it and we'll be happy to help. The issues are also just a suggested list - any offer to help is welcome as long as it fits the project goals, but you should make an issue for the thing you wish to do and discuss it first if it's relatively large (but if you just found a small bug and want to fix it, sending us a pull request straight away is fine). I'm a novice contributor/developer - can I help? ------------------------------------------------ Of course! The issues labelled with ``exp/beginner`` are a perfect place to get started, as they're usually small and well defined. If you want help with one of them, jump in and comment on the ticket if you need input or assistance. How do I get started and run the tests? --------------------------------------- First, you should first clone the git repository to a local directory: .. code-block:: sh git clone https://github.com/django/channels.git channels Next, you may want to make a virtual environment to run the tests and develop in; you can use either ``virtualenvwrapper``, ``pipenv`` or just plain ``virtualenv`` for this. Then, ``cd`` into the ``channels`` directory and install it editable into your environment: .. code-block:: sh cd channels/ python -m pip install -e .[tests] Note the ``[tests]`` section there; that tells ``pip`` that you want to install the ``tests`` extra, which will bring in testing dependencies like ``pytest-django``. Then, you can run the tests: .. 
code-block:: sh pytest Also, there is a tox.ini file at the root of the repository. Example commands: .. code-block:: sh $ tox -l py37-dj32 py38-dj32 py39-dj32 py310-dj32 py38-dj40 py38-dj41 py38-djmain py39-dj40 py39-dj41 py39-djmain py310-dj40 py310-dj41 py310-djmain qa # run the test with Python 3.10, on Django 4.1 and Django main branch $ tox -e py310-dj41,py310-djmain Note that tox can also forward arguments to pytest. When using pdb with pytest, forward the ``-s`` option to pytest as such: .. code-block:: sh tox -e py310-dj41 -- -s The ``qa`` environment runs the various linters used by the project. How do I do a release? ---------------------- If you have commit access, a release involves the following steps: * Create a new entry in the CHANGELOG.txt file and summarise the changes * Create a new release page in the docs under ``docs/releases`` and add the changelog there with more information where necessary * Add a link to the new release notes in ``docs/releases/index.rst`` * Set the new version in ``__init__.py`` * Roll all of these up into a single commit and tag it with the new version number. Push the commit and tag. * To upload you will need to be added as a maintainer on PyPI. Run `python setup.py sdist bdist_wheel`, and `twine upload`. The release process for ``channels-redis`` and ``daphne`` is similar, but they don't have the two steps in ``docs/``. channels-4.0.0/docs/deploying.rst000066400000000000000000000213601432260166700167730ustar00rootroot00000000000000Deploying ========= Channels (ASGI) applications deploy similarly to WSGI applications - you load them into a server, like Daphne, and you can scale the number of server processes up and down. The one optional extra requirement for a Channels project is to provision a :doc:`channel layer `. Both steps are covered below. 
Configuring the ASGI application -------------------------------- As discussed in :doc:`installation` and :doc:`/topics/routing`, you will have a file like ``myproject/asgi.py`` that will define your *root application*. This is almost certainly going to be your top-level (Protocol Type) router. Here's an example of what that ``asgi.py`` might look like: .. include:: ./includes/asgi_example.rst Setting up a channel backend ---------------------------- .. note:: This step is optional. If you aren't using the channel layer, skip this section. Typically a channel backend will connect to one or more central servers that serve as the communication layer - for example, the Redis backend connects to a Redis server. All this goes into the ``CHANNEL_LAYERS`` setting; here's an example for a remote Redis server: .. code-block:: python CHANNEL_LAYERS = { "default": { "BACKEND": "channels_redis.core.RedisChannelLayer", "CONFIG": { "hosts": [("redis-server-name", 6379)], }, }, } To use the Redis backend you have to install it: .. code-block:: sh pip install -U channels_redis Run protocol servers -------------------- In order to talk to the outside world, your Channels/ASGI application needs to be loaded into a *protocol server*. These can be like WSGI servers and run your application in a HTTP mode, but they can also bridge to any number of other protocols (chat protocols, IoT protocols, even radio networks). All these servers have their own configuration options, but they all have one thing in common - they will want you to pass them an ASGI application to run. All you need to do is pass the ``application`` object inside your project's ``asgi.py`` file to your protocol server as the application it should run: .. 
code-block:: sh daphne -p 8001 myproject.asgi:application HTTP and WebSocket ------------------ While ASGI is a general protocol and we can't cover all possible servers here, it's very likely you will want to deploy a Channels project to work over HTTP and potentially WebSocket, so we'll cover that in some more detail. The Channels project maintains an official ASGI HTTP/WebSocket server, `Daphne `_, and it's this that we'll talk about configuring. Other HTTP/WebSocket ASGI servers are possible and will work just as well provided they follow the spec, but will have different configuration. You can choose to either use Daphne for all requests - HTTP and WebSocket - or if you are conservative about stability, keep running standard HTTP requests through a WSGI server and use Daphne only for things WSGI cannot do, like HTTP long-polling and WebSockets. If you do split, you'll need to put something in front of Daphne and your WSGI server to work out what requests to send to each (using HTTP path or domain) - that's not covered here, just know you can do it. If you use Daphne for all traffic, it auto-negotiates between HTTP and WebSocket, so there's no need to have your WebSockets on a separate domain or path (and they'll be able to share cookies with your normal view code, which isn't possible if you separate by domain rather than path). To run Daphne, it just needs to be supplied with an application, much like a WSGI server would need to be. Make sure you have an ``asgi.py`` file as outlined above. Then, you can run Daphne and supply the ASGI application as the argument: .. code-block:: sh daphne myproject.asgi:application You should run Daphne inside either a process supervisor (systemd, supervisord) or a container orchestration system (kubernetes, nomad) to ensure that it gets restarted if needed and to allow you to scale the number of processes. 
If you want to bind multiple Daphne instances to the same port on a machine, use a process supervisor that can listen on ports and pass the file descriptors to launched processes, and then pass the file descriptor with ``--fd NUM``. You can also specify the port and IP that Daphne binds to: .. code-block:: sh daphne -b 0.0.0.0 -p 8001 myproject.asgi:application You can see more about Daphne and its options `on GitHub `_. Alternative Web Servers ----------------------- There are also alternative `ASGI `_ servers that you can use for serving Channels. To some degree ASGI web servers should be interchangeable, they should all have the same basic functionality in terms of serving HTTP and WebSocket requests. Aspects where servers may differ are in their configuration and defaults, performance characteristics, support for resource limiting, differing protocol and socket support, and approaches to process management. You can see more alternative servers, such as Uvicorn, in the `ASGI implementations documentation `_. Example Setups -------------- These are examples of possible setups - they are not guaranteed to work out of the box, and should be taken more as a guide than a direct tutorial. Nginx/Supervisor (Ubuntu) ~~~~~~~~~~~~~~~~~~~~~~~~~ This example sets up a Django site on an Ubuntu server, using Nginx as the main webserver and supervisord to run and manage Daphne. First, install Nginx and Supervisor: .. code-block:: sh $ sudo apt install nginx supervisor Now, you will need to create the supervisor configuration file (often located in ``/etc/supervisor/conf.d/`` - here, we're making Supervisor listen on the TCP port and then handing that socket off to the child processes so they can all share the same bound port: .. 
The /run/ folder is cleared on a server reboot. To make the /run/daphne folder persistent create a file ``/usr/lib/tmpfiles.d/daphne.conf`` with the contents below.
code-block:: text upstream channels-backend { server localhost:8000; } ... server { ... location / { try_files $uri @proxy_to_app; } ... location @proxy_to_app { proxy_pass http://channels-backend; proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection "upgrade"; proxy_redirect off; proxy_set_header Host $host; proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Host $server_name; } ... } Reload nginx to apply the changes: .. code-block:: sh $ sudo service nginx reload channels-4.0.0/docs/includes/000077500000000000000000000000001432260166700160535ustar00rootroot00000000000000channels-4.0.0/docs/includes/asgi_example.rst000066400000000000000000000021331432260166700212420ustar00rootroot00000000000000.. code-block:: python import os from channels.auth import AuthMiddlewareStack from channels.routing import ProtocolTypeRouter, URLRouter from channels.security.websocket import AllowedHostsOriginValidator from django.core.asgi import get_asgi_application from django.urls import path os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings") # Initialize Django ASGI application early to ensure the AppRegistry # is populated before importing code that may import ORM models. 
django_asgi_app = get_asgi_application() from chat.consumers import AdminChatConsumer, PublicChatConsumer application = ProtocolTypeRouter({ # Django's ASGI application to handle traditional HTTP requests "http": django_asgi_app, # WebSocket chat handler "websocket": AllowedHostsOriginValidator( AuthMiddlewareStack( URLRouter([ path("chat/admin/", AdminChatConsumer.as_asgi()), path("chat/", PublicChatConsumer.as_asgi()), ]) ) ), })channels-4.0.0/docs/index.rst000066400000000000000000000034541432260166700161140ustar00rootroot00000000000000Django Channels =============== Channels is a project that takes Django and extends its abilities beyond HTTP - to handle WebSockets, chat protocols, IoT protocols, and more. It's built on a Python specification called `ASGI `_. Channels builds upon the native ASGI support in Django. Whilst Django still handles traditional HTTP, Channels gives you the choice to handle other connections in either a synchronous or asynchronous style. To get started understanding Channels, read our :doc:`introduction`, which will walk through how things work. .. note:: This is documentation for the **4.x series** of Channels. If you are looking for documentation for older versions, you can select ``3.x``, ``2.x``, or ``1.x`` from the versions selector in the bottom-left corner. Projects -------- Channels is comprised of several packages: * `Channels `_, the Django integration layer * `Daphne `_, the HTTP and Websocket termination server * `asgiref `_, the base ASGI library * `channels_redis `_, the Redis channel layer backend (optional) This documentation covers the system as a whole; individual release notes and instructions can be found in the individual repositories. .. _topics: Topics ------ .. 
toctree:: :maxdepth: 2 introduction installation tutorial/index topics/consumers topics/routing topics/databases topics/channel_layers topics/sessions topics/authentication topics/security topics/testing topics/worker deploying topics/troubleshooting Reference --------- .. toctree:: :maxdepth: 2 asgi channel_layer_spec community contributing support releases/index channels-4.0.0/docs/installation.rst000066400000000000000000000054221432260166700175030ustar00rootroot00000000000000Installation ============ Channels is available on PyPI - to install it run: .. code-block:: sh python -m pip install -U channels["daphne"] This will install Channels together with the Daphne ASGI application server. If you wish to use a different application server you can ``pip install channels``, without the optional ``daphne`` add-on. Once that's done, you should add ``daphne`` to the beginning of your ``INSTALLED_APPS`` setting: .. code-block:: python INSTALLED_APPS = ( "daphne", "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sessions", "django.contrib.sites", ... ) This will install the Daphne's ASGI version of the ``runserver`` management command. You can also add ``"channels"`` for Channel's ``runworker`` command. Then, adjust your project's ``asgi.py`` file, e.g. ``myproject/asgi.py``, to wrap the Django ASGI application:: import os from channels.routing import ProtocolTypeRouter from django.core.asgi import get_asgi_application os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings') # Initialize Django ASGI application early to ensure the AppRegistry # is populated before importing code that may import ORM models. django_asgi_app = get_asgi_application() application = ProtocolTypeRouter({ "http": django_asgi_app, # Just HTTP for now. (We can add other protocols later.) }) And finally, set your ``ASGI_APPLICATION`` setting to point to that routing object as your root application: .. 
code-block:: python ASGI_APPLICATION = "myproject.asgi.application" That's it! Once enabled, ``daphne`` will integrate itself into Django and take control of the ``runserver`` command. See :doc:`introduction` for more. .. note:: Please be wary of any other third-party apps that require an overloaded or replacement ``runserver`` command. Daphne provides a separate ``runserver`` command and may conflict with it. An example of such a conflict is with `whitenoise.runserver_nostatic `_ from `whitenoise `_. In order to solve such issues, make sure ``daphne`` is at the top of your ``INSTALLED_APPS`` or remove the offending app altogether. Installing the latest development version ----------------------------------------- To install the latest version of Channels, clone the repo, change to the repo directory, and pip install it into your current virtual environment: .. code-block:: sh $ git clone git@github.com:django/channels.git $ cd channels $ (environment) $ pip install -e . # the dot specifies the current repo channels-4.0.0/docs/introduction.rst000066400000000000000000000302411432260166700175200ustar00rootroot00000000000000Introduction ============ Welcome to Channels! Channels wraps Django's native asynchronous view support, allowing Django projects to handle not only HTTP, but protocols that require long-running connections too - WebSockets, MQTT, chatbots, amateur radio, and more. It does this while preserving Django's synchronous and easy-to-use nature, allowing you to choose how you write your code - synchronous in a style like Django views, fully asynchronous, or a mixture of both. On top of this, it provides integrations with Django's auth system, session system, and more, making it easier than ever to extend your HTTP-only project to other protocols. 
Channels also bundles this event-driven architecture with *channel layers*, a system that allows you to easily communicate between processes, and separate your project into different processes. If you haven't yet installed Channels, you may want to read :doc:`installation` first to get it installed. This introduction isn't a direct tutorial, but you should be able to use it to follow along and make changes to an existing Django project if you like. Turtles All The Way Down ------------------------ Channels operates on the principle of "turtles all the way down" - we have a single idea of what a channels "application" is, and even the simplest of *consumers* (the equivalent of Django views) are an entirely valid :doc:`/asgi` application you can run by themselves. .. note:: ASGI is the name for the asynchronous server specification that Channels is built on. Like WSGI, it is designed to let you choose between different servers and frameworks rather than being locked into Channels and our server Daphne. You can learn more at https://asgi.readthedocs.io Channels gives you the tools to write these basic *consumers* - individual pieces that might handle chat messaging, or notifications - and tie them together with URL routing, protocol detection and other handy things to make a full application. We treat HTTP and the existing Django application as part of a bigger whole. Traditional Django views are still there with Channels and still usable - with Django's native ASGI support but you can also write custom HTTP long-polling handling, or WebSocket receivers, and have that code sit alongside your existing code. URL routing, middleware - they are all just ASGI applications. Our belief is that you want the ability to use safe, synchronous techniques like Django views for most code, but have the option to drop down to a more direct, asynchronous interface for complex tasks. 
Scopes and Events ------------------ Channels and ASGI split up incoming connections into two components: a *scope*, and a series of *events*. The *scope* is a set of details about a single incoming connection - such as the path a web request was made from, or the originating IP address of a WebSocket, or the user messaging a chatbot. The scope persists throughout the connection. For HTTP, the scope just lasts a single request. For WebSockets, it lasts for the lifetime of the socket (but changes if the socket closes and reconnects). For other protocols, it varies based on how the protocol's ASGI spec is written; for example, it's likely that a chatbot protocol would keep one scope open for the entirety of a user's conversation with the bot, even if the underlying chat protocol is stateless. During the lifetime of this *scope*, a series of *events* occur. These represent user interactions - making a HTTP request, for example, or sending a WebSocket frame. Your Channels or ASGI applications will be **instantiated once per scope**, and then be fed the stream of *events* happening within that scope to decide what action to take. An example with HTTP: * The user makes an HTTP request. * We open up a new ``http`` type scope with details of the request's path, method, headers, etc. * We send a ``http.request`` event with the HTTP body content * The Channels or ASGI application processes this and generates a ``http.response`` event to send back to the browser and close the connection. * The HTTP request/response is completed and the scope is destroyed. An example with a chatbot: * The user sends a first message to the chatbot. * This opens a scope containing the user's username, chosen name, and user ID. * The application is given a ``chat.received_message`` event with the event text. It does not have to respond, but could send one, two or more other chat messages back as ``chat.send_message`` events if it wanted to. 
* The user sends more messages to the chatbot and more ``chat.received_message`` events are generated. * After a timeout or when the application process is restarted the scope is closed. Within the lifetime of a scope - be that a chat, an HTTP request, a socket connection or something else - you will have one application instance handling all the events from it, and you can persist things onto the application instance as well. You can choose to write a raw ASGI application if you wish, but Channels gives you an easy-to-use abstraction over them called *consumers*. What is a Consumer? ------------------- A consumer is the basic unit of Channels code. We call it a *consumer* as it *consumes events*, but you can think of it as its own tiny little application. When a request or new socket comes in, Channels will follow its routing table - we'll look at that in a bit - find the right consumer for that incoming connection, and start up a copy of it. This means that, unlike Django views, consumers are long-running. They can also be short-running - after all, HTTP requests can also be served by consumers - but they're built around the idea of living for a little while (they live for the duration of a *scope*, as we described above). A basic consumer looks like this: .. code-block:: python class ChatConsumer(WebsocketConsumer): def connect(self): self.username = "Anonymous" self.accept() self.send(text_data="[Welcome %s!]" % self.username) def receive(self, *, text_data): if text_data.startswith("/name"): self.username = text_data[5:].strip() self.send(text_data="[set your username to %s]" % self.username) else: self.send(text_data=self.username + ": " + text_data) def disconnect(self, message): pass Each different protocol has different kinds of events that happen, and each type is represented by a different method. You write code that handles each event, and Channels will take care of scheduling them and running them all in parallel. 
Underneath, Channels is running on a fully asynchronous event loop, and if you write code like above, it will get called in a synchronous thread. This means you can safely do blocking operations, like calling the Django ORM: .. code-block:: python class LogConsumer(WebsocketConsumer): def connect(self, message): Log.objects.create( type="connected", client=self.scope["client"], ) However, if you want more control and you're willing to work only in asynchronous functions, you can write fully asynchronous consumers: .. code-block:: python class PingConsumer(AsyncConsumer): async def websocket_connect(self, message): await self.send({ "type": "websocket.accept", }) async def websocket_receive(self, message): await asyncio.sleep(1) await self.send({ "type": "websocket.send", "text": "pong", }) You can read more about consumers in :doc:`/topics/consumers`. Routing and Multiple Protocols ------------------------------ You can combine multiple consumers (which are, remember, their own ASGI apps) into one bigger app that represents your project using routing: .. code-block:: python application = URLRouter([ path("chat/admin/", AdminChatConsumer.as_asgi()), path("chat/", PublicChatConsumer.as_asgi()), ]) Channels is not just built around the world of HTTP and WebSockets - it also allows you to build any protocol into a Django environment, by building a server that maps those protocols into a similar set of events. For example, you can build a chatbot in a similar style: .. code-block:: python class ChattyBotConsumer(SyncConsumer): def telegram_message(self, message): """ Simple echo handler for telegram messages in any chat. """ self.send({ "type": "telegram.message", "text": "You said: %s" % message["text"], }) And then use another router to have the one project able to serve both WebSockets and chat requests: .. 
code-block:: python application = ProtocolTypeRouter({ "websocket": URLRouter([ path("chat/admin/", AdminChatConsumer.as_asgi()), path("chat/", PublicChatConsumer.as_asgi()), ]), "telegram": ChattyBotConsumer.as_asgi(), }) The goal of Channels is to let you build out your Django projects to work across any protocol or transport you might encounter in the modern web, while letting you work with the familiar components and coding style you're used to. For more information about protocol routing, see :doc:`/topics/routing`. Cross-Process Communication --------------------------- Much like a standard WSGI server, your application code that is handling protocol events runs inside the server process itself - for example, WebSocket handling code runs inside your WebSocket server process. Each socket or connection to your overall application is handled by an *application instance* inside one of these servers. They get called and can send data back to the client directly. However, as you build more complex application systems you start needing to communicate between different *application instances* - for example, if you are building a chatroom, when one *application instance* receives an incoming message, it needs to distribute it out to any other instances that represent people in the chatroom. You can do this by polling a database, but Channels introduces the idea of a *channel layer*, a low-level abstraction around a set of transports that allow you to send information between different processes. Each application instance has a unique *channel name*, and can join *groups*, allowing both point-to-point and broadcast messaging. .. note:: Channel layers are an optional part of Channels, and can be disabled if you want (by setting the ``CHANNEL_LAYERS`` setting to an empty value). .. 
code-block:: python #In a consumer self.channel_layer.send( 'event', { 'type': 'message', 'channel': channel, 'text': text, } ) You can also send messages to a dedicated process that's listening on its own, fixed channel name: .. code-block:: python # In a consumer self.channel_layer.send( "myproject.thumbnail_notifications", { "type": "thumbnail.generate", "id": 90902949, }, ) You can read more about channel layers in :doc:`/topics/channel_layers`. Django Integration ------------------ Channels ships with easy drop-in support for common Django features, like sessions and authentication. You can combine authentication with your WebSocket views by just adding the right middleware around them: .. code-block:: python from django.core.asgi import get_asgi_application from django.urls import re_path # Initialize Django ASGI application early to ensure the AppRegistry # is populated before importing code that may import ORM models. django_asgi_app = get_asgi_application() from channels.routing import ProtocolTypeRouter, URLRouter from channels.auth import AuthMiddlewareStack from channels.security.websocket import AllowedHostsOriginValidator application = ProtocolTypeRouter({ "http": django_asgi_app, "websocket": AllowedHostsOriginValidator( AuthMiddlewareStack( URLRouter([ re_path(r"^front(end)/$", consumers.AsyncChatConsumer.as_asgi()), ]) ) ), }) For more, see :doc:`/topics/sessions` and :doc:`/topics/authentication`. channels-4.0.0/docs/make.bat000066400000000000000000000150661432260166700156620ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . 
if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.https://www.sphinx-doc.org/ exit /b 1 ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. 
goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Channels.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Channels.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 
goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. 
goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end channels-4.0.0/docs/releases/000077500000000000000000000000001432260166700160505ustar00rootroot00000000000000channels-4.0.0/docs/releases/1.0.0.rst000066400000000000000000000213251432260166700172410ustar00rootroot000000000000001.0.0 Release Notes =================== Channels 1.0.0 brings together a number of design changes, including some breaking changes, into our first fully stable release, and also brings the databinding code out of alpha phase. It was released on 2017/01/08. The result is a faster, easier to use, and safer Channels, including one major change that will fix almost all problems with sessions and connect/receive ordering in a way that needs no persistent storage. It was unfortunately not possible to make all of the changes backwards compatible, though most code should not be too affected and the fixes are generally quite easy. You **must also update Daphne** to at least 1.0.0 to have this release of Channels work correctly. Major Features -------------- Channels 1.0 introduces a couple of new major features. WebSocket accept/reject flow ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Rather than be immediately accepted, WebSockets now pause during the handshake while they send over a message on ``websocket.connect``, and your application must either accept or reject the connection before the handshake is completed and messages can be received. You **must** update Daphne to at least 1.0.0 to make this work correctly. This has several advantages: * You can now reject WebSockets before they even finish connecting, giving appropriate error codes to browsers and not letting the browser-side socket ever get into a connected state and send messages. 
* Combined with Consumer Atomicity (below), it means there is no longer any need for the old "slight ordering" mode, as the connect consumer must run to completion and accept the socket before any messages can be received and forwarded onto ``websocket.receive``. * Any ``send`` message sent to the WebSocket will implicitly accept the connection, meaning only a limited set of ``connect`` consumers need changes (see Backwards Incompatible Changes below) Consumer Atomicity ~~~~~~~~~~~~~~~~~~ Consumers will now buffer messages you try to send until the consumer completes and then send them once it exits and the outbound part of any decorators have been run (even if an exception is raised). This makes the flow of messages much easier to reason about - consumers can now be reasoned about as atomic blocks that run and then send messages, meaning that if you send a message to start another consumer you're guaranteed that the sending consumer has finished running by the time it's acted upon. If you want to send messages immediately rather than at the end of the consumer, you can still do that by passing the ``immediately`` argument: .. code-block:: python Channel("thumbnailing-tasks").send({"id": 34245}, immediately=True) This should be mostly backwards compatible, and may actually fix race conditions in some apps that were pre-existing. Databinding Group/Action Overhaul ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Previously, databinding subclasses had to implement ``group_names(instance, action)`` to return what groups to send an instance's change to of the type ``action``. This had flaws, most notably when what was actually just a modification to the instance in question changed its permission status so more clients could see it; to those clients, it should instead have been "created". Now, Channels just calls ``group_names(instance)``, and you should return what groups can see the instance at the current point in time given the instance you were passed. 
Channels will actually call the method before and after changes, comparing the groups you gave, and sending out create, update or delete messages to clients appropriately. Existing databinding code will need to be adapted; see the "Backwards Incompatible Changes" section for more. Demultiplexer Overhaul ~~~~~~~~~~~~~~~~~~~~~~ Demuliplexers have changed to remove the behaviour where they re-sent messages onto new channels without special headers, and instead now correctly split out incoming messages into sub-messages that still look like ``websocket.receive`` messages, and directly dispatch these to the relevant consumer. They also now forward all ``websocket.connect`` and ``websocket.disconnect`` messages to all of their sub-consumers, so it's much easier to compose things together from code that also works outside the context of multiplexing. For more, read the updated ``/generic`` docs. Delay Server ~~~~~~~~~~~~ A built-in delay server, launched with `manage.py rundelay`, now ships if you wish to use it. It needs some extra initial setup and uses a database for persistence; see ``/delay`` for more information. Minor Changes ------------- * Serializers can now specify fields as ``__all__`` to auto-include all fields, and ``exclude`` to remove certain unwanted fields. * ``runserver`` respects ``FORCE_SCRIPT_NAME`` * Websockets can now be closed with a specific code by calling ``close(status=4000)`` * ``enforce_ordering`` no longer has a ``slight`` mode (because of the accept flow changes), and is more efficient with session saving. * ``runserver`` respects ``--nothreading`` and only launches one worker, takes a ``--http-timeout`` option if you want to override it from the default ``60``, * A new ``@channel_and_http_session`` decorator rehydrates the HTTP session out of the channel session if you want to access it inside receive consumers. * Streaming responses no longer have a chance of being cached. * ``request.META['SERVER_PORT']`` is now always a string. 
* ``http.disconnect`` now has a ``path`` key so you can route it. * Test client now has a ``send_and_consume`` method. Backwards Incompatible Changes ------------------------------ Connect Consumers ~~~~~~~~~~~~~~~~~ If you have a custom consumer for ``websocket.connect``, you must ensure that it either: * Sends at least one message onto the ``reply_channel`` that generates a WebSocket frame (either ``bytes`` or ``text`` is set), either directly or via a group. * Sends a message onto the ``reply_channel`` that is ``{"accept": True}``, to accept a connection without sending data. * Sends a message onto the ``reply_channel`` that is ``{"close": True}``, to reject a connection mid-handshake. Many consumers already do the former, but if your connect consumer does not send anything you MUST now send an accept message or the socket will remain in the handshaking phase forever and you'll never get any messages. All built-in Channels consumers (e.g. in the generic consumers) have been upgraded to do this. You **must** update Daphne to at least 1.0.0 to make this work correctly. Databinding group_names ~~~~~~~~~~~~~~~~~~~~~~~ If you have databinding subclasses, you will have implemented ``group_names(instance, action)``, which returns the groups to use based on the instance and action provided. Now, instead, you must implement ``group_names(instance)``, which returns the groups that can see the instance as it is presented for you; the action results will be worked out for you. For example, if you want to only show objects marked as "admin_only" to admins, and objects without it to everyone, previously you would have done: .. code-block:: python def group_names(self, instance, action): if instance.admin_only: return ["admins"] else: return ["admins", "non-admins"] Because you did nothing based on the ``action`` (and if you did, you would have got incomplete messages, hence this design change), you can just change the signature of the method like this: .. 
code-block:: python def group_names(self, instance): if instance.admin_only: return ["admins"] else: return ["admins", "non-admins"] Now, when an object is updated to have ``admin_only = True``, the clients in the ``non-admins`` group will get a ``delete`` message, while those in the ``admins`` group will get an ``update`` message. Demultiplexers ~~~~~~~~~~~~~~ Demultiplexers have changed from using a ``mapping`` dict, which mapped stream names to channels, to using a ``consumers`` dict which maps stream names directly to consumer classes. You will have to convert over to using direct references to consumers, change the name of the dict, and then you can remove any channel routing for the old channels that were in ``mapping`` from your routes. Additionally, the Demultiplexer now forwards messages as they would look from a direct connection, meaning that where you previously got a decoded object through you will now get a correctly-formatted ``websocket.receive`` message through with the content as a ``text`` key, JSON-encoded. You will also now have to handle ``websocket.connect`` and ``websocket.disconnect`` messages. Both of these issues can be solved using the ``JsonWebsocketConsumer`` generic consumer, which will decode for you and correctly separate connection and disconnection handling into their own methods. channels-4.0.0/docs/releases/1.0.1.rst000066400000000000000000000004771432260166700172470ustar00rootroot000000000000001.0.1 Release Notes =================== Channels 1.0.1 is a minor bugfix release, released on 2017/01/09. Changes ------- * WebSocket generic views now accept connections by default in their connect handler for better backwards compatibility. Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/1.0.2.rst000066400000000000000000000016731432260166700172470ustar00rootroot000000000000001.0.2 Release Notes =================== Channels 1.0.2 is a minor bugfix release, released on 2017/01/12. 
Changes ------- * Websockets can now be closed from anywhere using the new ``WebsocketCloseException``, available as ``channels.exceptions.WebsocketCloseException(code=None)``. There is also a generic ``ChannelSocketException`` you can base any exceptions on that, if it is caught, gets handed the current ``message`` in a ``run`` method, so you can do custom behaviours. * Calling ``Channel.send`` or ``Group.send`` from outside a consumer context (i.e. in tests or management commands) will once again send the message immediately, rather than putting it into the consumer message buffer to be flushed when the consumer ends (which never happens) * The base implementation of databinding now correctly only calls ``group_names(instance)``, as documented. Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/1.0.3.rst000066400000000000000000000013361432260166700172440ustar00rootroot000000000000001.0.3 Release Notes =================== Channels 1.0.3 is a minor bugfix release, released on 2017/02/01. Changes ------- * Database connections are no longer force-closed after each test is run. * Channel sessions are not re-saved if they're empty even if they're marked as modified, allowing logout to work correctly. * WebsocketDemultiplexer now correctly does sessions for the second/third/etc. connect and disconnect handlers. * Request reading timeouts now correctly return 408 rather than erroring out. * The ``rundelay`` delay server now only polls the database once per second, and this interval is configurable with the ``--sleep`` option. Backwards Incompatible Changes ------------------------------ None. 
channels-4.0.0/docs/releases/1.1.0.rst000066400000000000000000000021521432260166700172370ustar00rootroot000000000000001.1.0 Release Notes =================== Channels 1.1.0 introduces a couple of major but backwards-compatible changes, including most notably the inclusion of a standard, framework-agnostic JavaScript library for easier integration with your site. Major Changes ------------- * Channels now includes a JavaScript wrapper that wraps reconnection and multiplexing for you on the client side. For more on how to use it, see the javascript documentation. * Test classes have been moved from ``channels.tests`` to ``channels.test`` to better match Django. Old imports from ``channels.tests`` will continue to work but will trigger a deprecation warning, and ``channels.tests`` will be removed completely in version 1.3. Minor Changes & Bugfixes ------------------------ * Bindings now support non-integer fields for primary keys on models. * The ``enforce_ordering`` decorator no longer suffers a race condition where it would drop messages under high load. * ``runserver`` no longer errors if the ``staticfiles`` app is not enabled in Django. Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/1.1.1.rst000066400000000000000000000006051432260166700172410ustar00rootroot000000000000001.1.1 Release Notes =================== Channels 1.1.1 is a bugfix release that fixes a packaging issue with the JavaScript files. Major Changes ------------- None. Minor Changes & Bugfixes ------------------------ * The JavaScript binding introduced in 1.1.0 is now correctly packaged and included in builds. Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/1.1.2.rst000066400000000000000000000012011432260166700172330ustar00rootroot000000000000001.1.2 Release Notes =================== Channels 1.1.2 is a bugfix release for the 1.1 series, released on April 1st, 2017. Major Changes ------------- None. 
Minor Changes & Bugfixes ------------------------ * Session name hash changed to SHA-1 to satisfy FIPS-140-2. * `scheme` key in ASGI-HTTP messages now translates into `request.is_secure()` correctly. * WebsocketBridge now exposes the underlying WebSocket as `.socket`. Backwards Incompatible Changes ------------------------------ * When you upgrade all current channel sessions will be invalidated; you should make sure you disconnect all WebSockets during upgrade. channels-4.0.0/docs/releases/1.1.3.rst000066400000000000000000000007141432260166700172440ustar00rootroot000000000000001.1.3 Release Notes =================== Channels 1.1.3 is a bugfix release for the 1.1 series, released on April 5th, 2017. Major Changes ------------- None. Minor Changes & Bugfixes ------------------------ * ``enforce_ordering`` now works correctly with the new-style process-specific channels * ASGI channel layer versions are now explicitly checked for version compatibility Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/1.1.4.rst000066400000000000000000000015071432260166700172460ustar00rootroot000000000000001.1.4 Release Notes =================== Channels 1.1.4 is a bugfix release for the 1.1 series, released on June 15th, 2017. Major Changes ------------- None. Minor Changes & Bugfixes ------------------------ * Pending messages correctly handle retries in backlog situations * Workers in threading mode now respond to ctrl-C and gracefully exit. * ``request.meta['QUERY_STRING']`` is now correctly encoded at all times. * Test client improvements * ``ChannelServerLiveTestCase`` added, allows an equivalent of the Django ``LiveTestCase``. * Decorator added to check ``Origin`` headers (``allowed_hosts_only``) * New ``TEST_CONFIG`` setting in ``CHANNEL_LAYERS`` that allows varying of the channel layer for tests (e.g. using a different Redis install) Backwards Incompatible Changes ------------------------------ None. 
channels-4.0.0/docs/releases/1.1.5.rst000066400000000000000000000005311432260166700172430ustar00rootroot000000000000001.1.5 Release Notes =================== Channels 1.1.5 is a packaging release for the 1.1 series, released on June 16th, 2017. Major Changes ------------- None. Minor Changes & Bugfixes ------------------------ * The Daphne dependency requirement was bumped to 1.3.0. Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/1.1.6.rst000066400000000000000000000006401432260166700172450ustar00rootroot000000000000001.1.6 Release Notes =================== Channels 1.1.6 is a packaging release for the 1.1 series, released on June 28th, 2017. Major Changes ------------- None. Minor Changes & Bugfixes ------------------------ * The ``runserver`` ``server_cls`` override no longer fails with more modern Django versions that pass an ``ipv6`` parameter. Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/2.0.0.rst000066400000000000000000000026701432260166700172440ustar00rootroot000000000000002.0.0 Release Notes =================== Channels 2.0 is a major rewrite of Channels, introducing a large amount of changes to the fundamental design and architecture of Channels. Notably: * Data is no longer transported over a channel layer between protocol server and application; instead, applications run inside their protocol servers (like with WSGI). * To achieve this, the entire core of channels is now built around Python's ``asyncio`` framework and runs async-native down until it hits either a Django view or a synchronous consumer. * Python 2.7 and 3.4 are no longer supported. More detailed information on the changes and tips on how to port your applications can be found in our ``/one-to-two`` documentation in the 2.x docs version. 
Backwards Incompatible Changes ------------------------------ Channels 2 is regrettably not backwards-compatible at all with Channels 1 applications due to the large amount of re-architecting done to the code and the switch from synchronous to asynchronous runtimes. A migration guide is available in the 2.x docs version, and a lot of the basic concepts are the same, but the basic class structure and imports have changed. Our apologies for having to make a breaking change like this, but it was the only way to fix some of the fundamental design issues in Channels 1. Channels 1 will continue to receive security and data-loss fixes for the foreseeable future, but no new features will be added. channels-4.0.0/docs/releases/2.0.1.rst000066400000000000000000000022301432260166700172350ustar00rootroot000000000000002.0.1 Release Notes =================== Channels 2.0.1 is a patch release of channels, adding a couple of small new features and fixing one bug in URL resolution. As always, when updating Channels make sure to also update its dependencies (``asgiref`` and ``daphne``) as these also get their own bugfix updates, and some bugs that may appear to be part of Channels are actually in those packages. New Features ------------ * There are new async versions of the Websocket generic consumers, ``AsyncWebsocketConsumer`` and ``AsyncJsonWebsocketConsumer``. Read more about them in :doc:`/topics/consumers`. * The old ``allowed_hosts_only`` decorator has been removed (it was accidentally included in the 2.0 release but didn't work) and replaced with a new ``OriginValidator`` and ``AllowedHostsOriginValidator`` set of ASGI middleware. Read more in :doc:`/topics/security`. Bugfixes -------- * A bug in ``URLRouter`` which didn't allow you to match beyond the first URL in some situations has been resolved, and a test suite was added for URL resolution to prevent it happening again. Backwards Incompatible Changes ------------------------------ None. 
channels-4.0.0/docs/releases/2.0.2.rst000066400000000000000000000014311432260166700172400ustar00rootroot000000000000002.0.2 Release Notes =================== Channels 2.0.2 is a patch release of Channels, fixing a bug in the database connection handling. As always, when updating Channels make sure to also update its dependencies (``asgiref`` and ``daphne``) as these also get their own bugfix updates, and some bugs that may appear to be part of Channels are actually in those packages. New Features ------------ * There is a new ``channels.db.database_sync_to_async`` wrapper that is like ``sync_to_async`` but also closes database connections for you. You can read more about usage in :doc:`/topics/databases`. Bugfixes -------- * SyncConsumer and all its descendant classes now close database connections when they exit. Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/2.1.0.rst000066400000000000000000000113011432260166700172340ustar00rootroot000000000000002.1.0 Release Notes =================== Channels 2.1 brings a few new major changes to Channels as well as some more minor fixes. In addition, if you've not yet seen it, we now have a long-form :doc:`tutorial ` to better introduce some of the concepts and sync versus async styles of coding. Major Changes ------------- Async HTTP Consumer ~~~~~~~~~~~~~~~~~~~ There is a new native-async HTTP consumer class, ``channels.generic.http.AsyncHttpConsumer``. This allows much easier writing of long-poll endpoints or other long-lived HTTP connection handling that benefits from native async support. You can read more about it in the :doc:`/topics/consumers` documentation. WebSocket Consumers ~~~~~~~~~~~~~~~~~~~ These consumer classes now all have built-in group join and leave functionality, which will make a consumer join all group names that are in the iterable ``groups`` on the consumer class (this can be a static list or a ``@property`` method). 
In addition, the ``accept`` methods on both variants now take an optional ``subprotocol`` argument, which will be sent back to the WebSocket client as the subprotocol the server has selected. The client's advertised subprotocols can, as always, be found in the scope as ``scope["subprotocols"]``. Nested URL Routing ~~~~~~~~~~~~~~~~~~ ``URLRouter`` instances can now be nested inside each other and, like Django's URL handling and ``include``, will strip off the matched part of the URL in the outer router and leave only the unmatched portion for the inner router, allowing reusable routing files. Note that you **cannot** use the Django ``include`` function inside of the ``URLRouter`` as it assumes a bit too much about what it is given as its left-hand side and will terminate your regular expression/URL pattern wrongly. Login and Logout ~~~~~~~~~~~~~~~~ As well as overhauling the internals of the ``AuthMiddleware``, there are now also ``login`` and ``logout`` async functions you can call in consumers to log users in and out of the current session. Due to the way cookies are sent back to clients, these come with some caveats; read more about them and how to use them properly in :doc:`/topics/authentication`. In-Memory Channel Layer ~~~~~~~~~~~~~~~~~~~~~~~ The in-memory channel layer has been extended to have full expiry and group support so it should now be suitable for drop-in replacement for most test scenarios. Testing ~~~~~~~ The ``ChannelsLiveServerTestCase`` has been rewritten to use a new method for launching Daphne that should be more resilient (and faster), and now shares code with the Daphne test suite itself. Ports are now left up to the operating system to decide rather than being picked from within a set range. It also now supports static files when the Django ``staticfiles`` app is enabled. 
In addition, the Communicator classes have gained a ``receive_nothing`` method that allows you to assert that the application didn't send anything, rather than writing this yourself using exception handling. See more in the :doc:`/topics/testing` documentation. Origin header validation ~~~~~~~~~~~~~~~~~~~~~~~~ As well as removing the ``print`` statements that accidentally got into the last release, this has been overhauled to more correctly match against headers according to the Origin header spec and align with Django's ``ALLOWED_HOSTS`` setting. It can now also enforce protocol (``http`` versus ``https``) and port, both optionally. Bugfixes & Small Changes ------------------------ * ``print`` statements that accidentally got left in the ``Origin`` validation code were removed. * The ``runserver`` command now shows the version of Channels you are running. * Orphaned tasks that may have caused warnings during test runs or occasionally live site traffic are now correctly killed off rather than letting them die later on and print warning messages. * ``WebsocketCommunicator`` now accepts a query string passed into the constructor and adds it to the scope rather than just ignoring it. * Test handlers will correctly handle changing the ``CHANNEL_LAYERS`` setting via decorators and wipe the internal channel layer cache. * ``SessionMiddleware`` can be safely nested inside itself rather than causing a runtime error. Backwards Incompatible Changes ------------------------------ * The format taken by the ``OriginValidator`` for its domains has changed and ``*.example.com`` is no longer allowed; instead, use ``.example.com`` to match a domain and all its subdomains. * If you previously nested ``URLRouter`` instances inside each other both would have been matching on the full URL before, whereas now they will match on the unmatched portion of the URL, meaning your URL routes would break if you had intended this usage. 
channels-4.0.0/docs/releases/2.1.1.rst000066400000000000000000000021151432260166700172400ustar00rootroot000000000000002.1.1 Release Notes =================== Channels 2.1.1 is a bugfix release for an important bug in the new async authentication code. Major Changes ------------- None. Bugfixes & Small Changes ------------------------ Previously, the object in ``scope["user"]`` was one of Django's SimpleLazyObjects, which then called our ``get_user`` async function via ``async_to_sync``. This worked fine when called from SyncConsumers, but because async environments do not run attribute access in an async fashion, when the body of an async consumer tried to call it, the ``asgiref`` library flagged an error where the code was trying to call a synchronous function during a async context. To fix this, the User object is now loaded non-lazily on application startup. This introduces a blocking call during the synchronous application constructor, so the ASGI spec has been updated to recommend that constructors for ASGI apps are called in a threadpool and Daphne 2.1.1 implements this and is recommended for use with this release. Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/2.1.2.rst000066400000000000000000000031341432260166700172430ustar00rootroot000000000000002.1.2 Release Notes =================== Channels 2.1.2 is another bugfix release in the 2.1 series. Special thanks to people at the DjangoCon Europe sprints who helped out with several of these fixes. Major Changes ------------- Session and authentication middleware has been overhauled to be non-blocking. Previously, these middlewares potentially did database or session store access in the synchronous ASGI constructor, meaning they would block the entire event loop while doing so. 
Instead, they have now been modified to add LazyObjects into the scope in the places where the session or user will be, and then when the processing goes through their asynchronous portion, those stores are accessed in a non-blocking fashion. This should be an un-noticeable change for end users, but if you see weird behaviour or an unresolved LazyObject, let us know. Bugfixes & Small Changes ------------------------ * AsyncHttpConsumer now has a disconnect() method you can override if you want to perform actions (such as leaving groups) when a long-running HTTP request disconnects. * URL routing context now includes default arguments from the URLconf in the context's ``url_route`` key, alongside captured arguments/groups from the URL pattern. * The FORCE_SCRIPT_NAME setting is now respected in ASGI mode, and lets you override where Django thinks the root URL of your application is mounted. * ALLOWED_HOSTS is now set correctly during LiveServerTests, meaning you will no longer get ``400 Bad Request`` errors during these test runs. Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/2.1.3.rst000066400000000000000000000010621432260166700172420ustar00rootroot000000000000002.1.3 Release Notes =================== Channels 2.1.3 is another bugfix release in the 2.1 series. Bugfixes & Small Changes ------------------------ * An ALLOWED_ORIGINS value of "*" will now also allow requests without a Host header at all (especially important for tests) * The request.path value is now correct in cases when a server has SCRIPT_NAME set. * Errors that happen inside channel listeners inside a runworker or Worker class are now raised rather than suppressed. Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/2.1.4.rst000066400000000000000000000021051432260166700172420ustar00rootroot000000000000002.1.4 Release Notes =================== Channels 2.1.4 is another bugfix release in the 2.1 series. 
Bugfixes & Small Changes ------------------------ * Django middleware is now cached rather than instantiated per request resulting in a significant speed improvement. Some middleware took seconds to load and as a result Channels was unusable for HTTP serving before. * ChannelServerLiveTestCase now serves static files again. * Improved error message resulting from bad Origin headers. * ``runserver`` logging now goes through the Django logging framework to match modern Django. * Generic consumers can now have non-default channel layers - set the ``channel_layer_alias`` property on the consumer class * Improved error when accessing ``scope['user']`` before it's ready - the user is not accessible in the constructor of ASGI apps as it needs an async environment to load in. Previously it raised a generic error when you tried to access it early; now it tells you more clearly what's happening. Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/2.1.5.rst000066400000000000000000000005161432260166700172470ustar00rootroot000000000000002.1.5 Release Notes =================== Channels 2.1.5 is another bugfix release in the 2.1 series. Bugfixes & Small Changes ------------------------ * Django middleware caching now works on Django 1.11 and Django 2.0. The previous release only ran on 2.1. Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/2.1.6.rst000066400000000000000000000010271432260166700172460ustar00rootroot000000000000002.1.6 Release Notes =================== Channels 2.1.6 is another bugfix release in the 2.1 series. Bugfixes & Small Changes ------------------------ * HttpCommunicator now extracts query strings correctly from its provided arguments * AsyncHttpConsumer provides channel layer attributes following the same conventions as other consumer classes * Prevent late-Daphne import errors where importing ``daphne.server`` didn't work due to a bad linter fix. 
Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/2.1.7.rst000066400000000000000000000015331432260166700172510ustar00rootroot000000000000002.1.7 Release Notes =================== Channels 2.1.7 is another bugfix release in the 2.1 series, and the last release (at least for a long while) with Andrew Godwin as the primary maintainer. Thanks to everyone who has used, supported, and contributed to Channels over the years, and I hope we can keep it going with community support for a good while longer. Bugfixes & Small Changes ------------------------ * HTTP request body size limit is now enforced (the one set by the ``DATA_UPLOAD_MAX_MEMORY_SIZE`` setting) * ``database_sync_to_async`` now closes old connections before it runs code, which should prevent some connection errors in long-running pages or tests. * The auth middleware closes old connections before it runs, to solve similar old-connection issues. Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/2.2.0.rst000066400000000000000000000003231432260166700172370ustar00rootroot000000000000002.2.0 Release Notes =================== Channels 2.2.0 updates the requirements for ASGI version 3, and the supporting Daphne v2.3 release. Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/2.3.0.rst000066400000000000000000000017441432260166700172500ustar00rootroot000000000000002.3.0 Release Notes =================== Channels 2.3.0 updates the ``AsgiHandler`` HTTP request body handling to use a spooled temporary file, rather than reading the whole request body into memory. This significantly reduces the maximum memory requirements when serving Django views, and protects from DoS attacks, whilst still allowing large file uploads — a combination that had previously been *difficult*. Many thanks to Ivan Ergunov for his work on the improvements! 
🎩 Backwards Incompatible Changes ------------------------------ As a result of the reworked body handling, ``AsgiRequest.__init__()`` is adjusted to expect a file-like ``stream``, rather than the whole ``body`` as bytes. Test cases instantiating requests directly will likely need to be updated to wrap the provided ``body`` in, e.g., ``io.BytesIO``. Next Up... ---------- We're looking to address a few issues around ``AsyncHttpConsumer``. Any human-power available to help on that, truly appreciated. 🙂 channels-4.0.0/docs/releases/2.4.0.rst000066400000000000000000000011001432260166700172360ustar00rootroot000000000000002.4.0 Release Notes =================== Channels 2.4 brings compatibility with Django 3.0's ``async_unsafe()`` checks. (Specifically we ensure session save calls are made inside an asgiref ``database_sync_to_async()``.) If you are using Daphne, it is recommended that you install Daphne version 2.4.1 or later for full compatibility with Django 3.0. Backwards Incompatible Changes ------------------------------ In line with the guidance provided by Django's supported versions policy we now also drop support for all Django versions before 2.2, which is the current LTS. channels-4.0.0/docs/releases/3.0.0.rst000066400000000000000000000061731432260166700172470ustar00rootroot000000000000003.0.0 Release Notes =================== The Channels 3 update brings Channels into line with Django's own async ASGI support, introduced with Django 3.0. Channels now integrates with Django's async HTTP handling, whilst continuing to support WebSockets and other exciting consumer types. Channels 3 supports Django 3.x and beyond, as well as continuing to support the Django 2.2 LTS. We will support Django 2.2 at least until the Django 3.2 LTS is released, yet may drop support after that, but before Django 2.2 is officially end-of-life. Likewise, we support Python 3.6+ but we **strongly advise** you to update to the latest Python versions, so 3.9 at the time of release. 
In both our Django and Python support, we reflect the reality that async Python and async Django are still both evolving rapidly. Many issues we see simply disappear if you update. Whatever you are doing with async, you should make sure you're on the latest versions. The highlight of this release is the upgrade to ASGI v3, which allows integration with Django's ASGI support. There are also two additional deprecations that you will need to deal with if you are updating an existing application. Update to ASGI 3 ---------------- * Consumers are now ASGI 3 *single-callables* with the signature:: application(scope, receive, send) For generic consumers this change should be largely transparent, but you will need to update ``__init__()`` (no longer taking the scope) and ``__call__()`` (now taking the scope) **if you implemented these yourself**. * Consumers now have an ``as_asgi()`` class method you need to call when setting up your routing:: websocket_urlpatterns = [ re_path(r'ws/chat/(?P<room_name>\w+)/$', consumers.ChatConsumer.as_asgi()), ] This returns an ASGI application that will instantiate the consumer per-request. It's similar to Django's ``as_view()``, which serves the same purpose. You can pass in keyword arguments for initialization if your consumer requires them. * Middleware will also need to be updated to the ASGI v3 signature. The ``channels.middleware.BaseMiddleware`` class is simplified, and available as an example. You probably don't need to actually subclass it under ASGI 3. Deprecations ------------ * Using ``ProtocolTypeRouter`` without an explicit ``"http"`` key is now deprecated. Following Django conventions, your entry point script should be named ``asgi.py``, and you should use Django's ``get_asgi_application()``, that is used by Django's default ``asgi.py`` template to route the ``"http"`` handler:: from django.core.asgi import get_asgi_application application = ProtocolTypeRouter({ "http": get_asgi_application(), # Other protocols here. 
}) Once the deprecation is removed, when we drop support for Django 2.2, not specifying an ``"http"`` key will mean that your application will not handle HTTP requests. * The Channels built-in HTTP protocol ``AsgiHandler`` is also deprecated. You should update to Django 3.0 or higher and use Django's ``get_asgi_application()``. Channel's ``AsgiHandler`` will be removed when we drop support for Django 2.2. channels-4.0.0/docs/releases/3.0.1.rst000066400000000000000000000003261432260166700172420ustar00rootroot000000000000003.0.1 Release Notes =================== Channels 3.0.1 fixes a bug in Channels 3.0. Bugfixes -------- * Fixes a bug in Channels 3.0 where ``SessionMiddleware`` would not correctly isolate per-instance scopes. channels-4.0.0/docs/releases/3.0.2.rst000066400000000000000000000006131432260166700172420ustar00rootroot000000000000003.0.2 Release Notes =================== Channels 3.0.2 fixes a bug in Channels 3.0.1 Bugfixes -------- * Fixes a bug in Channels 3.0 where `StaticFilesWrapper` was not updated to the ASGI 3 single-callable interface. * Users of the ``runworker`` command should ensure to update ``asgiref`` to version 3.3.1 or later, where an issue in ``asgiref.server.StatelessServer`` was addressed. channels-4.0.0/docs/releases/3.0.3.rst000066400000000000000000000041071432260166700172450ustar00rootroot000000000000003.0.3 Release Notes =================== Channels 3.0.3 fixes a security issue in Channels 3.0.2 CVE-2020-35681: Potential leakage of session identifiers using legacy ``AsgiHandler`` ------------------------------------------------------------------------------------- The legacy ``channels.http.AsgiHandler`` class, used for handling HTTP type requests in an ASGI environment prior to Django 3.0, did not correctly separate request scopes in Channels 3.0. 
In many cases this would result in a crash but, with correct timing, responses could be sent to the wrong client, resulting in potential leakage of session identifiers and other sensitive data. This issue affects Channels 3.0.x before 3.0.3, and is resolved in Channels 3.0.3. Users of ``ProtocolTypeRouter`` not explicitly specifying the handler for the ``'http'`` key, or those explicitly using ``channels.http.AsgiHandler``, likely to support Django v2.2, are affected and should update immediately. Note that both an unspecified handler for the ``'http'`` key and using ``channels.http.AsgiHandler`` are deprecated, and will raise a warning, from Channels v3.0.0. This issue affects only the legacy channels provided class, and not Django's similar ``ASGIHandler``, available from Django 3.0. It is recommended to update to Django 3.0+ and use the Django provided ``ASGIHandler``. A simplified ``asgi.py`` script will look like this: .. code-block:: python import os from django.core.asgi import get_asgi_application # Fetch Django ASGI application early to ensure AppRegistry is populated # before importing consumers and AuthMiddlewareStack that may import ORM # models. os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings") django_asgi_app = get_asgi_application() # Import other Channels classes and consumers here. from channels.routing import ProtocolTypeRouter, URLRouter application = ProtocolTypeRouter({ # Explicitly set 'http' key using Django's ASGI application. "http": django_asgi_app, }) Please see :doc:`/deploying` for a more complete example. channels-4.0.0/docs/releases/3.0.4.rst000066400000000000000000000014121432260166700172420ustar00rootroot000000000000003.0.4 Release Notes =================== Channels 3.0.4 is a bugfix release in the 3.0 series. Bugfixes & Small Changes ------------------------ * Usage of ``urlparse`` in ``OriginValidator`` is corrected to maintain compatibility with recent point-releases of Python. 
* The import of ``django.contrib.auth.models.AnonymousUser`` in ``channels.auth`` is deferred until runtime, in order to avoid errors if ``AuthMiddleware`` or ``AuthMiddlewareStack`` were imported before ``django.setup()`` was run. * ``CookieMiddleware`` adds support for the ``samesite`` flag. * ``WebsocketConsumer.__init__()`` and ``AsyncWebsocketConsumer.__init__()`` no longer make a bad `super()` call to ``object.__init__()``. Backwards Incompatible Changes ------------------------------ None. channels-4.0.0/docs/releases/3.0.5.rst000066400000000000000000000006301432260166700172440ustar00rootroot000000000000003.0.5 Release Notes =================== Channels 3.0.5 is a bugfix release in the 3.0 series. Bugfixes & Small Changes ------------------------ * Removed use of ``providing_args`` keyword argument to consumer started signal, as support for this was removed in Django 4.0. Backwards Incompatible Changes ------------------------------ * Drops support for end-of-life Python 3.6 and Django 3.0 and 3.1. channels-4.0.0/docs/releases/4.0.0.rst000066400000000000000000000117361432260166700172500ustar00rootroot000000000000004.0.0 Release Notes =================== Channels 4 is the next major version of the Channels package. Together with the matching Daphne v4 and channels-redis v4 releases, it updates dependencies, fixes issues, and removes outdated code. It so provides the foundation for Channels development going forward. In most cases, you can update now by updating ``channels``, ``daphne``, and ``channels-redis`` as appropriate, with ``pip``, and by adding ``daphne`` at the top of your ``INSTALLED_APPS`` setting. First ``pip``:: pip install -U 'channels[daphne]' channels-redis Then in your Django settings file:: INSTALLED_APPS = [ "daphne", ... ] Read on for the details. Updated Python and Django support --------------------------------- In general Channels will try to follow Python and Django supported versions. 
As of release, that means Python 3.7, 3.8, 3.9, and 3.10, as well as Django 3.2, 4.0, and 4.1 are currently supported. As a note, we reserve the right to drop older Python versions, or the older Django LTS, once the newer one is released, before their official end-of-life if this is necessary to ease development. Dropping older Python and Django versions will be done in minor version releases, and will not be considered to require a major version change. The async support in both Python and Django continues to evolve rapidly. We advise you to always upgrade to the latest versions in order to avoid issues in older versions if you're building an async application. * Dropped support for Python 3.6. * Minimum Django version is now Django 3.2. * Added compatibility with Django 4.1. Decoupling of the Daphne application server ------------------------------------------- In order to allow users of other ASGI servers to use Channels without the overhead of Daphne and Twisted, the Daphne application server is now an optional dependency, installable either directly or with the ``daphne`` extra, as per the ``pip`` example above. * Where Daphne is used ``daphne>=4.0.0`` is required. The ``channels[daphne]`` extra assures this. * The ``runserver`` command is moved to the ``daphne`` package. In order to use the ``runserver`` command, add ``daphne`` to your ``INSTALLED_APPS``, before ``django.contrib.staticfiles``:: INSTALLED_APPS = [ "daphne", ... ] There is a new system check to ensure this ordering. Note, the ``runworker`` command remains a part of the ``channels`` app. * Use of ``ChannelsLiveServerTestCase`` still requires Daphne. Removal of the Django application wrappers ------------------------------------------ In order to add initial ASGI support to Django, Channels originally provided tools for wrapping your Django application and serving it under ASGI. 
This included an ASGI handler class, an ASGI HTTP request object, and an ASGI compatible version of the staticfiles handler for use with ``runserver``. Improved equivalents to all of these have been added to Django since Django version 3.0. As such serving of Django HTTP applications (whether using sync or async views) under ASGI is now Django's responsibility, and the matching Channels classes have been removed. Use of these classes was deprecated in Channels v3 and, if you've already moved to the Django equivalents there is nothing further to do. * Removed deprecated static files handling in favor of ``django.contrib.staticfiles``. * Removed the deprecated AsgiHandler, which wrapped Django views, in favour of Django's own ASGI support. You should use Django's ``get_asgi_application`` to provide the ``http`` handler for ProtocolTypeRouter, or an appropriate path for URLRouter, in order to route your Django application. * The supporting ``AsgiRequest`` is also removed, as it was only used for ``AsgiHandler``. * Removed deprecated automatic routing of ``http`` protocol handler in ``ProtocolTypeRouter``. You must explicitly register the ``http`` handler in your application if using ``ProtocolTypeRouter``. The minimal ``asgi.py`` file routing the Django ASGI application under a ``ProtocolTypeRouter`` will now look something like this:: import os from channels.routing import ProtocolTypeRouter from django.core.asgi import get_asgi_application os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings') application = ProtocolTypeRouter({ "http": get_asgi_application(), }) i.e. We use Django's ``get_asgi_application()``, and explicitly route an ``http`` handler for ``ProtocolTypeRouter``. This is merely for illustration of the changes. Please see the docs for more complete examples. Other changes ------------- * The use of the ``guarantee_single_callable()`` compatibility shim is removed. All applications must be ASGI v3 single-callables.
* Removed the ``consumer_started`` and ``consumer_finished`` signals, unused since the 2.0 rewrite. * Fixed ``ChannelsLiveServerTestCase`` when running on systems using the ``spawn`` multiprocessing start method, such as macOS and Windows. channels-4.0.0/docs/releases/index.rst000066400000000000000000000005331432260166700177120ustar00rootroot00000000000000Release Notes ============= .. toctree:: :maxdepth: 1 4.0.0 3.0.5 3.0.4 3.0.3 3.0.2 3.0.1 3.0.0 2.4.0 2.3.0 2.2.0 2.1.7 2.1.6 2.1.5 2.1.4 2.1.3 2.1.2 2.1.1 2.1.0 2.0.2 2.0.1 2.0.0 1.1.6 1.1.5 1.1.4 1.1.3 1.1.2 1.1.1 1.1.0 1.0.3 1.0.2 1.0.1 1.0.0 channels-4.0.0/docs/support.rst000066400000000000000000000120631432260166700165150ustar00rootroot00000000000000Support ======= If you have questions about Channels, need debugging help or technical support, you can turn to community resources like: - `Stack Overflow `_ - The `Django Users mailing list `_ (django-users@googlegroups.com) - The #django channel on the `PySlackers Slack group `_ If you have a concrete bug or feature request (one that is clear and actionable), please file an issue against the appropriate GitHub project. Unfortunately, if you open a GitHub issue with a vague problem (like "it's slow!" or "connections randomly drop!") we'll have to close it as we don't have the volunteers to answer the number of questions we'd get - please go to one of the other places above for support from the community at large. As a guideline, your issue is concrete enough to open an issue if you can provide **exact steps to reproduce** in a fresh, example project. We need to be able to reproduce it on a *normal, local developer machine* - so saying something doesn't work in a hosted environment is unfortunately not very useful to us, and we'll close the issue and point you here. 
Apologies if this comes off as harsh, but please understand that open source maintenance and support takes up a lot of time, and if we answered all the issues and support requests there would be no time left to actually work on the code itself! Making bugs reproducible ------------------------ If you're struggling with an issue that only happens in a production environment and can't get it to reproduce locally so either you can fix it or someone can help you, take a step-by-step approach to eliminating the differences between the environments. First off, try changing your production environment to see if that helps - for example, if you have Nginx/Apache/etc. between browsers and Channels, try going direct to the Python server and see if that fixes things. Turn SSL off if you have it on. Try from different browsers and internet connections. WebSockets are notoriously hard to debug already, and so you should expect some level of awkwardness from any project involving them. Next, check package versions between your local and remote environments. You'd be surprised how easy it is to forget to upgrade something! Once you've made sure it's none of that, try changing your project. Make a fresh Django project (or use one of the Channels example projects) and make sure it doesn't have the bug, then work on adding code to it from your project until the bug appears. Alternately, take your project and remove pieces back down to the basic Django level until it works. Network programming is also just difficult in general; you should expect some level of reconnects and dropped connections as a matter of course. Make sure that what you're seeing isn't just normal for a production application. How to help the Channels project -------------------------------- If you'd like to help us with support, the first thing to do is to provide support in the communities mentioned at the top (Stack Overflow and the mailing list). 
If you'd also like to help triage issues, please get in touch and mention you'd like to help out and we can make sure you're set up and have a good idea of what to do. Most of the work is making sure incoming issues are actually valid and actionable, and closing those that aren't and redirecting them to this page politely and explaining why. Some sample response templates are below. General support request ~~~~~~~~~~~~~~~~~~~~~~~ .. code-block:: text Sorry, but we can't help out with general support requests here - the issue tracker is for reproduceable bugs and concrete feature requests only! Please see our support documentation (https://channels.readthedocs.io/en/latest/support.html) for more information about where you can get general help. Non-specific bug/"It doesn't work!" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. code-block:: text I'm afraid we can't address issues without either direct steps to reproduce, or that only happen in a production environment, as they may not be problems in the project itself. Our support documentation (https://channels.readthedocs.io/en/latest/support.html) has details about how to take this sort of problem, diagnose it, and either fix it yourself, get help from the community, or make it into an actionable issue that we can handle. Sorry we have to direct you away like this, but we get a lot of support requests every week. If you can reduce the problem to a clear set of steps to reproduce or an example project that fails in a fresh environment, please re-open the ticket with that information. Problem in application code ~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. code-block:: text It looks like a problem in your application code rather than in Channels itself, so I'm going to close the ticket. If you can trace it down to a problem in Channels itself (with exact steps to reproduce on a fresh or small example project - see https://channels.readthedocs.io/en/latest/support.html) please re-open the ticket! Thanks. 
channels-4.0.0/docs/topics/000077500000000000000000000000001432260166700155465ustar00rootroot00000000000000channels-4.0.0/docs/topics/authentication.rst000066400000000000000000000130671432260166700213260ustar00rootroot00000000000000Authentication ============== Channels supports standard Django authentication out-of-the-box for HTTP and WebSocket consumers, and you can write your own middleware or handling code if you want to support a different authentication scheme (for example, tokens in the URL). Django authentication --------------------- The ``AuthMiddleware`` in Channels supports standard Django authentication, where the user details are stored in the session. It allows read-only access to a user object in the ``scope``. ``AuthMiddleware`` requires ``SessionMiddleware`` to function, which itself requires ``CookieMiddleware``. For convenience, these are also provided as a combined callable called ``AuthMiddlewareStack`` that includes all three. To use the middleware, wrap it around the appropriate level of consumer in your ``asgi.py``: .. code-block:: python from django.urls import re_path from channels.routing import ProtocolTypeRouter, URLRouter from channels.auth import AuthMiddlewareStack from channels.security.websocket import AllowedHostsOriginValidator from myapp import consumers application = ProtocolTypeRouter({ "websocket": AllowedHostsOriginValidator( AuthMiddlewareStack( URLRouter([ re_path(r"^front(end)/$", consumers.AsyncChatConsumer.as_asgi()), ]) ) ), }) While you can wrap the middleware around each consumer individually, it's recommended you wrap it around a higher-level application component, like in this case the ``URLRouter``. Note that the ``AuthMiddleware`` will only work on protocols that provide HTTP headers in their ``scope`` - by default, this is HTTP and WebSocket. To access the user, just use ``self.scope["user"]`` in your consumer code: .. 
code-block:: python class ChatConsumer(WebsocketConsumer): def connect(self, event): self.user = self.scope["user"] Custom Authentication --------------------- If you have a custom authentication scheme, you can write a custom middleware to parse the details and put a user object (or whatever other object you need) into your scope. Middleware is written as a callable that takes an ASGI application and wraps it to return another ASGI application. Most authentication can just be done on the scope, so all you need to do is override the initial constructor that takes a scope, rather than the event-running coroutine. Here's a simple example of a middleware that just takes a user ID out of the query string and uses that: .. code-block:: python from channels.db import database_sync_to_async @database_sync_to_async def get_user(user_id): try: return User.objects.get(id=user_id) except User.DoesNotExist: return AnonymousUser() class QueryAuthMiddleware: """ Custom middleware (insecure) that takes user IDs from the query string. """ def __init__(self, app): # Store the ASGI application we were passed self.app = app async def __call__(self, scope, receive, send): # Look up user from query string (you should also do things like # checking if it is a valid user ID, or if scope["user"] is already # populated). scope['user'] = await get_user(int(scope["query_string"])) return await self.app(scope, receive, send) The same principles can be applied to authenticate over non-HTTP protocols; for example, you might want to use someone's chat username from a chat protocol to turn it into a user. How to log a user in/out ------------------------ Channels provides direct login and logout functions (much like Django's ``contrib.auth`` package does) as ``channels.auth.login`` and ``channels.auth.logout``. Within your consumer you can await ``login(scope, user, backend=None)`` to log a user in. 
This requires that your scope has a ``session`` object; the best way to do this is to ensure your consumer is wrapped in a ``SessionMiddlewareStack`` or a ``AuthMiddlewareStack``. You can logout a user with the ``logout(scope)`` async function. If you are in a WebSocket consumer, or logging-in after the first response has been sent in a http consumer, the session is populated **but will not be saved automatically** - you must call ``scope["session"].save()`` after login in your consumer code: .. code-block:: python from channels.auth import login class ChatConsumer(AsyncWebsocketConsumer): ... async def receive(self, text_data): ... # login the user to this session. await login(self.scope, user) # save the session (if the session backend does not access the db you can use `sync_to_async`) await database_sync_to_async(self.scope["session"].save)() When calling ``login(scope, user)``, ``logout(scope)`` or ``get_user(scope)`` from a synchronous function you will need to wrap them in ``async_to_sync``, as we only provide async versions: .. code-block:: python from asgiref.sync import async_to_sync from channels.auth import login class SyncChatConsumer(WebsocketConsumer): ... def receive(self, text_data): ... async_to_sync(login)(self.scope, user) self.scope["session"].save() .. note:: If you are using a long running consumer, websocket or long-polling HTTP it is possible that the user will be logged out of their session elsewhere while your consumer is running. You can periodically use ``get_user(scope)`` to be sure that the user is still logged in. channels-4.0.0/docs/topics/channel_layers.rst000066400000000000000000000251321432260166700212720ustar00rootroot00000000000000Channel Layers ============== Channel layers allow you to talk between different instances of an application. They're a useful part of making a distributed realtime application if you don't want to have to shuttle all of your messages or events through a database. 
Additionally, they can also be used in combination with a worker process to make a basic task queue or to offload tasks - read more in :doc:`/topics/worker`. .. note:: Channel layers are an entirely optional part of Channels. If you don't want to use them, just leave ``CHANNEL_LAYERS`` unset, or set it to the empty dict ``{}``. .. warning:: Channel layers have a purely async interface (for both send and receive); you will need to wrap them in a converter if you want to call them from synchronous code (see below). Configuration ------------- Channel layers are configured via the ``CHANNEL_LAYERS`` Django setting. You can get the default channel layer from a project with ``channels.layers.get_channel_layer()``, but if you are using consumers, then a copy is automatically provided for you on the consumer as ``self.channel_layer``. Redis Channel Layer ******************* `channels_redis`_ is the only official Django-maintained channel layer supported for production use. The layer uses Redis as its backing store, and it supports both a single-server and sharded configurations as well as group support. To use this layer you'll need to install the `channels_redis`_ package. .. _`channels_redis`: https://pypi.org/project/channels-redis/ In this example, Redis is running on localhost (127.0.0.1) port 6379: .. code-block:: python CHANNEL_LAYERS = { "default": { "BACKEND": "channels_redis.core.RedisChannelLayer", "CONFIG": { "hosts": [("127.0.0.1", 6379)], }, }, } In-Memory Channel Layer *********************** Channels also comes packaged with an in-memory Channels Layer. This layer can be helpful in :doc:`/topics/testing` or for local-development purposes: .. code-block:: python CHANNEL_LAYERS = { "default": { "BACKEND": "channels.layers.InMemoryChannelLayer" } } .. warning:: **Do Not Use In Production** In-memory channel layers operate with each process as a separate layer. This means that no cross-process messaging is possible. 
As the core value of channel layers is to provide distributed messaging, in-memory usage will result in sub-optimal performance, and ultimately data-loss in a multi-instance environment. Synchronous Functions --------------------- By default the ``send()``, ``group_send()``, ``group_add()`` and other functions are async functions, meaning you have to ``await`` them. If you need to call them from synchronous code, you'll need to use the handy ``asgiref.sync.async_to_sync`` wrapper: .. code-block:: python from asgiref.sync import async_to_sync async_to_sync(channel_layer.send)("channel_name", {...}) What To Send Over The Channel Layer ----------------------------------- The channel layer is for high-level application-to-application communication. When you send a message, it is received by the consumers listening to the group or channel on the other end. What this means is that you should send high-level events over the channel layer, and then have consumers handle those events, and do appropriate low-level networking to their attached client. For example, a chat application could send events like this over the channel layer: .. code-block:: python await self.channel_layer.group_send( room.group_name, { "type": "chat.message", "room_id": room_id, "username": self.scope["user"].username, "message": message, } ) And then the consumers define a handling function to receive those events and turn them into WebSocket frames: .. code-block:: python async def chat_message(self, event): """ Called when someone has messaged our chat. 
""" # Send a message down to the client await self.send_json( { "msg_type": settings.MSG_TYPE_MESSAGE, "room": event["room_id"], "username": event["username"], "message": event["message"], }, ) Any consumer based on Channels' ``SyncConsumer`` or ``AsyncConsumer`` will automatically provide you a ``self.channel_layer`` and ``self.channel_name`` attribute, which contains a pointer to the channel layer instance and the channel name that will reach the consumer respectively. Any message sent to that channel name - or to a group the channel name was added to - will be received by the consumer much like an event from its connected client, and dispatched to a named method on the consumer. The name of the method will be the ``type`` of the event with periods replaced by underscores - so, for example, an event coming in over the channel layer with a ``type`` of ``chat.join`` will be handled by the method ``chat_join``. .. note:: If you are inheriting from the ``AsyncConsumer`` class tree, all your event handlers, including ones for events over the channel layer, must be asynchronous (``async def``). If you are in the ``SyncConsumer`` class tree instead, they must all be synchronous (``def``). Single Channels --------------- Each application instance - so, for example, each long-running HTTP request or open WebSocket - results in a single Consumer instance, and if you have channel layers enabled, Consumers will generate a unique *channel name* for themselves, and start listening on it for events. This means you can send those consumers events from outside the process - from other consumers, maybe, or from management commands - and they will react to them and run code just like they would events from their client connection. The channel name is available on a consumer as ``self.channel_name``. Here's an example of writing the channel name into a database upon connection, and then specifying a handler method for events on it: .. 
code-block:: python class ChatConsumer(WebsocketConsumer): def connect(self): # Make a database row with our channel name Clients.objects.create(channel_name=self.channel_name) def disconnect(self, close_code): # Note that in some rare cases (power loss, etc) disconnect may fail # to run; this naive example would leave zombie channel names around. Clients.objects.filter(channel_name=self.channel_name).delete() def chat_message(self, event): # Handles the "chat.message" event when it's sent to us. self.send(text_data=event["text"]) Note that, because you're mixing event handling from the channel layer and from the protocol connection, you need to make sure that your type names do not clash. It's recommended you prefix type names (like we did here with ``chat.``) to avoid clashes. To send to a single channel, just find its channel name (for the example above, we could crawl the database), and use ``channel_layer.send``: .. code-block:: python from channels.layers import get_channel_layer channel_layer = get_channel_layer() await channel_layer.send("channel_name", { "type": "chat.message", "text": "Hello there!", }) .. _groups: Groups ------ Obviously, sending to individual channels isn't particularly useful - in most cases you'll want to send to multiple channels/consumers at once as a broadcast. Not only for cases like a chat where you want to send incoming messages to everyone in the room, but even for sending to an individual user who might have more than one browser tab or device connected. You can construct your own solution for this if you like using your existing datastores, or you can use the Groups system built-in to some channel layers. Groups is a broadcast system that: * Allows you to add and remove channel names from named groups, and send to those named groups. * Provides group expiry for clean-up of connections whose disconnect handler didn't get to run (e.g. 
power failure) They do not allow you to enumerate or list the channels in a group; it's a pure broadcast system. If you need more precise control or need to know who is connected, you should build your own system or use a suitable third-party one. You use groups by adding a channel to them during connection, and removing it during disconnection, illustrated here on the WebSocket generic consumer: .. code-block:: python # This example uses WebSocket consumer, which is synchronous, and so # needs the async channel layer functions to be converted. from asgiref.sync import async_to_sync class ChatConsumer(WebsocketConsumer): def connect(self): async_to_sync(self.channel_layer.group_add)("chat", self.channel_name) def disconnect(self, close_code): async_to_sync(self.channel_layer.group_discard)("chat", self.channel_name) .. note:: Group names are restricted to ASCII alphanumerics, hyphens, and periods only and are limited to a maximum length of 100 in the default backend. Then, to send to a group, use ``group_send``, like in this small example which broadcasts chat messages to every connected socket when combined with the code above: .. code-block:: python class ChatConsumer(WebsocketConsumer): ... def receive(self, text_data): async_to_sync(self.channel_layer.group_send)( "chat", { "type": "chat.message", "text": text_data, }, ) def chat_message(self, event): self.send(text_data=event["text"]) Using Outside Of Consumers -------------------------- You'll often want to send to the channel layer from outside of the scope of a consumer, and so you won't have ``self.channel_layer``. In this case, you should use the ``get_channel_layer`` function to retrieve it: .. code-block:: python from channels.layers import get_channel_layer channel_layer = get_channel_layer() Then, once you have it, you can just call methods on it. Remember that **channel layers only support async methods**, so you can either call it from your own asynchronous context: .. 
code-block:: python for chat_name in chats: await channel_layer.group_send( chat_name, {"type": "chat.system_message", "text": announcement_text}, ) Or you'll need to use async_to_sync: .. code-block:: python from asgiref.sync import async_to_sync async_to_sync(channel_layer.group_send)("chat", {"type": "chat.force_disconnect"}) channels-4.0.0/docs/topics/consumers.rst000066400000000000000000000363541432260166700203310ustar00rootroot00000000000000Consumers ========= Channels is built around a basic low-level spec called :doc:`ASGI `. ASGI is more designed for interoperability than for writing complex applications in. So, Channels provides you with Consumers, a rich abstraction that allows you to create ASGI applications easily. Consumers do a couple of things in particular: * Structure your code as a series of functions to be called whenever an event happens, rather than making you write an event loop. * Allow you to write synchronous or async code, and deal with handoffs and threading for you. Of course, you are free to ignore consumers and use the other parts of Channels - like routing, session handling and authentication - with any ASGI app, but they're generally the best way to write your application code. .. _sync_to_async: Basic Layout ------------ A consumer is a subclass of either ``channels.consumer.AsyncConsumer`` or ``channels.consumer.SyncConsumer``. As these names suggest, one will expect you to write async-capable code, while the other will run your code synchronously in a threadpool. Let's look at a basic example of a ``SyncConsumer``: .. 
code-block:: python from channels.consumer import SyncConsumer class EchoConsumer(SyncConsumer): def websocket_connect(self, event): self.send({ "type": "websocket.accept", }) def websocket_receive(self, event): self.send({ "type": "websocket.send", "text": event["text"], }) This is a very simple WebSocket echo server - it will accept all incoming WebSocket connections, and then reply to all incoming WebSocket text frames with the same text. Consumers are structured around a series of named methods corresponding to the ``type`` value of the messages they are going to receive, with any ``.`` replaced by ``_``. The two handlers above are handling ``websocket.connect`` and ``websocket.receive`` messages respectively. How did we know what event types we were going to get and what would be in them (like ``websocket.receive`` having a ``text``) key? That's because we designed this against the ASGI WebSocket specification, which tells us how WebSockets are presented - read more about it in :doc:`ASGI ` - and protected this application with a router that checks for a scope type of ``websocket`` - see more about that in :doc:`/topics/routing`. Apart from that, the only other basic API is ``self.send(event)``. This lets you send events back to the client or protocol server as defined by the protocol - if you read the WebSocket protocol, you'll see that the dict we send above is how you send a text frame to the client. The ``AsyncConsumer`` is laid out very similarly, but all the handler methods must be coroutines, and ``self.send`` is a coroutine: .. code-block:: python from channels.consumer import AsyncConsumer class EchoConsumer(AsyncConsumer): async def websocket_connect(self, event): await self.send({ "type": "websocket.accept", }) async def websocket_receive(self, event): await self.send({ "type": "websocket.send", "text": event["text"], }) When should you use ``AsyncConsumer`` and when should you use ``SyncConsumer``? 
The main thing to consider is what you're talking to. If you call a slow synchronous function from inside an ``AsyncConsumer`` you're going to hold up the entire event loop, so they're only useful if you're also calling async code (for example, using ``HTTPX`` to fetch 20 pages in parallel). If you're calling any part of Django's ORM or other synchronous code, you should use a ``SyncConsumer``, as this will run the whole consumer in a thread and stop your ORM queries blocking the entire server. We recommend that you **write SyncConsumers by default**, and only use AsyncConsumers in cases where you know you are doing something that would be improved by async handling (long-running tasks that could be done in parallel) *and* you are only using async-native libraries. If you really want to call a synchronous function from an ``AsyncConsumer``, take a look at ``asgiref.sync.sync_to_async``, which is the utility that Channels uses to run ``SyncConsumers`` in threadpools, and can turn any synchronous callable into an asynchronous coroutine. .. important:: If you want to call the Django ORM from an ``AsyncConsumer`` (or any other asynchronous code), you should use the ``database_sync_to_async`` adapter instead. See :doc:`/topics/databases` for more. Closing Consumers ~~~~~~~~~~~~~~~~~ When the socket or connection attached to your consumer is closed - either by you or the client - you will likely get an event sent to you (for example, ``http.disconnect`` or ``websocket.disconnect``), and your application instance will be given a short amount of time to act on it. Once you have finished doing your post-disconnect cleanup, you need to raise ``channels.exceptions.StopConsumer`` to halt the ASGI application cleanly and let the server clean it up. If you leave it running - by not raising this exception - the server will reach its application close timeout (which is 10 seconds by default in Daphne) and then kill your application and raise a warning. 
The generic consumers below do this for you, so this is only needed if you are writing your own consumer class based on ``AsyncConsumer`` or ``SyncConsumer``. However, if you override their ``__call__`` method, or block the handling methods that it calls from returning, you may still run into this; take a look at their source code if you want more information. Additionally, if you launch your own background coroutines, make sure to also shut them down when the connection is finished, or you'll leak coroutines into the server. Channel Layers ~~~~~~~~~~~~~~ Consumers also let you deal with Channel's *channel layers*, to let them send messages between each other either one-to-one or via a broadcast system called groups. Consumers will use the channel layer ``default`` unless the ``channel_layer_alias`` attribute is set when subclassing any of the provided ``Consumer`` classes. To use the channel layer ``echo_alias`` we would set it like so: .. code-block:: python from channels.consumer import SyncConsumer class EchoConsumer(SyncConsumer): channel_layer_alias = "echo_alias" You can read more in :doc:`/topics/channel_layers`. .. _scope: Scope ----- Consumers receive the connection's ``scope`` when they are called, which contains a lot of the information you'd find on the ``request`` object in a Django view. It's available as ``self.scope`` inside the consumer's methods. Scopes are part of the :doc:`ASGI specification `, but here are some common things you might want to use: * ``scope["path"]``, the path on the request. *(HTTP and WebSocket)* * ``scope["headers"]``, raw name/value header pairs from the request *(HTTP and WebSocket)* * ``scope["method"]``, the method name used for the request. *(HTTP)* If you enable things like :doc:`authentication`, you'll also be able to access the user object as ``scope["user"]``, and the URLRouter, for example, will put captured groups from the URL into ``scope["url_route"]``. 
In general, the scope is the place to get connection information and where middleware will put attributes it wants to let you access (in the same way that Django's middleware adds things to ``request``). For a full list of what can occur in a connection scope, look at the basic ASGI spec for the protocol you are terminating, plus any middleware or routing code you are using. The web (HTTP and WebSocket) scopes are available in `the Web ASGI spec `_. Generic Consumers ----------------- What you see above is the basic layout of a consumer that works for any protocol. Much like Django's *generic views*, Channels ships with *generic consumers* that wrap common functionality up so you don't need to rewrite it, specifically for HTTP and WebSocket handling. WebsocketConsumer ~~~~~~~~~~~~~~~~~ Available as ``channels.generic.websocket.WebsocketConsumer``, this wraps the verbose plain-ASGI message sending and receiving into handling that just deals with text and binary frames: .. code-block:: python from channels.generic.websocket import WebsocketConsumer class MyConsumer(WebsocketConsumer): groups = ["broadcast"] def connect(self): # Called on connection. # To accept the connection call: self.accept() # Or accept the connection and specify a chosen subprotocol. # A list of subprotocols specified by the connecting client # will be available in self.scope['subprotocols'] self.accept("subprotocol") # To reject the connection, call: self.close() def receive(self, text_data=None, bytes_data=None): # Called with either text_data or bytes_data for each frame # You can call: self.send(text_data="Hello world!") # Or, to send a binary frame: self.send(bytes_data="Hello world!") # Want to force-close the connection? Call: self.close() # Or add a custom WebSocket error code! 
self.close(code=4123) def disconnect(self, close_code): # Called when the socket closes You can also raise ``channels.exceptions.AcceptConnection`` or ``channels.exceptions.DenyConnection`` from anywhere inside the ``connect`` method in order to accept or reject a connection, if you want reusable authentication or rate-limiting code that doesn't need to use mixins. A ``WebsocketConsumer``'s channel will automatically be added to (on connect) and removed from (on disconnect) any groups whose names appear in the consumer's ``groups`` class attribute. ``groups`` must be an iterable, and a channel layer with support for groups must be set as the channel backend (``channels.layers.InMemoryChannelLayer`` and ``channels_redis.core.RedisChannelLayer`` both support groups). If no channel layer is configured or the channel layer doesn't support groups, connecting to a ``WebsocketConsumer`` with a non-empty ``groups`` attribute will raise ``channels.exceptions.InvalidChannelLayerError``. See :ref:`groups` for more. AsyncWebsocketConsumer ~~~~~~~~~~~~~~~~~~~~~~ Available as ``channels.generic.websocket.AsyncWebsocketConsumer``, this has the exact same methods and signature as ``WebsocketConsumer`` but everything is async, and the functions you need to write have to be as well: .. code-block:: python from channels.generic.websocket import AsyncWebsocketConsumer class MyConsumer(AsyncWebsocketConsumer): groups = ["broadcast"] async def connect(self): # Called on connection. # To accept the connection call: await self.accept() # Or accept the connection and specify a chosen subprotocol. 
# A list of subprotocols specified by the connecting client # will be available in self.scope['subprotocols'] await self.accept("subprotocol") # To reject the connection, call: await self.close() async def receive(self, text_data=None, bytes_data=None): # Called with either text_data or bytes_data for each frame # You can call: await self.send(text_data="Hello world!") # Or, to send a binary frame: await self.send(bytes_data="Hello world!") # Want to force-close the connection? Call: await self.close() # Or add a custom WebSocket error code! await self.close(code=4123) async def disconnect(self, close_code): # Called when the socket closes JsonWebsocketConsumer ~~~~~~~~~~~~~~~~~~~~~ Available as ``channels.generic.websocket.JsonWebsocketConsumer``, this works like ``WebsocketConsumer``, except it will auto-encode and decode to JSON sent as WebSocket text frames. The only API differences are: * Your ``receive_json`` method must take a single argument, ``content``, that is the decoded JSON object. * ``self.send_json`` takes only a single argument, ``content``, which will be encoded to JSON for you. If you want to customise the JSON encoding and decoding, you can override the ``encode_json`` and ``decode_json`` classmethods. AsyncJsonWebsocketConsumer ~~~~~~~~~~~~~~~~~~~~~~~~~~ An async version of ``JsonWebsocketConsumer``, available as ``channels.generic.websocket.AsyncJsonWebsocketConsumer``. Note that even ``encode_json`` and ``decode_json`` are async functions. AsyncHttpConsumer ~~~~~~~~~~~~~~~~~ Available as ``channels.generic.http.AsyncHttpConsumer``, this offers basic primitives to implement a HTTP endpoint: .. code-block:: python from channels.generic.http import AsyncHttpConsumer class BasicHttpConsumer(AsyncHttpConsumer): async def handle(self, body): await asyncio.sleep(10) await self.send_response(200, b"Your response bytes", headers=[ (b"Content-Type", b"text/plain"), ]) You are expected to implement your own ``handle`` method. 
The method receives the whole request body as a single bytestring. Headers may either be passed as a list of tuples or as a dictionary. The response body content is expected to be a bytestring. You can also implement a ``disconnect`` method if you want to run code on disconnect - for example, to shut down any coroutines you launched. This will run even on an unclean disconnection, so don't expect that ``handle`` has finished running cleanly. If you need more control over the response, e.g. for implementing long polling, use the lower level ``self.send_headers`` and ``self.send_body`` methods instead. This example already mentions channel layers which will be explained in detail later: .. code-block:: python import json from channels.generic.http import AsyncHttpConsumer class LongPollConsumer(AsyncHttpConsumer): async def handle(self, body): await self.send_headers(headers=[ (b"Content-Type", b"application/json"), ]) # Headers are only sent after the first body event. # Set "more_body" to tell the interface server to not # finish the response yet: await self.send_body(b"", more_body=True) async def chat_message(self, event): # Send JSON and finish the response: await self.send_body(json.dumps(event).encode("utf-8")) Of course you can also use those primitives to implement a HTTP endpoint for `Server-sent events `_: .. 
code-block:: python import asyncio from datetime import datetime
database_sync_to_async ---------------------- ``channels.db.database_sync_to_async`` is a version of ``asgiref.sync.sync_to_async`` that also cleans up database connections on exit. To use it, write your ORM queries in a separate function or method, and then call it with ``database_sync_to_async`` like so: .. code-block:: python from channels.db import database_sync_to_async async def connect(self): self.username = await database_sync_to_async(self.get_name)() def get_name(self): return User.objects.all()[0].name You can also use it as a decorator: .. code-block:: python from channels.db import database_sync_to_async async def connect(self): self.username = await self.get_name() @database_sync_to_async def get_name(self): return User.objects.all()[0].name channels-4.0.0/docs/topics/routing.rst000066400000000000000000000107261432260166700177750ustar00rootroot00000000000000Routing ======= While consumers are valid :doc:`ASGI ` applications, you don't want to just write one and have that be the only thing you can give to protocol servers like Daphne. Channels provides routing classes that allow you to combine and stack your consumers (and any other valid ASGI application) to dispatch based on what the connection is. .. important:: Channels routers only work on the *scope* level, not on the level of individual *events*, which means you can only have one consumer for any given connection. Routing is to work out what single consumer to give a connection, not how to spread events from one connection across multiple consumers. Routers are themselves valid ASGI applications, and it's possible to nest them. We suggest that you have a ``ProtocolTypeRouter`` as the root application of your project - the one that you pass to protocol servers - and nest other, more protocol-specific routing underneath there. 
Channels expects you to be able to define a single *root application*, and provide the path to it as the ``ASGI_APPLICATION`` setting (think of this as being analogous to the ``ROOT_URLCONF`` setting in Django). There's no fixed rule as to where you need to put the routing and the root application, but we recommend following Django's conventions and putting them in a project-level file called ``asgi.py``, next to ``urls.py``. You can read more about deploying Channels projects and settings in :doc:`/deploying`. Here's an example of what that ``asgi.py`` might look like: .. include:: ../includes/asgi_example.rst .. note:: We call the ``as_asgi()`` classmethod when routing our consumers. This returns an ASGI wrapper application that will instantiate a new consumer instance for each connection or scope. This is similar to Django's ``as_view()``, which plays the same role for per-request instances of class-based views. It's possible to have routers from third-party apps, too, or write your own, but we'll go over the built-in Channels ones here. ProtocolTypeRouter ------------------ ``channels.routing.ProtocolTypeRouter`` This should be the top level of your ASGI application stack and the main entry in your routing file. It lets you dispatch to one of a number of other ASGI applications based on the ``type`` value present in the ``scope``. Protocols will define a fixed type value that their scope contains, so you can use this to distinguish between incoming connection types. It takes a single argument - a dictionary mapping type names to ASGI applications that serve them: .. code-block:: python ProtocolTypeRouter({ "http": some_app, "websocket": some_other_app, }) If you want to split HTTP handling between long-poll handlers and Django views, use a URLRouter using Django's ``get_asgi_application()`` specified as the last entry with a match-everything pattern. .. 
_urlrouter: URLRouter --------- ``channels.routing.URLRouter`` Routes ``http`` or ``websocket`` type connections via their HTTP path. Takes a single argument, a list of Django URL objects (either ``path()`` or ``re_path()``): .. code-block:: python URLRouter([ re_path(r"^longpoll/$", LongPollConsumer.as_asgi()), re_path(r"^notifications/(?P\w+)/$", LongPollConsumer.as_asgi()), re_path(r"", get_asgi_application()), ]) Any captured groups will be provided in ``scope`` as the key ``url_route``, a dict with a ``kwargs`` key containing a dict of the named regex groups and an ``args`` key with a list of positional regex groups. Note that named and unnamed groups cannot be mixed: Positional groups are discarded as soon as a single named group is matched. For example, to pull out the named group ``stream`` in the example above, you would do this: .. code-block:: python stream = self.scope["url_route"]["kwargs"]["stream"] Please note that ``URLRouter`` nesting will not work properly with ``path()`` routes if inner routers are wrapped by additional middleware. See `Issue #1428 `__. ChannelNameRouter ----------------- ``channels.routing.ChannelNameRouter`` Routes ``channel`` type scopes based on the value of the ``channel`` key in their scope. Intended for use with the :doc:`/topics/worker`. It takes a single argument - a dictionary mapping channel names to ASGI applications that serve them: .. code-block:: python ChannelNameRouter({ "thumbnails-generate": some_app, "thumbnails-delete": some_other_app, }) channels-4.0.0/docs/topics/security.rst000066400000000000000000000047541432260166700201610ustar00rootroot00000000000000Security ======== This covers basic security for protocols you're serving via Channels and helpers that we provide. WebSockets ---------- WebSockets start out life as a HTTP request, including all the cookies and headers, and so you can use the standard :doc:`/topics/authentication` code in order to grab current sessions and check user IDs. 
There is also a risk of cross-site request forgery (CSRF) with WebSockets though, as they can be initiated from any site on the internet to your domain, and will still have the user's cookies and session from your site. If you serve private data down the socket, you should restrict the sites which are allowed to open sockets to you. This is done via the ``channels.security.websocket`` package, and the two ASGI middlewares it contains, ``OriginValidator`` and ``AllowedHostsOriginValidator``. ``OriginValidator`` lets you restrict the valid options for the ``Origin`` header that is sent with every WebSocket to say where it comes from. Just wrap it around your WebSocket application code like this, and pass it a list of valid domains as the second argument. You can pass only a single domain (for example, ``.allowed-domain.com``) or a full origin, in the format ``scheme://domain[:port]`` (for example, ``http://allowed-domain.com:80``). Port is optional, but recommended: .. code-block:: python from channels.security.websocket import OriginValidator application = ProtocolTypeRouter({ "websocket": OriginValidator( AuthMiddlewareStack( URLRouter([ ... ]) ), [".goodsite.com", "http://.goodsite.com:80", "http://other.site.com"], ), }) Note: If you want to resolve any domain, then use the origin ``*``. Often, the set of domains you want to restrict to is the same as the Django ``ALLOWED_HOSTS`` setting, which performs a similar security check for the ``Host`` header, and so ``AllowedHostsOriginValidator`` lets you use this setting without having to re-declare the list: .. code-block:: python from channels.security.websocket import AllowedHostsOriginValidator application = ProtocolTypeRouter({ "websocket": AllowedHostsOriginValidator( AuthMiddlewareStack( URLRouter([ ... ]) ), ), }) ``AllowedHostsOriginValidator`` will also automatically allow local connections through if the site is in ``DEBUG`` mode, much like Django's host validation. 
channels-4.0.0/docs/topics/sessions.rst000066400000000000000000000057751432260166700201640ustar00rootroot00000000000000Sessions ======== Channels supports standard Django sessions using HTTP cookies for both HTTP and WebSocket. There are some caveats, however. Basic Usage ----------- The ``SessionMiddleware`` in Channels supports standard Django sessions, and like all middleware, should be wrapped around the ASGI application that needs the session information in its scope (for example, a ``URLRouter`` to apply it to a whole collection of consumers, or an individual consumer). ``SessionMiddleware`` requires ``CookieMiddleware`` to function. For convenience, these are also provided as a combined callable called ``SessionMiddlewareStack`` that includes both. All are importable from ``channels.session``. To use the middleware, wrap it around the appropriate level of consumer in your ``asgi.py``: .. code-block:: python from channels.routing import ProtocolTypeRouter, URLRouter from channels.security.websocket import AllowedHostsOriginValidator from channels.sessions import SessionMiddlewareStack from myapp import consumers application = ProtocolTypeRouter({ "websocket": AllowedHostsOriginValidator( SessionMiddlewareStack( URLRouter([ path("frontend/", consumers.AsyncChatConsumer.as_asgi()), ]) ) ), }) ``SessionMiddleware`` will only work on protocols that provide HTTP headers in their ``scope`` - by default, this is HTTP and WebSocket. To access the session, use ``self.scope["session"]`` in your consumer code: .. code-block:: python class ChatConsumer(WebsocketConsumer): def connect(self, event): self.scope["session"]["seed"] = random.randint(1, 1000) ``SessionMiddleware`` respects all the same Django settings as the default Django session framework, like ``SESSION_COOKIE_NAME`` and ``SESSION_COOKIE_DOMAIN``. 
Session Persistence ------------------- Within HTTP consumers or ASGI applications, session persistence works as you would expect from Django HTTP views - sessions are saved whenever you send a HTTP response that does not have status code ``500``. This is done by overriding any ``http.response.start`` messages to inject cookie headers into the response as you send it out. If you have set the ``SESSION_SAVE_EVERY_REQUEST`` setting to ``True``, it will save the session and send the cookie on every response, otherwise it will only save whenever the session is modified. If you are in a WebSocket consumer, however, the session is populated **but will never be saved automatically** - you must call ``scope["session"].save()`` yourself whenever you want to persist a session to your session store. If you don't save, the session will still work correctly inside the consumer (as it's stored as an instance variable), but other connections or HTTP views won't be able to see the changes. .. note:: If you are in a long-polling HTTP consumer, you might want to save changes to the session before you send a response. If you want to do this, call ``scope["session"].save()``. channels-4.0.0/docs/topics/testing.rst000066400000000000000000000270351432260166700177640ustar00rootroot00000000000000Testing ======= Testing Channels consumers is a little trickier than testing normal Django views due to their underlying asynchronous nature. To help with testing, Channels provides test helpers called *Communicators*, which allow you to wrap up an ASGI application (like a consumer) into its own event loop and ask it questions. `You can test asynchronous code `_ using Django's ``TestCase``. Alternately, you can use ``pytest`` with its ``asyncio`` plugin. 
Setting Up Async Tests ---------------------- To use Django's ``TestCase`` you simply define an ``async def`` test method in order to provide the appropriate async context:: from django.test import TestCase from channels.testing import HttpCommunicator from myproject.myapp.consumers import MyConsumer class MyTests(TestCase): async def test_my_consumer(self): communicator = HttpCommunicator(MyConsumer.as_asgi(), "GET", "/test/") response = await communicator.get_response() self.assertEqual(response["body"], b"test response") self.assertEqual(response["status"], 200) To use ``pytest`` you need to set it up with async test support, and presumably Django test support as well. You can do this by installing the ``pytest-django`` and ``pytest-asyncio`` packages: .. code-block:: sh python -m pip install -U pytest-django pytest-asyncio Then, you need to decorate the tests you want to run async with ``pytest.mark.asyncio``. Note that you can't mix this with ``unittest.TestCase`` subclasses; you have to write async tests as top-level test functions in the native ``pytest`` style: .. code-block:: python import pytest from channels.testing import HttpCommunicator from myproject.myapp.consumers import MyConsumer @pytest.mark.asyncio async def test_my_consumer(): communicator = HttpCommunicator(MyConsumer.as_asgi(), "GET", "/test/") response = await communicator.get_response() assert response["body"] == b"test response" assert response["status"] == 200 There's a few variants of the Communicator - a plain one for generic usage, and one each for HTTP and WebSockets specifically that have shortcut methods, ApplicationCommunicator ----------------------- ``ApplicationCommunicator`` is the generic test helper for any ASGI application. It provides several basic methods for interaction as explained below. 
You should only need this generic class for non-HTTP/WebSocket tests, though you might need to fall back to it if you are testing things like HTTP chunked responses or long-polling, which aren't supported in ``HttpCommunicator`` yet. .. note:: ``ApplicationCommunicator`` is actually provided by the base ``asgiref`` package, but we let you import it from ``channels.testing`` for convenience. To construct it, pass it an application and a scope: .. code-block:: python from channels.testing import ApplicationCommunicator communicator = ApplicationCommunicator(MyConsumer.as_asgi(), {"type": "http", ...}) send_input ~~~~~~~~~~ Call it to send an event to the application: .. code-block:: python await communicator.send_input({ "type": "http.request", "body": b"chunk one \x01 chunk two", }) receive_output ~~~~~~~~~~~~~~ Call it to receive an event from the application: .. code-block:: python event = await communicator.receive_output(timeout=1) assert event["type"] == "http.response.start" .. _application_communicator-receive_nothing: receive_nothing ~~~~~~~~~~~~~~~ Call it to check that there is no event waiting to be received from the application: .. code-block:: python assert await communicator.receive_nothing(timeout=0.1, interval=0.01) is False # Receive the rest of the http request from above event = await communicator.receive_output() assert event["type"] == "http.response.body" assert event.get("more_body") is True event = await communicator.receive_output() assert event["type"] == "http.response.body" assert event.get("more_body") is None # Check that there isn't another event assert await communicator.receive_nothing() is True # You could continue to send and receive events # await communicator.send_input(...) The method has two optional parameters: * ``timeout``: number of seconds to wait to ensure the queue is empty. Defaults to 0.1. * ``interval``: number of seconds to wait for another check for new events. Defaults to 0.01. 
wait ~~~~ Call it to wait for an application to exit (you'll need to either do this or wait for it to send you output before you can see what it did using mocks or inspection): .. code-block:: python await communicator.wait(timeout=1) If you're expecting your application to raise an exception, use ``pytest.raises`` around ``wait``: .. code-block:: python with pytest.raises(ValueError): await communicator.wait() HttpCommunicator ---------------- ``HttpCommunicator`` is a subclass of ``ApplicationCommunicator`` specifically tailored for HTTP requests. You need only instantiate it with your desired options: .. code-block:: python from channels.testing import HttpCommunicator communicator = HttpCommunicator(MyHttpConsumer.as_asgi(), "GET", "/test/") And then wait for its response: .. code-block:: python response = await communicator.get_response() assert response["body"] == b"test response" You can pass the following arguments to the constructor: * ``method``: HTTP method name (unicode string, required) * ``path``: HTTP path (unicode string, required) * ``body``: HTTP body (bytestring, optional) The response from the ``get_response`` method will be a dict with the following keys: * ``status``: HTTP status code (integer) * ``headers``: List of headers as (name, value) tuples (both bytestrings) * ``body``: HTTP response body (bytestring) WebsocketCommunicator --------------------- ``WebsocketCommunicator`` allows you to more easily test WebSocket consumers. It provides several convenience methods for interacting with a WebSocket application, as shown in this example: .. code-block:: python from channels.testing import WebsocketCommunicator communicator = WebsocketCommunicator(SimpleWebsocketApp.as_asgi(), "/testws/") connected, subprotocol = await communicator.connect() assert connected # Test sending text await communicator.send_to(text_data="hello") response = await communicator.receive_from() assert response == "hello" # Close await communicator.disconnect() .. 
note:: All of these methods are coroutines, which means you must ``await`` them. If you do not, your test will either time out (if you forgot to await a send) or try comparing things to a coroutine object (if you forgot to await a receive). .. important:: If you don't call ``WebsocketCommunicator.disconnect()`` before your test suite ends, you may find yourself getting ``RuntimeWarnings`` about things never being awaited, as you will be killing your app off in the middle of its lifecycle. You do not, however, have to ``disconnect()`` if your app already raised an error. You can also pass an ``application`` built with ``URLRouter`` instead of the plain consumer class. This lets you test applications that require positional or keyword arguments in the ``scope``: .. code-block:: python from channels.testing import WebsocketCommunicator application = URLRouter([ path("testws//", KwargsWebSocketApp.as_asgi()), ]) communicator = WebsocketCommunicator(application, "/testws/test/") connected, subprotocol = await communicator.connect() assert connected # Test on connection welcome message message = await communicator.receive_from() assert message == 'test' # Close await communicator.disconnect() .. note:: Since the ``WebsocketCommunicator`` class takes a URL in its constructor, a single Communicator can only test a single URL. If you want to test multiple different URLs, use multiple Communicators. connect ~~~~~~~ Triggers the connection phase of the WebSocket and waits for the application to either accept or deny the connection. Takes no parameters and returns either: * ``(True, )`` if the socket was accepted. ``chosen_subprotocol`` defaults to ``None``. * ``(False, )`` if the socket was rejected. ``close_code`` defaults to ``1000``. send_to ~~~~~~~ Sends a data frame to the application. Takes exactly one of ``bytes_data`` or ``text_data`` as parameters, and returns nothing: .. 
code-block:: python await communicator.send_to(bytes_data=b"hi\0") This method will type-check your parameters for you to ensure what you are sending really is text or bytes. send_json_to ~~~~~~~~~~~~ Sends a JSON payload to the application as a text frame. Call it with an object and it will JSON-encode it for you, and return nothing: .. code-block:: python await communicator.send_json_to({"hello": "world"}) receive_from ~~~~~~~~~~~~ Receives a frame from the application and gives you either ``bytes`` or ``text`` back depending on the frame type: .. code-block:: python response = await communicator.receive_from() Takes an optional ``timeout`` argument with a number of seconds to wait before timing out, which defaults to 1. It will typecheck your application's responses for you as well, to ensure that text frames contain text data, and binary frames contain binary data. receive_json_from ~~~~~~~~~~~~~~~~~ Receives a text frame from the application and decodes it for you: .. code-block:: python response = await communicator.receive_json_from() assert response == {"hello": "world"} Takes an optional ``timeout`` argument with a number of seconds to wait before timing out, which defaults to 1. receive_nothing ~~~~~~~~~~~~~~~ Checks that there is no frame waiting to be received from the application. For details see :ref:`ApplicationCommunicator `. disconnect ~~~~~~~~~~ Closes the socket from the client side. Takes nothing and returns nothing. You do not need to call this if the application instance you're testing already exited (for example, if it errored), but if you do call it, it will just silently return control to you. ChannelsLiveServerTestCase -------------------------- If you just want to run standard Selenium or other tests that require a webserver to be running for external programs, you can use ``ChannelsLiveServerTestCase``, which is a drop-in replacement for the standard Django ``LiveServerTestCase``: .. 
code-block:: python from channels.testing import ChannelsLiveServerTestCase class SomeLiveTests(ChannelsLiveServerTestCase): def test_live_stuff(self): call_external_testing_thing(self.live_server_url) .. note:: You can't use an in-memory database for your live tests. Therefore include a test database file name in your settings to tell Django to use a file database if you use SQLite: .. code-block:: python DATABASES = { "default": { "ENGINE": "django.db.backends.sqlite3", "NAME": os.path.join(BASE_DIR, "db.sqlite3"), "TEST": { "NAME": os.path.join(BASE_DIR, "db_test.sqlite3"), }, }, } serve_static ~~~~~~~~~~~~ Subclass ``ChannelsLiveServerTestCase`` with ``serve_static = True`` in order to serve static files (comparable to Django's ``StaticLiveServerTestCase``, you don't need to run collectstatic before or as a part of your tests setup). channels-4.0.0/docs/topics/troubleshooting.rst000066400000000000000000000021071432260166700215270ustar00rootroot00000000000000Troubleshooting =============== ImproperlyConfigured exception ------------------------------ .. code-block:: text django.core.exceptions.ImproperlyConfigured: Requested setting INSTALLED_APPS, but settings are not configured. You must either define the environment variable DJANGO_SETTINGS_MODULE or call settings.configure() before accessing settings. This exception occurs when your application tries to import any models before Django finishes `its initialization process `_ aka ``django.setup()``. ``django.setup()`` `should be called only once `_, and should be called manually only in case of standalone apps. In context of Channels usage, ``django.setup()`` is called automatically in ``get_asgi_application()``, which means it needs to be called before any ORM models are imported. The working code order would look like this: .. 
include:: ../includes/asgi_example.rst channels-4.0.0/docs/topics/worker.rst000066400000000000000000000063471432260166700176230ustar00rootroot00000000000000Worker and Background Tasks =========================== While :doc:`channel layers ` are primarily designed for communicating between different instances of ASGI applications, they can also be used to offload work to a set of worker servers listening on fixed channel names, as a simple, very-low-latency task queue. .. note:: The worker/background tasks system in Channels is simple and very fast, and achieves this by not having some features you may find useful, such as retries or return values. We recommend you use it for work that does not need guarantees around being complete (at-most-once delivery), and for work that needs more guarantees, look into a separate dedicated task queue. This feature does not work with the in-memory channel layer. Setting up background tasks works in two parts - sending the events, and then setting up the consumers to receive and process the events. Sending ------- To send an event, just send it to a fixed channel name. For example, let's say we want a background process that pre-caches thumbnails: .. code-block:: python # Inside a consumer self.channel_layer.send( "thumbnails-generate", { "type": "generate", "id": 123456789, }, ) Note that the event you send **must** have a ``type`` key, even if only one type of message is being sent over the channel, as it will turn into an event a consumer has to handle. Also remember that if you are sending the event from a synchronous environment, you have to use the ``asgiref.sync.async_to_sync`` wrapper as specified in :doc:`channel layers `. Receiving and Consumers ----------------------- Channels will present incoming worker tasks to you as events inside a scope with a ``type`` of ``channel``, and a ``channel`` key matching the channel name. 
We recommend you use ProtocolTypeRouter and ChannelNameRouter (see :doc:`/topics/routing` for more) to arrange your consumers: .. code-block:: python application = ProtocolTypeRouter({ ... "channel": ChannelNameRouter({ "thumbnails-generate": consumers.GenerateConsumer.as_asgi(), "thumbnails-delete": consumers.DeleteConsumer.as_asgi(), }), }) You'll be specifying the ``type`` values of the individual events yourself when you send them, so decide what your names are going to be and write consumers to match. For example, here's a basic consumer that expects to receive an event with ``type`` ``test.print``, and a ``text`` value containing the text to print: .. code-block:: python class PrintConsumer(SyncConsumer): def test_print(self, message): print("Test: " + message["text"]) Once you've hooked up the consumers, all you need to do is run a process that will handle them. In lieu of a protocol server - as there are no connections involved here - Channels instead provides you this with the ``runworker`` command: .. code-block:: text python manage.py runworker thumbnails-generate thumbnails-delete Note that ``runworker`` will only listen to the channels you pass it on the command line. If you do not include a channel, or forget to run the worker, your events will not be received and acted upon. channels-4.0.0/docs/tutorial/000077500000000000000000000000001432260166700161105ustar00rootroot00000000000000channels-4.0.0/docs/tutorial/index.rst000066400000000000000000000010521432260166700177470ustar00rootroot00000000000000Tutorial ======== Channels allows you to use WebSockets and other non-HTTP protocols in your Django site. For example you might want to use WebSockets to allow a page on your site to immediately receive updates from your Django server without using HTTP long-polling or other expensive techniques. 
In this tutorial we will build a simple chat server, where you can join an online room, post messages to the room, and have others in the same room see those messages immediately. .. toctree:: :maxdepth: 1 part_1 part_2 part_3 part_4 channels-4.0.0/docs/tutorial/part_1.rst000066400000000000000000000273011432260166700200330ustar00rootroot00000000000000Tutorial Part 1: Basic Setup ============================ .. note:: If you encounter any issue during your coding session, please see the :doc:`/topics/troubleshooting` section. In this tutorial we will build a simple chat server. It will have two pages: * An index view that lets you type the name of a chat room to join. * A room view that lets you see messages posted in a particular chat room. The room view will use a WebSocket to communicate with the Django server and listen for any messages that are posted. We assume that you are familiar with basic concepts for building a Django site. If not we recommend you complete `the Django tutorial`_ first and then come back to this tutorial. We assume that you have `Django installed`_ already. You can tell Django is installed and which version by running the following command in a shell prompt (indicated by the ``$`` prefix): .. code-block:: sh $ python3 -m django --version We also assume that you have :doc:`Channels and Daphne installed ` already. You can check by running the following command: .. code-block:: sh $ python3 -c 'import channels; import daphne; print(channels.__version__, daphne.__version__)' This tutorial is written for Channels 4.0, which supports Python 3.7+ and Django 3.2+. If the Channels version does not match, you can refer to the tutorial for your version of Channels by using the version switcher at the bottom left corner of this page, or update Channels to the newest version. This tutorial also **uses Docker** to install and run Redis. 
We use Redis as the backing store for the channel layer, which is an optional component of the Channels library that we use in the tutorial. `Install Docker`_ from its official website - there are official runtimes for Mac OS and Windows that make it easy to use, and packages for many Linux distributions where it can run natively. .. note:: While you can run the standard Django ``runserver`` without the need for Docker, the channels features we'll be using in later parts of the tutorial will need Redis to run, and we recommend Docker as the easiest way to do this. .. _the Django tutorial: https://docs.djangoproject.com/en/stable/intro/tutorial01/ .. _Django installed: https://docs.djangoproject.com/en/stable/intro/install/ .. _Install Docker: https://www.docker.com/get-docker Creating a project ------------------ If you don't already have a Django project, you will need to create one. From the command line, ``cd`` into a directory where you'd like to store your code, then run the following command: .. code-block:: sh $ django-admin startproject mysite This will create a ``mysite`` directory in your current directory with the following contents: .. code-block:: text mysite/ manage.py mysite/ __init__.py asgi.py settings.py urls.py wsgi.py Creating the Chat app --------------------- We will put the code for the chat server in its own app. Make sure you're in the same directory as ``manage.py`` and type this command: .. code-block:: sh $ python3 manage.py startapp chat That'll create a directory ``chat``, which is laid out like this: .. code-block:: text chat/ __init__.py admin.py apps.py migrations/ __init__.py models.py tests.py views.py For the purposes of this tutorial, we will only be working with ``chat/views.py`` and ``chat/__init__.py``. So remove all other files from the ``chat`` directory. After removing unnecessary files, the ``chat`` directory should look like: .. 
code-block:: text chat/ __init__.py views.py We need to tell our project that the ``chat`` app is installed. Edit the ``mysite/settings.py`` file and add ``'chat'`` to the **INSTALLED_APPS** setting. It'll look like this: .. code-block:: python # mysite/settings.py INSTALLED_APPS = [ 'chat', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] Add the index view ------------------ We will now create the first view, an index view that lets you type the name of a chat room to join. Create a ``templates`` directory in your ``chat`` directory. Within the ``templates`` directory you have just created, create another directory called ``chat``, and within that create a file called ``index.html`` to hold the template for the index view. Your chat directory should now look like: .. code-block:: text chat/ __init__.py templates/ chat/ index.html views.py Put the following code in ``chat/templates/chat/index.html``: .. code-block:: html Chat Rooms What chat room would you like to enter?

Create the view function for the room view. Put the following code in ``chat/views.py``: .. code-block:: python # chat/views.py from django.shortcuts import render def index(request): return render(request, "chat/index.html") To call the view, we need to map it to a URL - and for this we need a URLconf. To create a URLconf in the chat directory, create a file called ``urls.py``. Your app directory should now look like: .. code-block:: text chat/ __init__.py templates/ chat/ index.html urls.py views.py In the ``chat/urls.py`` file include the following code: .. code-block:: python # chat/urls.py from django.urls import path from . import views urlpatterns = [ path("", views.index, name="index"), ] The next step is to point the root URLconf at the **chat.urls** module. In ``mysite/urls.py``, add an import for **django.urls.include** and insert an **include()** in the **urlpatterns** list, so you have: .. code-block:: python # mysite/urls.py from django.contrib import admin from django.urls import include, path urlpatterns = [ path("chat/", include("chat.urls")), path("admin/", admin.site.urls), ] Let's verify that the index view works. Run the following command: .. code-block:: sh $ python3 manage.py runserver You'll see the following output on the command line: .. code-block:: text Watching for file changes with StatReloader Performing system checks... System check identified no issues (0 silenced). You have 18 unapplied migration(s). Your project may not work properly until you apply the migrations for app(s): admin, auth, contenttypes, sessions. Run 'python manage.py migrate' to apply them. August 19, 2022 - 10:05:13 Django version 4.1, using settings 'mysite.settings' Starting development server at http://127.0.0.1:8000/ Quit the server with CONTROL-C. .. note:: Ignore the warning about unapplied database migrations. We won't be using a database in this tutorial. 
Go to http://127.0.0.1:8000/chat/ in your browser and you should see the text "What chat room would you like to enter?" along with a text input to provide a room name. Type in "lobby" as the room name and press enter. You should be redirected to the room view at http://127.0.0.1:8000/chat/lobby/ but we haven't written the room view yet, so you'll get a "Page not found" error page. Go to the terminal where you ran the ``runserver`` command and press Control-C to stop the server. Integrate the Channels library ------------------------------ So far we've just created a regular Django app; we haven't used the Channels library at all. Now it's time to integrate Channels. Let's start by creating a routing configuration for Channels. A Channels :doc:`routing configuration ` is an ASGI application that is similar to a Django URLconf, in that it tells Channels what code to run when an HTTP request is received by the Channels server. Start by adjusting the ``mysite/asgi.py`` file to include the following code: .. code-block:: python # mysite/asgi.py import os from channels.routing import ProtocolTypeRouter from django.core.asgi import get_asgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings") application = ProtocolTypeRouter( { "http": get_asgi_application(), # Just HTTP for now. (We can add other protocols later.) } ) Now add the Daphne library to the list of installed apps, in order to enable an ASGI versions of the ``runserver`` command. Edit the ``mysite/settings.py`` file and add ``'daphne'`` to the top of the ``INSTALLED_APPS`` setting. It'll look like this: .. code-block:: python # mysite/settings.py INSTALLED_APPS = [ 'daphne', 'chat', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] You'll also need to point Daphne at the root routing configuration. 
Edit the ``mysite/settings.py`` file again and add the following to the bottom of it: .. code-block:: python # mysite/settings.py # Daphne ASGI_APPLICATION = "mysite.asgi.application" With Daphne now in the installed apps, it will take control of the ``runserver`` command, replacing the standard Django development server with the ASGI compatible version. .. note:: The Daphne development server will conflict with any other third-party apps that require an overloaded or replacement runserver command. In order to solve such issues, make sure ``daphne`` is at the top of your ``INSTALLED_APPS``, or remove the offending app altogether. Let's ensure that the Channels development server is working correctly. Run the following command: .. code-block:: sh $ python3 manage.py runserver You'll see the following output on the command line: .. code-block:: text Watching for file changes with StatReloader Performing system checks... System check identified no issues (0 silenced). You have 18 unapplied migration(s). Your project may not work properly until you apply the migrations for app(s): admin, auth, contenttypes, sessions. Run 'python manage.py migrate' to apply them. August 19, 2022 - 10:20:28 Django version 4.1, using settings 'mysite.settings' Starting ASGI/Daphne version 3.0.2 development server at http://127.0.0.1:8000/ Quit the server with CONTROL-C. Notice the line beginning with ``Starting ASGI/Daphne …``. This indicates that the Daphne development server has taken over from the Django development server. Go to http://127.0.0.1:8000/chat/ in your browser and you should still see the index page that we created before. Go to the terminal where you ran the ``runserver`` command and press Control-C to stop the server. This tutorial continues in :doc:`Tutorial 2 `. 
channels-4.0.0/docs/tutorial/part_2.rst000066400000000000000000000461221432260166700200360ustar00rootroot00000000000000Tutorial Part 2: Implement a Chat Server ======================================== This tutorial begins where :doc:`Tutorial 1 ` left off. We'll get the room page working so that you can chat with yourself and others in the same room. Add the room view ----------------- We will now create the second view, a room view that lets you see messages posted in a particular chat room. Create a new file ``chat/templates/chat/room.html``. Your app directory should now look like: .. code-block:: text chat/ __init__.py templates/ chat/ index.html room.html urls.py views.py Create the view template for the room view in ``chat/templates/chat/room.html``: .. code-block:: html Chat Room

{{ room_name|json_script:"room-name" }} Create the view function for the room view in ``chat/views.py``: .. code-block:: python # chat/views.py from django.shortcuts import render def index(request): return render(request, "chat/index.html") def room(request, room_name): return render(request, "chat/room.html", {"room_name": room_name}) Create the route for the room view in ``chat/urls.py``: .. code-block:: python # chat/urls.py from django.urls import path from . import views urlpatterns = [ path("", views.index, name="index"), path("/", views.room, name="room"), ] Start the Channels development server: .. code-block:: sh $ python3 manage.py runserver Go to http://127.0.0.1:8000/chat/ in your browser and to see the index page. Type in "lobby" as the room name and press enter. You should be redirected to the room page at http://127.0.0.1:8000/chat/lobby/ which now displays an empty chat log. Type the message "hello" and press enter. Nothing happens. In particular the message does not appear in the chat log. Why? The room view is trying to open a WebSocket to the URL ``ws://127.0.0.1:8000/ws/chat/lobby/`` but we haven't created a consumer that accepts WebSocket connections yet. If you open your browser's JavaScript console, you should see an error that looks like: .. code-block:: text WebSocket connection to 'ws://127.0.0.1:8000/ws/chat/lobby/' failed: Unexpected response code: 500 Write your first consumer ------------------------- When Django accepts an HTTP request, it consults the root URLconf to lookup a view function, and then calls the view function to handle the request. Similarly, when Channels accepts a WebSocket connection, it consults the root routing configuration to lookup a consumer, and then calls various functions on the consumer to handle events from the connection. 
We will write a basic consumer that accepts WebSocket connections on the path ``/ws/chat/ROOM_NAME/`` that takes any message it receives on the WebSocket and echos it back to the same WebSocket. .. note:: It is good practice to use a common path prefix like ``/ws/`` to distinguish WebSocket connections from ordinary HTTP connections because it will make deploying Channels to a production environment in certain configurations easier. In particular for large sites it will be possible to configure a production-grade HTTP server like nginx to route requests based on path to either (1) a production-grade WSGI server like Gunicorn+Django for ordinary HTTP requests or (2) a production-grade ASGI server like Daphne+Channels for WebSocket requests. Note that for smaller sites you can use a simpler deployment strategy where Daphne serves all requests - HTTP and WebSocket - rather than having a separate WSGI server. In this deployment configuration no common path prefix like ``/ws/`` is necessary. Create a new file ``chat/consumers.py``. Your app directory should now look like: .. code-block:: text chat/ __init__.py consumers.py templates/ chat/ index.html room.html urls.py views.py Put the following code in ``chat/consumers.py``: .. code-block:: python # chat/consumers.py import json from channels.generic.websocket import WebsocketConsumer class ChatConsumer(WebsocketConsumer): def connect(self): self.accept() def disconnect(self, close_code): pass def receive(self, text_data): text_data_json = json.loads(text_data) message = text_data_json["message"] self.send(text_data=json.dumps({"message": message})) This is a synchronous WebSocket consumer that accepts all connections, receives messages from its client, and echos those messages back to the same client. For now it does not broadcast messages to other clients in the same room. .. note:: Channels also supports writing *asynchronous* consumers for greater performance. 
However any asynchronous consumer must be careful to avoid directly performing blocking operations, such as accessing a Django model. See the :doc:`/topics/consumers` reference for more information about writing asynchronous consumers. We need to create a routing configuration for the ``chat`` app that has a route to the consumer. Create a new file ``chat/routing.py``. Your app directory should now look like: .. code-block:: text chat/ __init__.py consumers.py routing.py templates/ chat/ index.html room.html urls.py views.py Put the following code in ``chat/routing.py``: .. code-block:: python # chat/routing.py from django.urls import re_path from . import consumers websocket_urlpatterns = [ re_path(r"ws/chat/(?P\w+)/$", consumers.ChatConsumer.as_asgi()), ] We call the ``as_asgi()`` classmethod in order to get an ASGI application that will instantiate an instance of our consumer for each user-connection. This is similar to Django's ``as_view()``, which plays the same role for per-request Django view instances. (Note we use ``re_path()`` due to limitations in :ref:`URLRouter `.) The next step is to point the main ASGI configuration at the **chat.routing** module. In ``mysite/asgi.py``, import ``AuthMiddlewareStack``, ``URLRouter``, and ``chat.routing``; and insert a ``'websocket'`` key in the ``ProtocolTypeRouter`` list in the following format: .. code-block:: python # mysite/asgi.py import os from channels.auth import AuthMiddlewareStack from channels.routing import ProtocolTypeRouter, URLRouter from channels.security.websocket import AllowedHostsOriginValidator from django.core.asgi import get_asgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings") # Initialize Django ASGI application early to ensure the AppRegistry # is populated before importing code that may import ORM models. 
django_asgi_app = get_asgi_application() import chat.routing application = ProtocolTypeRouter( { "http": django_asgi_app, "websocket": AllowedHostsOriginValidator( AuthMiddlewareStack(URLRouter(chat.routing.websocket_urlpatterns)) ), } ) This root routing configuration specifies that when a connection is made to the Channels development server, the ``ProtocolTypeRouter`` will first inspect the type of connection. If it is a WebSocket connection (**ws://** or **wss://**), the connection will be given to the ``AuthMiddlewareStack``. The ``AuthMiddlewareStack`` will populate the connection's **scope** with a reference to the currently authenticated user, similar to how Django's ``AuthenticationMiddleware`` populates the **request** object of a view function with the currently authenticated user. (Scopes will be discussed later in this tutorial.) Then the connection will be given to the ``URLRouter``. The ``URLRouter`` will examine the HTTP path of the connection to route it to a particular consumer, based on the provided ``url`` patterns. Let's verify that the consumer for the ``/ws/chat/ROOM_NAME/`` path works. Run migrations to apply database changes (Django's session framework needs the database) and then start the Channels development server: .. code-block:: sh $ python manage.py migrate Operations to perform: Apply all migrations: admin, auth, contenttypes, sessions Running migrations: Applying contenttypes.0001_initial... OK Applying auth.0001_initial... OK Applying admin.0001_initial... OK Applying admin.0002_logentry_remove_auto_add... OK Applying admin.0003_logentry_add_action_flag_choices... OK Applying contenttypes.0002_remove_content_type_name... OK Applying auth.0002_alter_permission_name_max_length... OK Applying auth.0003_alter_user_email_max_length... OK Applying auth.0004_alter_user_username_opts... OK Applying auth.0005_alter_user_last_login_null... OK Applying auth.0006_require_contenttypes_0002... 
OK Applying auth.0007_alter_validators_add_error_messages... OK Applying auth.0008_alter_user_username_max_length... OK Applying auth.0009_alter_user_last_name_max_length... OK Applying auth.0010_alter_group_name_max_length... OK Applying auth.0011_update_proxy_permissions... OK Applying auth.0012_alter_user_first_name_max_length... OK Applying sessions.0001_initial... OK $ python3 manage.py runserver Go to the room page at http://127.0.0.1:8000/chat/lobby/ which now displays an empty chat log. Type the message "hello" and press enter. You should now see "hello" echoed in the chat log. However if you open a second browser tab to the same room page at http://127.0.0.1:8000/chat/lobby/ and type in a message, the message will not appear in the first tab. For that to work, we need to have multiple instances of the same ``ChatConsumer`` be able to talk to each other. Channels provides a **channel layer** abstraction that enables this kind of communication between consumers. Go to the terminal where you ran the ``runserver`` command and press Control-C to stop the server. Enable a channel layer ---------------------- A channel layer is a kind of communication system. It allows multiple consumer instances to talk with each other, and with other parts of Django. A channel layer provides the following abstractions: * A **channel** is a mailbox where messages can be sent to. Each channel has a name. Anyone who has the name of a channel can send a message to the channel. * A **group** is a group of related channels. A group has a name. Anyone who has the name of a group can add/remove a channel to the group by name and send a message to all channels in the group. It is not possible to enumerate what channels are in a particular group. Every consumer instance has an automatically generated unique channel name, and so can be communicated with via a channel layer. 
In our chat application we want to have multiple instances of ``ChatConsumer`` in the same room communicate with each other. To do that we will have each ChatConsumer add its channel to a group whose name is based on the room name. That will allow ChatConsumers to transmit messages to all other ChatConsumers in the same room. We will use a channel layer that uses Redis as its backing store. To start a Redis server on port 6379, run the following command: .. code-block:: sh $ docker run -p 6379:6379 -d redis:5 We need to install channels_redis so that Channels knows how to interface with Redis. Run the following command: .. code-block:: sh $ python3 -m pip install channels_redis Before we can use a channel layer, we must configure it. Edit the ``mysite/settings.py`` file and add a ``CHANNEL_LAYERS`` setting to the bottom. It should look like: .. code-block:: python # mysite/settings.py # Channels ASGI_APPLICATION = "mysite.asgi.application" CHANNEL_LAYERS = { "default": { "BACKEND": "channels_redis.core.RedisChannelLayer", "CONFIG": { "hosts": [("127.0.0.1", 6379)], }, }, } .. note:: It is possible to have multiple channel layers configured. However most projects will just use a single ``'default'`` channel layer. Let's make sure that the channel layer can communicate with Redis. Open a Django shell and run the following commands: .. code-block:: pycon $ python3 manage.py shell >>> import channels.layers >>> channel_layer = channels.layers.get_channel_layer() >>> from asgiref.sync import async_to_sync >>> async_to_sync(channel_layer.send)('test_channel', {'type': 'hello'}) >>> async_to_sync(channel_layer.receive)('test_channel') {'type': 'hello'} Type Control-D to exit the Django shell. Now that we have a channel layer, let's use it in ``ChatConsumer``. Put the following code in ``chat/consumers.py``, replacing the old code: .. 
code-block:: python # chat/consumers.py import json from asgiref.sync import async_to_sync from channels.generic.websocket import WebsocketConsumer class ChatConsumer(WebsocketConsumer): def connect(self): self.room_name = self.scope["url_route"]["kwargs"]["room_name"] self.room_group_name = "chat_%s" % self.room_name # Join room group async_to_sync(self.channel_layer.group_add)( self.room_group_name, self.channel_name ) self.accept() def disconnect(self, close_code): # Leave room group async_to_sync(self.channel_layer.group_discard)( self.room_group_name, self.channel_name ) # Receive message from WebSocket def receive(self, text_data): text_data_json = json.loads(text_data) message = text_data_json["message"] # Send message to room group async_to_sync(self.channel_layer.group_send)( self.room_group_name, {"type": "chat_message", "message": message} ) # Receive message from room group def chat_message(self, event): message = event["message"] # Send message to WebSocket self.send(text_data=json.dumps({"message": message})) When a user posts a message, a JavaScript function will transmit the message over WebSocket to a ChatConsumer. The ChatConsumer will receive that message and forward it to the group corresponding to the room name. Every ChatConsumer in the same group (and thus in the same room) will then receive the message from the group and forward it over WebSocket back to JavaScript, where it will be appended to the chat log. Several parts of the new ``ChatConsumer`` code deserve further explanation: * ``self.scope["url_route"]["kwargs"]["room_name"]`` * Obtains the ``'room_name'`` parameter from the URL route in ``chat/routing.py`` that opened the WebSocket connection to the consumer. * Every consumer has a :ref:`scope ` that contains information about its connection, including in particular any positional or keyword arguments from the URL route and the currently authenticated user if any. 
* ``self.room_group_name = "chat_%s" % self.room_name`` * Constructs a Channels group name directly from the user-specified room name, without any quoting or escaping. * Group names may only contain alphanumerics, hyphens, underscores, or periods. Therefore this example code will fail on room names that have other characters. * ``async_to_sync(self.channel_layer.group_add)(...)`` * Joins a group. * The ``async_to_sync(...)`` wrapper is required because ChatConsumer is a synchronous WebsocketConsumer but it is calling an asynchronous channel layer method. (All channel layer methods are asynchronous.) * Group names are restricted to ASCII alphanumerics, hyphens, and periods only and are limited to a maximum length of 100 in the default backend. Since this code constructs a group name directly from the room name, it will fail if the room name contains any characters that aren't valid in a group name or exceeds the length limit. * ``self.accept()`` * Accepts the WebSocket connection. * If you do not call ``accept()`` within the ``connect()`` method then the connection will be rejected and closed. You might want to reject a connection for example because the requesting user is not authorized to perform the requested action. * It is recommended that ``accept()`` be called as the *last* action in ``connect()`` if you choose to accept the connection. * ``async_to_sync(self.channel_layer.group_discard)(...)`` * Leaves a group. * ``async_to_sync(self.channel_layer.group_send)`` * Sends an event to a group. * An event has a special ``'type'`` key corresponding to the name of the method that should be invoked on consumers that receive the event. Let's verify that the new consumer for the ``/ws/chat/ROOM_NAME/`` path works. To start the Channels development server, run the following command: .. code-block:: sh $ python3 manage.py runserver Open a browser tab to the room page at http://127.0.0.1:8000/chat/lobby/. Open a second browser tab to the same room page. 
In the second browser tab, type the message "hello" and press enter. You should now see "hello" echoed in the chat log in both the second browser tab and in the first browser tab. You now have a basic fully-functional chat server! This tutorial continues in :doc:`Tutorial 3 `. channels-4.0.0/docs/tutorial/part_3.rst000066400000000000000000000074161432260166700200420ustar00rootroot00000000000000Tutorial Part 3: Rewrite Chat Server as Asynchronous ==================================================== This tutorial begins where :doc:`Tutorial 2 ` left off. We'll rewrite the consumer code to be asynchronous rather than synchronous to improve its performance. Rewrite the consumer to be asynchronous --------------------------------------- The ``ChatConsumer`` that we have written is currently synchronous. Synchronous consumers are convenient because they can call regular synchronous I/O functions such as those that access Django models without writing special code. However asynchronous consumers can provide a higher level of performance since they don't need to create additional threads when handling requests. ``ChatConsumer`` only uses async-native libraries (Channels and the channel layer) and in particular it does not access synchronous Django models. Therefore it can be rewritten to be asynchronous without complications. .. note:: Even if ``ChatConsumer`` *did* access Django models or other synchronous code it would still be possible to rewrite it as asynchronous. Utilities like :ref:`asgiref.sync.sync_to_async ` and :doc:`channels.db.database_sync_to_async ` can be used to call synchronous code from an asynchronous consumer. The performance gains however would be less than if it only used async-native libraries. Let's rewrite ``ChatConsumer`` to be asynchronous. Put the following code in ``chat/consumers.py``: .. 
code-block:: python # chat/consumers.py import json from channels.generic.websocket import AsyncWebsocketConsumer class ChatConsumer(AsyncWebsocketConsumer): async def connect(self): self.room_name = self.scope["url_route"]["kwargs"]["room_name"] self.room_group_name = "chat_%s" % self.room_name # Join room group await self.channel_layer.group_add(self.room_group_name, self.channel_name) await self.accept() async def disconnect(self, close_code): # Leave room group await self.channel_layer.group_discard(self.room_group_name, self.channel_name) # Receive message from WebSocket async def receive(self, text_data): text_data_json = json.loads(text_data) message = text_data_json["message"] # Send message to room group await self.channel_layer.group_send( self.room_group_name, {"type": "chat_message", "message": message} ) # Receive message from room group async def chat_message(self, event): message = event["message"] # Send message to WebSocket await self.send(text_data=json.dumps({"message": message})) This new code is for ChatConsumer is very similar to the original code, with the following differences: * ``ChatConsumer`` now inherits from ``AsyncWebsocketConsumer`` rather than ``WebsocketConsumer``. * All methods are ``async def`` rather than just ``def``. * ``await`` is used to call asynchronous functions that perform I/O. * ``async_to_sync`` is no longer needed when calling methods on the channel layer. Let's verify that the consumer for the ``/ws/chat/ROOM_NAME/`` path still works. To start the Channels development server, run the following command: .. code-block:: sh $ python3 manage.py runserver Open a browser tab to the room page at http://127.0.0.1:8000/chat/lobby/. Open a second browser tab to the same room page. In the second browser tab, type the message "hello" and press enter. You should now see "hello" echoed in the chat log in both the second browser tab and in the first browser tab. Now your chat server is fully asynchronous! 
This tutorial continues in :doc:`Tutorial 4 `. channels-4.0.0/docs/tutorial/part_4.rst000066400000000000000000000160771432260166700200460ustar00rootroot00000000000000Tutorial Part 4: Automated Testing ================================== This tutorial begins where :doc:`Tutorial 3 ` left off. We've built a simple chat server and now we'll create some automated tests for it. Testing the views ----------------- To ensure that the chat server keeps working, we will write some tests. We will write a suite of end-to-end tests using Selenium to control a Chrome web browser. These tests will ensure that: * when a chat message is posted then it is seen by everyone in the same room * when a chat message is posted then it is not seen by anyone in a different room `Install the Chrome web browser`_, if you do not already have it. `Install chromedriver`_. Install Selenium. Run the following command: .. code-block:: sh $ python3 -m pip install selenium .. _Install the Chrome web browser: https://www.google.com/chrome/ .. _Install chromedriver: https://sites.google.com/chromium.org/driver/getting-started Create a new file ``chat/tests.py``. Your app directory should now look like: .. code-block:: text chat/ __init__.py consumers.py routing.py templates/ chat/ index.html room.html tests.py urls.py views.py Put the following code in ``chat/tests.py``: .. 
code-block:: python # chat/tests.py from channels.testing import ChannelsLiveServerTestCase from selenium import webdriver from selenium.webdriver.common.action_chains import ActionChains from selenium.webdriver.common.by import By from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.wait import WebDriverWait class ChatTests(ChannelsLiveServerTestCase): serve_static = True # emulate StaticLiveServerTestCase @classmethod def setUpClass(cls): super().setUpClass() try: # NOTE: Requires "chromedriver" binary to be installed in $PATH cls.driver = webdriver.Chrome() except: super().tearDownClass() raise @classmethod def tearDownClass(cls): cls.driver.quit() super().tearDownClass() def test_when_chat_message_posted_then_seen_by_everyone_in_same_room(self): try: self._enter_chat_room("room_1") self._open_new_window() self._enter_chat_room("room_1") self._switch_to_window(0) self._post_message("hello") WebDriverWait(self.driver, 2).until( lambda _: "hello" in self._chat_log_value, "Message was not received by window 1 from window 1", ) self._switch_to_window(1) WebDriverWait(self.driver, 2).until( lambda _: "hello" in self._chat_log_value, "Message was not received by window 2 from window 1", ) finally: self._close_all_new_windows() def test_when_chat_message_posted_then_not_seen_by_anyone_in_different_room(self): try: self._enter_chat_room("room_1") self._open_new_window() self._enter_chat_room("room_2") self._switch_to_window(0) self._post_message("hello") WebDriverWait(self.driver, 2).until( lambda _: "hello" in self._chat_log_value, "Message was not received by window 1 from window 1", ) self._switch_to_window(1) self._post_message("world") WebDriverWait(self.driver, 2).until( lambda _: "world" in self._chat_log_value, "Message was not received by window 2 from window 2", ) self.assertTrue( "hello" not in self._chat_log_value, "Message was improperly received by window 2 from window 1", ) finally: self._close_all_new_windows() # === Utility 
=== def _enter_chat_room(self, room_name): self.driver.get(self.live_server_url + "/chat/") ActionChains(self.driver).send_keys(room_name, Keys.ENTER).perform() WebDriverWait(self.driver, 2).until( lambda _: room_name in self.driver.current_url ) def _open_new_window(self): self.driver.execute_script('window.open("about:blank", "_blank");') self._switch_to_window(-1) def _close_all_new_windows(self): while len(self.driver.window_handles) > 1: self._switch_to_window(-1) self.driver.execute_script("window.close();") if len(self.driver.window_handles) == 1: self._switch_to_window(0) def _switch_to_window(self, window_index): self.driver.switch_to.window(self.driver.window_handles[window_index]) def _post_message(self, message): ActionChains(self.driver).send_keys(message, Keys.ENTER).perform() @property def _chat_log_value(self): return self.driver.find_element( by=By.CSS_SELECTOR, value="#chat-log" ).get_property("value") Our test suite extends ``ChannelsLiveServerTestCase`` rather than Django's usual suites for end-to-end tests (``StaticLiveServerTestCase`` or ``LiveServerTestCase``) so that URLs inside the Channels routing configuration like ``/ws/room/ROOM_NAME/`` will work inside the suite. We are using ``sqlite3``, which for testing, is run as an in-memory database, and therefore, the tests will not run correctly. We need to tell our project that the ``sqlite3`` database need not to be in memory for run the tests. Edit the ``mysite/settings.py`` file and add the ``TEST`` argument to the **DATABASES** setting: .. code-block:: python # mysite/settings.py DATABASES = { "default": { "ENGINE": "django.db.backends.sqlite3", "NAME": BASE_DIR / "db.sqlite3", "TEST": { "NAME": BASE_DIR / "db.sqlite3", }, } } To run the tests, run the following command: .. code-block:: sh $ python3 manage.py test chat.tests You should see output that looks like: .. code-block:: text Creating test database for alias 'default'... System check identified no issues (0 silenced). .. 
---------------------------------------------------------------------- Ran 2 tests in 5.014s OK Destroying test database for alias 'default'... You now have a tested chat server! What's next? ------------ Congratulations! You've fully implemented a chat server, made it performant by writing it in asynchronous style, and written automated tests to ensure it won't break. This is the end of the tutorial. At this point you should know enough to start an app of your own that uses Channels and start fooling around. As you need to learn new tricks, come back to rest of the :ref:`documentation `. channels-4.0.0/loadtesting/000077500000000000000000000000001432260166700156325ustar00rootroot00000000000000channels-4.0.0/loadtesting/2016-09-06/000077500000000000000000000000001432260166700166735ustar00rootroot00000000000000channels-4.0.0/loadtesting/2016-09-06/README.rst000066400000000000000000000106711432260166700203670ustar00rootroot00000000000000Django Channels Load Testing Results for (2016-09-06) ===================================================== The goal of these load tests is to see how Channels performs with normal HTTP traffic under heavy load. In order to handle WebSockets, Channels introduced ASGI, a new interface spec for asynchronous request handling. Also, Channels implemented this spec with Daphne--an HTTP, HTTP2, and WebSocket protocol server. The load testing completed has been to compare how well Daphne using 1 worker performs with normal HTTP traffic in comparison to a WSGI HTTP server. Gunicorn was chosen as its configuration was simple and well-understood. Summary of Results ~~~~~~~~~~~~~~~~~~ Daphne is not as efficient as its WSGI counterpart. When considering only latency, Daphne can have 10 times the latency when under the same traffic load as gunicorn. When considering only throughput, Daphne can have 40-50% of the total throughput of gunicorn while still being at 2 times latency. The results should not be surprising considering the overhead involved. 
However, these results represent the simplest case to test and should be represented as saying that Daphne is always slower than an WSGI server. These results are a starting point, not a final conclusion. Some additional things that should be tested: - More than 1 worker - A separate server for redis - Comparison to other WebSocket servers, such as Node's socket.io or Rails' Action cable Methodology ~~~~~~~~~~~ In order to control for variances, several measures were taken: - the same testing tool was used across all tests, `loadtest `_. - all target machines were identical - all target code variances were separated into appropriate files in the dir of /testproject in this repo - all target config variances necessary to the different setups were controlled by supervisord so that human error was limited - across different test types, the same target machines were used, using the same target code and the same target config - several tests were run for each setup and test type Setups ~~~~~~ 3 setups were used for this set of tests: 1) Normal Django with Gunicorn (19.6.0) 2) Django Channels with local Redis (0.14.0) and Daphne (0.14.3) 3) Django Channels with IPC (1.1.0) and Daphne (0.14.3) Latency ~~~~~~~ All target and sources machines were identical ec2 instances m3.2xlarge running Ubuntu 16.04. In order to ensure that the same number of requests were sent, the rps flag was set to 300. .. image:: channels-latency.PNG Throughput ~~~~~~~~~~ The same source machine was used for all tests: ec2 instance m3.large running Ubuntu 16.04. All target machines were identical ec2 instances m3.2xlarge running Ubuntu 16.04. For the following tests, loadtest was permitted to autothrottle so as to limit errors; this led to varied latency times. Gunicorn had a latency of 6 ms; daphne and Redis, 12 ms; daphne and IPC, 35 ms. .. image:: channels-throughput.PNG Supervisor Configs ~~~~~~~~~~~~~~~~~~ **Gunicorn (19.6.0)** This is the non-channels config. 
It's a standard Django environment on one machine, using gunicorn to handle requests. .. code-block:: bash [program:gunicorn] command = gunicorn testproject.wsgi_no_channels -b 0.0.0.0:80 directory = /srv/channels/testproject/ user = root [group:django_http] programs=gunicorn priority=999 **Redis (0.14.0) and Daphne (0.14.3)** This is the channels config using redis as the backend. It's on one machine, so a local redis config. Also, it's a single worker, not multiple, as that's the default config. .. code-block:: bash [program:daphne] command = daphne -b 0.0.0.0 -p 80 testproject.asgi:channel_layer directory = /srv/channels/testproject/ user = root [program:worker] command = python manage.py runworker directory = /srv/channels/testproject/ user = django-channels [group:django_channels] programs=daphne,worker priority=999 **IPC (1.1.0) and Daphne (0.14.3)** This is the channels config using IPC (Inter Process Communication). It's only possible to have this work on one machine. .. code-block:: bash [program:daphne] command = daphne -b 0.0.0.0 -p 80 testproject.asgi_for_ipc:channel_layer directory = /srv/channels/testproject/ user = root [program:worker] command = python manage.py runworker --settings=testproject.settings.channels_ipc directory = /srv/channels/testproject/ user = root [group:django_channels] programs=daphne,worker priority=999 channels-4.0.0/loadtesting/2016-09-06/channels-latency.PNG000066400000000000000000002113121432260166700224710ustar00rootroot00000000000000PNG  IHDR$ iCCPICC ProfileHWTZ RBo"l$@(,*TD@TtDѵVD.lI]_;3|ܹw'wmٹ٨ 9|at31)I(9\﨨0eIZ_ry"Hĩ\'\a>N7+ !A˰Ml/,T6[7(I8 |U{r2\Ap4c48.np܅u^A #BBhB G\O C$$IG,CJRف#"GD3qkXAxjÛv*ĿhLH$fe݄#pDь e18xC|L"HZ$+)&哊H[H{I'IWH}dE>ٞ@N& Ke=+ga7H\ Z.+) ST)fJ,%RNiܣQTT4TtUW\Xx@CŏT5%՗:*ROQoSh4SL˧>(ѕlJ*(TVP6QV\\|H򀊂 [eJQ*CtU;Hժ{T/>W#qՖT;эt}},On^OK}PCMcFJ afd322n0>=7nոqWƽik׼I寕^Y6m=E{vYs?8c3OgNΐnn3z =^Fzt}O}F05lf99hcd 6ae0lhfgp}#QF6Ac}p wLL\L2L6t753M0]alL,جЬ9<ϼ"bE%jhaYiy 
r[m@:A0fMkuuCMRf'&O\?cW[Gl]wBڵڽW_s98,rhqx5joR[tpm_NN)U7]]\Vw%.r=-_Y{ܟO6̛kcC^Ogv^//W#zm{_70@b.58  OB 8,iƄV> !ED"#AdpQfQyQM!NR9i]z̘=1b}bލ3ǵ+OPЛ81qA$$~RK2)9>ywTMsV4ts_=#{33RR|fGkCU_f .y=OHߐޟQ1W_een|U5?sT&қ5gVOUnQno[ަAapMkN\agAeQ#9r  ko0 X,L]ضhE}-,ZRۥK.KXֺ\w HHXtsm+]VmY[|Ķj?\Ț5]kV#k}]jia 627o|i eʶmlo-+obeݖ+}*WTzJ5qmƴl'qgΧwuKn% j{Yۀ6N۽o_Ku%?~MЃm\569\u~ i4؜ےs4h[{l~=fp'('9YxrTl{&̵)]gCϞ?pLw]ppKN:;.Η[][{&uU_t=zύnN{{ۯ^}et{z?{(ǜ/|[Ϗw1/TL{%|5z7o'mz.Z>|Ϥ_,~ zo$gd$-dKlhZk%C7%K*(E?aL*R n1aR TKޱ,:85eC02FR+_##[GFdop*O[HPW'(Ǩl +m pHYs%%IR$iTXtXML:com.adobe.xmp 736 1186 1 $@IDATx U_'*3In1!3ʐ2+!\23\# 4rϲkiy^^{gMιwߪR4!    Zg    8Q    y fv    @ {@@@@ /N@@@@Dq    E@T^     (@@@@ 3;A@@@ =    Qyaf'    @@@@"@ */@@@@@    @^D兙    @@@@ȋ0@@@@Q    y fv    @ {@@@@ /N@@@@Dq    E@T^     (@@@@ 3;A@@@ =    Qyaf'    @@@@"@ */@@@@@    @^D兙    @@@@ȋ0@@@@Q    y fv    @ {@@@@ /N@@@@Dq    E@T^     (@@@@ 3;A@@@ =    Qyaf'    @@@@"@ */@@@@@    @^D兙    @@@@ȋ0@@@@Q    y fv    @ {@@@@ /N@@@@Dq    E@T^     (@@@@ 3;A@@@ =    Qyaf'    @@@@"@ */@@@@@    @^D兙    @@@@ȋ0@@@@Q    y fv    @ {@@@@ /N@@@@Dq    Ej^N@ QO> X*Ug֬YԩcM6z L2ž;VZxV첋:w&XSϷ~G?mŊFYzYV3f?`ڵZ+6c 裏:k-[̶n;b->:"  *A+Q P>h{gJtz./_n~կ_zYƍ_Y f/l{(rt[Cgr9rL /!ikکO}~.R2dyGwa @@|d#p+&MxM뮻` [֪U+ׯ%%٩_w_֭/(bי3gs9æOZޱcBPjguZ*q+W޽{g Biٳg۶nk#FH܆fܔ9)>>˰:u@@ʁrp8D ߿uaYI@{˜B.̶j+{7mf;\4\ (LnmGa~mVR?Wـʨ˦x6iҤxQG%.wVPK.dɒ"# AQepHh]ڵݏjW_}AVzfJ ޑoo%z  mU8q}i{6xe\r]lRewwG7'L`N?܎=ش #m@@șQ!r#¾z[hѴVhR߮]H*lM+=Ueg~{qQg5jlZڶiif Tm6)}ߧ/͏O>ƍgw;:y `Es=u,ÇaÆoG8\033@@JM ɩ#AA˖-Cć,\wMT/tX ܃mSM/¦MVݶrKlwEҹMi0i2uh[nkt.Q.E`o-Z 6 eҥK]]%Mazz~Q? cW5]O?5r56lOk5mŮZjgܹsnL֓\|Cǣ૶!5l85tKPW֭M6o|S7^{?ϤOm˿!Q:^ޙ9GxCt=+Y 33{!*i 6śjX6pC;3+ )p5zh;蠃Ô!1)+%x+-9Y){9"^4L4Z jA@XdfutJWT5 u9k8M 5+ΐ|0Ewt)CUuiQ,Vr=ȀtC?qj8eO S?W]2?D7^wo?٪U5]נ89~0ѦaeYe3@(%+kw^GE7bFTGxo;C Zq+)diXWټ/i[%sT=Gy$e+SթуQmz~,O|-v U\^. W,b?vdY0|]C=4M=әY.]\a|եmT SxS0*Ȁ]2ՈzED n! 
I-Q4 -M 8U6MQlj8%UNv:o`HnXKu2]7]Ss ND駟l}I &x=ܔÌSFw&S1uSmlZR%kw+u2;*dޥtӶ7~ƌkzSA^J} ]S~:0@(5F"d3&Q ӋNRz^xa09  XSiX%ߤ!2?¯a;IWFTF=\jŇHO97} 24B' [ Ĕ~7ONR!_:W_}5mAAauФA0A悠~Yo3K\sMJ-h{Ӷ¥=&MfjE[dM맡b9uԴ>RK]V+IC5/W-KrTI\|Xm% }K2JڟcqZұ{G}tiՁ ~.~ ={/;ꪫ L讘F@ʕQ+!P9jJIo{ͣ-xȷ}Zk+WhSHPk*5spOzϴ[4\$_-?f 4/xuol1KS_w{϶mۦelz>f8!nҤIiY2Z޶oI33fLZEGmsΦ̻h{Gae ޸,eě13ДUm߂:_nH_CgpH:ye~c 3>a&Nhh6]#1ޢ`Za{vm7?x]w5N|>yM~^}ѯ ޏҰR;Sfkȴo}t6kH! HA {;  Pn#@@ P;7fm`)5 F6@W_~fiXHp Yji8PtIb9*4G9b7}w<֧^5[k8aqcAs{?O+~omX+B~d 4H͒+_6md֟:& MdMtf\'Ӱ/ c=R;yM6u)ԢM ,IqlwCU+:ܵ$:m-t [m:x 玿\٫WĠY8;u28Lرc 4&:2C{Oq9眓.X \7O=v׿l޺ЀXҶ 5Qep<^ot@NP(_ԣ AX?zS`#HjsT&2T/QӃBS FpW5ry|̈=zg%~q)RX Jgm\ VԨQ#qSPleEtܪ /e/Eixm6˶U0(裏]-5 πmGq)XԢEȓyYXR_q}fLu/+_K%TMu$S4i  @yX8Ǎ@E&eYL^ziکв+yRp@*r1f3 )P[!/@]Ѯ];KUzkb+Gk2i\IUsoG':hӛ2KjԨ*AR0*7wӧOw3 7oKA8W-v=ygӊS?> 7ȕ1)2T>&߂hٮB?@@ *saNzQ!WYNzkUFŇFTCzѧ~w<СC8$` շIjzxn3Oq]7.PIv}i*S铸;e<[2M,T/?5w-WpaÆI {Z) -;jzmʂR u6,n &%5eVF)Ө*Xq<ߑx`))M?|6(vJYL]t5\Iֽ 6ʤm0@(-敖3벥>h5ۤ,.W-4=*gu[҃L_6+XrʔP1FA R I5TO*ZKuπjժ;/Һ)yקq ^md5fw:xo@#F-{Ґ୚_F_o[,x0K}L%o%}weT-^l#ݺus?7STLYs >! Mv8J)je$5=k9`ta&2/xQCRֈf$|[2N=Lѷ>ICw2sTIo ^r۩Ղ5kg{a;>]:ʞЃj)ب{!u*I@#MR}u< (GA @=&5|u]|,7@?CnHW<;PM$ hYIkća)aVK?LF>;)STx'Mt+mrSh{7_t2A-]MǢB&VZEgmZPx7n\^qSɧ 'EH/Pysٯm+XNC@ʥ@@CR8묳T %gG ̉~UZ[`(x|SA0l+_P73cƌg𰔲 /Y+ݴ~P, tgYonEσ@}?R-RmGw=!"K/4  Pn_i PJr=J=g0'd$Ѻ?xA!|жQ}N||w+(2?(8`W܂z- zhZ?e`MA@޽{OzxDOR K;f5+HA~:QFù5}vnюIH W^Ǥs/ "zÇw"~ڞ ~ ~qA`ڞ}m\ѶPیߏ~2 L h߲R`+~ g6X|cY87qى(Mڷ?G]`sA061(~A@(e +@Cia(9$eD>{A2eo`(VMi2m#:?4PQREtM3t4/sgď%w-C__I{#~rK3E CҵOK8>d?) +~ ej E}u>i+?l?)D7{39g:>.+2ފ{)ϠT|.h)4xSiiM=]@TylP k׋&4  P;%<{תM I[O5elpOٶAY=ȠX,7s3V T(:BցG|' "?7N9PuiME3A݂Fnjרht\=e~d S*{Q7~bZ>;w+ef!_toü2P0OjA@NZ<_~tMD:u':_~~-tZu y<^t[{ ]3~xayk߳`8`esduG)`p)b> @(e9JƱ!PҴVI2HaWZ7(]p'')S'辕<|&nGH4|KCvmjIp7= (DzECĢǥ\ܓDA< q_|)~Ž?~>=~]}Fkd\uU):A!nW&\$^xC&nGC1,O͹<8꜒2kj EMM42:6d&\OeGTsIO?t:^Kj_AB ul2`J֙e:NOe4kgZS1m3)#JC?D7uiu]P׺u묯2GoC&Dυi@@`M ZlJ"@ N ;2@P$"tCբ屠`{tӕP@T%$@@b BvAg@ʶ@0jԨ!q OK;`Շ!  Z=?F(1 Ct*r-3-f>  d)[󲄢_@5hu^7Mh   zVϏ@*dɒS2$!y*Tlz-i1*Ǜ@@XyvNB,C.? 
6qD?{ӡP־}{&H2vŎ/^lf Uo*lժo™L  d     `h^.5    @FQiX    KQd[    Dea    @.DRm!   d      KM    Q@TF    R@T.5    @FQiX    KQd[    Dea    @.DRm!   d      KM    Q@TF    R@T.5    @FQiX    KQd[    Dea    @.DRm!   d      KM    Q@TF    R@T.5    @FQiX    KB`u konfSN&W\ioYxi[.]l*U$pB{죏>?ӭ.X׮]muM\    *|An6VX}~x[{' mTAs=ז-[L3뮻I&)?c߿7RIMA+:wl>n?lcǎu    {SX 1cl,i|i(gR jժva}4~xwŊ+f͚!jIϴ=#FzS    @ND唓W`ڴi*o]ve7~z뭗%˟)ICԔESVfMkӦ,*?!   @xk^Mb1\rmv7d\[K{v?:}a'ԢEͷjUv>3 -֙    PbQ%c\uQ|^y湷i|ٳg0=   CJǝX@Cz2ڵnEC4$En@@@@ dDߜ=X@AN:)Ygek׮NĄ Js@@:QNǭ鍏k5 _ Ȉ"O?d{-\НV&M;0AϷjժUV?g   yi=;fw1c/7ۭޚƍ}p4'y)sc   PfΜi u릝zެY3fm\&OH뜧=&X߾}mʗ4ɩ믿n7tSΝ;[~x(ʷKɔOeVT4*_@@@-ZdZu]~aԩS|Q޾+p6j(˭VYy; #B~bVSի]tEA(FLjG}f)B_Vb@@@\Q)JP(]z'Py'wHFTHDyP뮻j*U+vکCoԨmg)ݼypqټyZ L   d}i˕}teL$-:ueF Uqm;3Kvm)!u>h<}Q-(anL>4LSO[n6l3{wꫯv+Am    X"2:M5~7{ǬO>i믿ŋݲ[ϔdͬ>+K>@#dի6gF=CΘ1 Ӿ6mXŗ~RJhKu|kѢU^*e'9|/“Q`{ ',YQ뮻.ӻwxw;SAi    k3xƬ\|AwĂ?Yg\υʬ1b hV[vGXR lvYg @_oze˖-Νkw-9ʇ/.Agq^R峚`[.RW\F> *zږZj*,);}t74OL+~5XMy;ا~~Sڵݾ|wqK~wՂRS+^lA-4#tݴ6Pah^   I@YR[ozO>a.YP>5->S)5iR+܈;͛4iRڰ=-Pa+G}{nZ(CFTFFTEœ   eL@|YT`G~ڦNꆸi^_/;zg6r]Ip”Ք]勡8U8t[@={v8{֬YXX%]WC@@@ Mw$Rƒj8SI +S)޴L]vavm6m$bd *z#Uѯ0   @0ѣG#R^"vM7T;cǎ.i6?&5 -6;o˳L :#yÂJ/ laÆ7i|Qp]OZjѢETiĈ֧Ob-|7[tigo> `6}tx2dm&0^{oO~C@@r 4=zq[S?3[c.ٹL Bp};7kQ-ͺ&NYl[ئX@xM?5eu=Rzu44h V{.cŽ3gN| ֍7QOAhUۍ/W}ػO&M.]ᧆ ^~ߟ"wa69  PW ?WI"߬4|o ?Ev@vO5OQ` 4n:u-zc 'Fs'~h_߾}Mo5y-T)]@СC]`wң52}Χ5=QuԱ;'/kg7?kHF53LzOG^)۩d<.*( e']M[GC]VjQ[?{_CbZsߖS-RCT[ (oذa;9cw .4(Xq>_4+ck֬Yt|zK*Sch^eڜ+  \@<&X&hYJlR]ԏRvZV/-E[jOE[ݚA^0@@jժU+IUn]OYlWs+RcJYҭ   PF R,Yiz\Z9$FM.o]4+lto_-wg~ZPin|A[0ߝB@@ʃE$Cd@t2j]cc&s.Zi׭-]iSQb9@`ٲTf6A *@@*[پ7=U\K-ۿԎ8gϙGj,+B_AW6ڨ"N?Qs   K;Ҷ߲ *5Jk07dTi১rl bzS)~bZFD a!  @8|q: G|]5@CfT `Fr@@@ K/ų/_&͚ZյXfe!C@TΜ%  @I'S%nQ}k\]?' 
@ @ @@R`ɲUaLM E@""L"   L[xuQ$*XjHAO.3 @F   eWשYO3kPr@ vi;[_g6@ [*U@@@ {mx̹Q?IpE<$*@IDATfgDѐ[O9 *\]4 PDUϹ#  P VdﳨS^N3f/>V/~ >\o5Y@@ּ@NJk`mZkj%5/Mkl2/tRol̙K/Y۶mcVdȑ/… N:֩S';AaL(q5ؤI{ڵkg̮ [`]p馛fs1b6|m: k޼> "ⲙ6mw}{,/ΗQF٠Aln5뮳]v٥8{_Qy'g   P\*ydO * ˛AW^y=c;СCmv1g֭͞=;}Yne3g,ѣGg^v] ?O?m]vuz۰aÂuS좋.r|I1cէ,}馛]vβW^n Xw)s%@@@(SN^{駟n>+I* SSy.ӼC9v}wM#%~|y䑦7)J->-i۪դ7֩իW<̬&m/:믿v?p;sݢڵk\2lϯ1fuZWX_|LaO@2}MYPzSٳMZ|>3OBڿڜ9slذazgqzڵk粥TMT4\YRj'N % ѥ+ /Əo~[N;[?\FZ8 L*C@@@@< ,[iQf WQݦ5TI?IMCӧOF>~~gWgj6Oe;>`  vrg2|S}7ma'CJK"   Pʹ;3:ujOz(kJ=z KRP'ڷ$5jpA!dh)P!s~YS:;Iu-05~-?NyPMXzc^i;U&Ɉ*@@@@ P (+JA޽:6}tLoMC|Iu]v5kE95$5Ϻt)ӱcǤn)||]1s )}z0CIo?Tm1cƸu7oV[+echpL۾S ji ؇~h?*dD*?;G@@@ uLvB3zN9{M~ufQ3*ȭ N>i_ڏoi?sǠ:tI'1㲺մpBmРAn YpU+ԹsgoqNxxA7~z{Y Uǀ   @9p 3PN)gn tMMoEĕJl#5eXMgOQܲنrJ? vm]-)-TPIoll[n%\ ď?{4C/֯_pݐHU ^^zټy=A G@@@ %iXRr:=ؐ!C(ժUuSjժEW8:OI27=veEv}%lz]NS>G+cC)2}u]_7՜6m8 C=uKaJvwZ~@@@bt=_ױwO#.m oaqƹ ,N8ᄬ׋vlҤ'S1 e14]@@@r(9\6:ZWznjeVej*\@@@Xʊѣۃk-U`̘1myջwԅJp9E@@@!}ZMYQ'On> 4$﮻JTnNd5b嫁Ǫ   Ǚ kZ5-@@@@(n@@@@ 3;A@@@ =    Qyaf'    @@@@"@ */@@@@@    @^D兙    @ O?FݯV\iW_}q6rp{;nVZ/U@@@@DcǎQFw}WKs˗K/doM2%<_~ŝӘ1cye@TyR'   [`ҥnu[[klv]J,^8W^&8@@@@(xswQj*ΪexvVz0Pر $U/    @y?lΜ9l2Vz֨Q#SvO6o<ӏZڵq}hx>-[zI>Hկ_ķQo6sLw~-ZH;^-S56mZ.CժUI&:$SF Oh"[`mV6,Hp     Pb>:{gӶny) &X~l)묻vWWCUW]_{=\={Զm[Ss9*?O>)k{ xvg"=kӦMK|6m[nM74e|6`?~|Lf'|;p&d|[N ~gKwߍbz Sp+m54@e   Tw_THDwCݺus, uQD n=zp}T+]%{dž}&fdv.EɽvD_}uy?W}''dv pB;mܹ~Vʧ=R:K漸<A@@@-㏇A(e}.`ӥ^SO=e (t}~ɓ'7|cÆ 3S[mŊnzܸqaJ_x:u;>I6$0t-e]i(B)鮻 iȐ!f{Z)HvE˔;kfH:z>5x`/5xmrz뭷Ҳ܂@Z\ #pTp=VZ(4NM}^ufm0͛,֭[GC|Ipă:Ȟxp>'e* LPj9rV'q.(OZ{v׺ZXChehi4ȭe^D>F@@@ C K)þO?%9A>8 ]mq_r)z{}6c W\Q۝wRZU|!s3'Rv!hI3 c-`RS nƮVռyO>w}מ{X4$Po;K]@ >   XsD G^ L8Ѿ+ilM7uu=PW]u[gƑ-p݇:*RZMC|lΜ9㓃)#~mۺt4uPd]չs+>zR:Ǘ>rчyPd    *2j@E\"I<4,Rr5m8x謼NX?j:7x#m*Ʈ"짝vxnyԐ!C¬*0KTFYge}uoSoմiS?T? 
D*?;G@@@.GglT(7e>Rzlwwy{M)5ՃݴԎ?xֺ[nŦLblv ;„vgSO=2a:fߔl:LH/8a~'p[`K.\hwM>-Wmx0vkbl@@@*):t &a9`(k|fӠAL?mqjl=#vI'*irբۍos6r0`H [Ϟ=M6nQ#cWWZM-ZT㴺gUף_|e:%  ߿ˆ˕|Q?Laݻww_} *}54~n;Gynݺ9F-*AVo3c_   Pa:Wa%DWG2!P4nkժҥK]$YaÆaşGD͚5s}}`H}x \ѣG-t~%T09)K( ihB5uuCKo?2d$hЪ$:!\m    @S駸M<=Z[ƍg e^i}/ AJߏ,6QepL     (h}u(՘R~̨iӦ5\sk`RްWܦN֭,@@([ +[׃A4oK:mƞ}ٴa}jY_ж1cƔ>2ޑ2~8<@@@0S7|;SR:urYR]t)4Xf>j&ek|rkժU^eGFTe8@@ʁQ"q @keч.    &d    F>    -@ j     @6Q    j ZmB6    l    V    @\`Y|VZe+WU+s    sK~g3gδ^zڶmCA#G/l .:uXN J2QPP`\sM4^]vVQ+ \`niVE;M0|Iw5kִ69|ͣ?]ڴi;eyeR%PD9G@@(z+ők,z/ÕW^i=6tP[{݁ϙ3ǺufgNܔԴilVsCˎ9W)#Ν)+Kxk޼5lS>ʊRӐD. 9 )?Lzoiz5~j+٤I1=+ԯ_ve}W( @@@ӧO7V(]rՎӈo9C=J|ἤ o-ܢ_5?ĝԈ*CK@e}18 FR+Wn*O*DL'_ouj֬ig'|b|"   3CC<)5k֬pmZM5Ԕ>}gi霔21j>{}Uotm~ )ʯsoH;f ni@@@ jwt?uԴ`|}7x# JEFFm]iiqPυ_J;L9on޺糭}4dio׮]tQ8_5…t za+i)sI*07 ̟??H"G h @@@ ~TӔ P^p׽{w7jEo.lv kܓO>kTr8ORSK.njw1[l.((" "Tqq7uY¢#BHft[nUDS9: =UVݶm9yhN)zڵ+"oOet1ZSjʕ‚cPp\(T߿,[\kf~tM ym޾}}ּeue^>1@@Z?,߼y9pUˡCD};G::H3\YĘEh`}z0jΧ{ܠS4U-rou ͛g4GU0EI` (3ϸrkҤ<旗'/ MHgg"8 ރLArL@@)٣%zgt˭*BZn@@}"5 NYiX;hO_St>B:~<\+V(|۳g]T),Znr=H߾}}ng  @ ʓ&,8\#C]FyTt_Wy饗܂PN;V ๛i;zhP9d@@@@'@zzIwh|xjmUJSP  ( 9 PH yqZ* /\y6Єpkf@@@Xyy L    $    |a         |a         |a         |a     a    6N?GP"`\'    P*@@Kh5d٥vQOܲP' S̙3eҥvPڵ\{ұcG*{qI婧::.\,   y'pyW^zL={yʬYJ*^ueJJ|7)=X^'@    \|Iy*T zԭ[Wdɒ%Gǎnݺ… t>KQo@@@8&O,\sڣG9r 2LKMMzKz!M~-ԩSG,X aaa˯,l!U   @NGߖv΅RJ믿nrD:tH}]qN:%/<<ܴ?CN>m\˗79w}uʹ|1 c](g9!   @Xz8pc߾}m۶<%JȽk4h@̲~4UfMy7rk3DG8h~Aׯ/]t1:HzL^*g7o69rĹɬ4CݦIյ?f]jY4ߔ&x}%HVe%96̔*iÞ3*@2p@xV{妛n JY%##C4ؤEGyܶ駢/ϢaÆ&Ho߾fu *BY'xB֬Y#׿uݻҜWʄ dܹ&OsoIuq0,3"0@(`Փ}J%:wbJj%wזѷ\ ֊5bIXVKI9@ p4]ʕ+w^SA(IYuvk1cʕ+KQYEG3Y:^˸q ZxlݺU̙#̙35kx@ ҄o5Iuk|nY>4o\ZhUnbǭBˆBr#8 @@vCW>=Ұz uW6 c2wٿ'p:dd϶Մz@".ǏGɳh;vxVuMfS4?w_bw`ҪUh@Q?LcwyGzi}f>dQꫯˀL(ۧO&ܮr=9:+ M@@Ф}R侉z+Rlsnh`t[/駳dkv?ւNGA\B#TXk@IPU4HӺuk_ϻ^Gd:g@4[ѤVUy&Mdz2oH\rV]LV"s=4=g_ Y+lf/W@Ta;  S3kh&?[71SNdȔ9YJY$"yއ6mIv, I-[|FzXS.b/nOqӜJʥkРYc4CϢn< ovJGD6m\ eQA@]e醳9B]CJ#gdTCyǹ?1o_r,s#1[Q@ (ʕ+'Ço߾O? 
6lg!RreS#_~:t V˖-kw5eʔ4麕iӦҼys>13Hv ygL;9e9DR@@o/'}2dT\׵~k~tՔR~Wz?JIn|b1kkjEv_s_(}yGL;Sڵkg.NGxIy|YF~#7pt]#ͭ#G?0N*GS *Oz$ի_~|ח:~A dk>Ik׮msi,<n2 }\Q\WKSf7z&0#ԇ{>19{QXiݰe%X@\}ZkfF:͛7\ yѕ*U7xCիg舨[oUf̘a{=W~={iݺukMEG<ӌ҄V0ι>9rV2#s  ENw B}7JNZl+6BYgZ֨(w@g)t:"Wѣ 0i BYmIq:-w~h?iO׭=Me[ڮԝdɒ&uw;Ͳu}* >kߏ>̙3)|:R[VC\7]ώsB@UՐe Nbw#|+Gѷ\&:Jw?%˘7H夌+ُr8?sҼQ:eb>|΋p.7q;=jxvŞL:x;**J@Kn߿WsGYvMB篡=wͅ#|r zz@@ @UJ˲UKGثNdXf$jӨ B鎙d2T|KQGXUZQs ms0^`^/W  غ'IxZu+S9GI*G!Wm9&; ҮQQ=4UVb- A.@ *?\> S Ba# 嶫+J+4GeGgՇe#7ǕU DGdͲ6Wo״  @(>  @9{U;V{)yOU.?d6_٠&   05/7!#   E@_@@@@ 7Q @@@@/H'     M    ~ F:A@@@S,O 33S222#,h G@@@ 8qBv*{/B6lgΜ~zy$66k; |_JJJiwK޽|^ɭR?,[4-UtEz%cʕ+壏>2\D ]\wuRNYYYҭ[7ٺuҩS'gB/5LQQQ " y"jHDI煠Ӥ- Yp -缠GO>)7@IDAT\y h h)?8޽{O>r۴'o6_}1f i|\X>| ypرҿM宻:u`+L; =x 0@4h 'OvkQYA@@@ X6mdPz> ԧ~jPSb9iiinA( X=&Z4wY>{l C=$'N5k};&u>eԩvJiҤInץAKuվ}{iڴ=vw:|\Çs2|}饗YfrUWI&M|?n   @_~\M=^z9,55FcgŌ3PZ~In&3jŒ`vIonɓfӠѼy={ܹsk1]YF4[9zhD3O{2k,{n- 5l|;w(wy频P-tرY_4r,    *eꫯs=9)))I7n,ӦM˕HZ^{3]NҥD}չCv=T^GuС$Z_h7xg%&&޿~cuݻmpWQX:Ec TDy_vr-fQRᲊelVL"w@@@F@GzU&uq}zwcm4 UAs=?pEX:ʳhZ˗tܹmDGG{նm[]EB9 d}INNvne:?_(nG;5n;;vˣ    l:5nΜ9u[XXpCDD /GH:r䈩UܳYipQXu:?ݤSty:R|5JGyRJ&PW6ֱZu>M/ #r?MVԢY5zE{fyժU   ΰ3:tz7x#"_E;-[4.\cGg-YS4ҹ& |r뭷+b [ү_?2m[cY4APoLkԨY: 4ԩSlY`yw&IӬɵʞ={̢@@@`lZ4Kn+fΜ֝my]'o&ݻ./kA[-Z3ϹbFiݲer=sߢL Cv`?'\ԢtUN>m?6o@@@( 'K-\BVy inkŋ[M]G1S&M$?Stʞ&"?W 73ܦרy۷ORRR<:(n97NLrƍ2dQN)@@@A@G'Y#*T %Ke׮][Ǝk k 9q6eM|j^Ŋeԩv5ktڝѲaÆT\9oe7. |*7O㳞guUVdZ4efw\ Ƶ5Krʦͻ:x]s/Ws\QL[qJLL4 Ε[!   ֈ$ݟפ?4>^G_T8}583Py|M~' ^isoE(ב[p+PxڿNsb2+A@E~4Ϛh&&C   F@GhHB[l ̔.]O}WO5j*UʭO} 5H/wm%!!TkIDvY^GBiiڴ9GX5תU˴&e*C:ƍ;7ز+ty縱۶m7:m4Jedd=tj>}?6FBM   /`orJ3HsCm߾PSO>FVoF hpvl^={ |ʭ@-z}[s *i"t-: Z9鈫%K*h:2#|U͘׿V վ}{!չ\̚u*@@@`kďy6oޜct\~>AOR5j$2zh3N?cnOA%;w6js3e˖fVN{W$22Rnv>k,ӧ~緊4R4q qWZe{NS+ iiӦh`(qx'5*^III/(7~xv@@@dAy:$/v u{]uQzzk&V:7˻k:rj@@@ ~ֽ{wsBot:|EEyꩧ^ˑ93Ij@5o?+Y6Rɳ? *Yg=&&sY7oyCͽ 3xarMJrw… O>1S<,۷okך ܹs/ z6e@SVCiy} 9/7S@ ֵkWJ-n:=&޷oy*T :ζLNN6Or{=/?=O ibs%l?}6 uט1cwLqSCeѢE^Qeʔ}5k)    ֭y[o%HU^̮>ѧy>9gؠ Ϸ,[4y05F$L8~|T!   
K@K?ᅲ-[E@aiAQ>/*Q>nf饗c:ořnT#   :*zJݳ>kS- _dEy UVqo9"|٪, 4'| 37؛j@@@O@GE1BtڴiS!u4ԸqLɓ'39)қHVir?c /[ݫ48չsgiԨlR<3軷f @IoJOy' p1$+(E8h ٺuy>%Y֯_/82 @@EqSz!izv풍7ҥKe…s\za3    (/ZreYj 85mn)5k4=zٻwԨQn    =̚*t.VgJ,)u֕,!    r8%111*7xwv+K    *@ ʇ؂ СCfС 6L,Yb59@@@@[i٢/-ҵkW+aÆRZ5)]ިF(=['HVV(V,D,=(3/nb톪RJ 3ǑS?;  @ qc#͚5kJRR̝;<1jiRV`JO.W\qՄw@h\4V⼯#vNd*gI1[[XF@@ `DѮ/:I_^eǎrJYb|n{>}m@8t!%%54 {zUPiYf1Q)Zʖ~E~ }r"  P$D]m}Hڿl۶M/_.5jԸh PT&L!_.?%ewon&Q>5%/l{sDi\#F\j>D  pI.ODUTɼڴis =+ @aش3)x_];ud<.:*T-eo}BiU>~p\.J֊U[@@@ Pxj^Y @ 4ygMS얝qkU*.%CͲNKp|gȎ}'Mmy   rM "0{%r~mz]ltm_);@v\KapXA@ZQA{p@Kye9{EkmgG7 P)-   `\; E r冊/nPʳ$IʵO^'  A.@ *?\> toou DYvUe@@f`ܮ}2}tȐKڵ^zRbEKEn~lGQ .&LjU)a.MG<}am_kD'*"}!S^{@@ .ZHcHͥM6ҬY3S-[V @ @:^'C՜pMz!Wq6#U!537koT"  DNG :WYrY>SiҤe@h׸\q$nqBNGE R71:Z}u`GC%  $pϵtUx-N#Ȗ-[D}72i$[$111QN< @(U"{4 mJyq{\Oۺ;l {zUqk{K R>.Ȫ5w   ((/w522ڳgnк5kW=^G]vڵkeRB/Q (QR1n!veԯcFPiTZ<>uѥ~uE; W@k4k7@@XQ>nmժU 25jkRJ6l(چJ@unҒ%붟{Ӈ*#jǚ}6.-?<,[?RR3evX#  Lr{CBB䮻SOɁ @ 2]y. ~awao{OʀgBC[@@#|ܥx޽|gҩS'q>ZS  \N]Gt:Rrq"LWEk\p_  @Q 4%55U:PիK\\\ S S`e.g   g}i ]ʕW^irDUVMJ.}@@@.Q>w>}E~zYh̙3GN:e4A)+0Mfw?X~sС\{&PO>D.]*YYYRLiӦtu    3⟮n/T:vy&*U>,۷o}ŋoqkK֭X:ꫯz<""BL"*Tp۾b =z[R\9w0VC_NA{|΃s `<5uCWi޼ԪUK4EB9DVG4kLnv,Y"Çwn   Ey:K4p`rAlRի'+V(FDv}]uUy+pIymɓ'^zɃ>(֭{ʆ QFN` 7`O<$%%q/ ?@@@#\\1b4mT.27Ȼ+t$&׀ĉek.pۇu2l0;:rРAhx:}(*-z V}F):5Ob;   `DSvw۪U+뮻 ,\_ҠAy>g=̙3M&0{Æ em'בNǏ7Muds%JHeO?|a%Kt6a@@@ @ bڵe̘1f>-OG>mܸ}ycJ\\]ݶm[iӦ,ZȌ=:   \S|8pL Bi%K&okYfQ|ҤI4< TlmԀuEGB=Zc    ?FDyx'OL8Qw.|<#v5̪=tӄ Ln'$VlY`7lSx'6    #|G(O$ NUTIӆu@@@@ rbIxkNnP:%OG>33N7Ҹqc>@@@@\ORBٷoIJ>|pS :tJׯ/*U%Jm@@@@.@ ʻ 4=ZVXaQf3fUiFN5h@@@@!@ ʁ\LLL:xlݺ<oŲtRgs3b=    (ׇ`۶mrJ߾}ͻ>YBCC|>IOO7#6m$˗/O>DJ*܍e@@@@(ƩS$55U}] D͜9S_/M6uJŊ%,,,""BVj^W_}3"    (&VyJOY~jJڷooSjՒx 6    C@ FA}f]xx'ir-,III/贽;'KM#   89vQsD5nX~i;˚5kdΜ92a3MOS΢͛'O=h(     wFD\222LӧСC%**i{J_/JR۷ڵkE7ߘh*{g@@@@lQ6EBb+SK~:$e˖U@@@@K@%q 'JsJQ@@@@| >lA@@@@DyP<]XE@@@89N:%ݺu.]H-^zRbE,"   \(};c^֦ HΝMr:u꘤aaY>#   #@4<֯_/r;J˖-yRZ5g@@@@_7e޼ycG~Ae}rތ3I&V    Q ]ҥKKbb\}2fIMM={ڵkeʕpBٹs۞V     YQg-|.EGGKZ3=*v2$9sh;     
wQ]r-UaÆү_?ys݇    @0  拷=##Z,{@@@@ 0"eYNodfx @@@@ 'լYӼ,@@@@+`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    Q>`F@@@(z    0T#P222d2|9vDGGK gϞRre~QOdҥ%eʔ6mHn$22~l@@@@K u1BN7o{#Z _xj_9,#?=ZOn'0߱c|t{ff}@@@'(> #'O? 0\޽BqPN", +J@/wF(|΋= @#)d~y+WNͶ 6җUN8a-޽۬k)GA@@@ GCIO?4m%11Q+&#JLZ`t dҥNĸmg@@@#(8K> TRɓvGPrcrQrw*XcJBp9sF$   S~r5LGԩ-/c{.L@T+aW*    @` 0"*'W@ nXZ^@ґL}W&~>jś4:Rj_I\p@@@ B 4*%\ly-uUT3VʗbQ][:6-(   A Լ \"y)p<5CROeN=SP2ԨX>N2_u@@@S@Tpw<hTx_=aay:    +@ *x=W_,NDg՜P//q @@@ *3G\٠[*3<;m|Bs   @ *{ P4'㷟 r"CQ6L-#  _@Mһ-n_sF[KxX]g-t?)K])   '@ *9W_Z+_\l[@TrQM'u g2Nߓc})YǎH1ըX=O!g>~;@x!U N+Ys'3}nwnr噢 @H۸ZvJ;vbiɑJܶ@Q]T PD;#Pir~;}O'!W3enPUw˩'c%DT&zt@^ kaG@,! % d>R~Գ"!Shc;]3ێL]b;)VGOT#P|{!\䳀Ij̙arC˔=n|Ώo?|-CW) VotD5n"iÇ̙, Q~t|R@!@ *  ڲ^v d}y4^|)ڛX@O{A6C$,rS?,n@IDAT "q8I@ 8+A:~5- K(+q̴rM}f19Z2 ?tIa%V|ڰl OLBzkFVw5 @  %!@ ti栱Ww OᕹN5]+GR$u)Ѻs GC N]!%'̕ӻ#IpK\ws@  C|. -_ƵOA)$_'`"-3r䣩vJ'ac`iK7@T@T9@DV>A"kזbţs^rŲ]_p(yWN$sq՝\IdfKXw|.@ ?W%"QV-3YYfkd>ZQ@(n}JJJП2{uA-> @# Jw S ?^|*jҹeB 4>A*?}-?-Y@ .wG(l |@ `izod'2/z^'` .̣rrOrj:P555l;n*QYX|;y!SrM P&կd?a%jى 8.B5Hơd))5殑p2O$Z^+Av(| @TQzc,(8duWJsaKs~q/S\kZixb5OV)9:){Ytڇ@]$n' 8^r% P/Zv욎/uex^!8_Np}Q/tσwȱq${8gy:{5>뽞;ϳ@ K2i@͜~nf_7X!?mЊ/eGuD7;`@2 Uf "Nu3ܯ7E˗Zp٦2f )XZxRB8*ε& AeoMcw޴}ԪE D%@L/-&XsL!z)Qe@ W˖jW}T`  y%@FT^]NN[Zfnw)r<@@@ dDEQi@@@@ zwͨ1    IQlT@@@cDEQc@@@u?5q[xc݊Yn[}\;8Ws+܊_~r }-DmV@TiX@@@7t /*T *'\tR3^{#\p?$no@l蚗   \JznG^slԴpe0m Vvi*Z-W8g[9Sd{@@@ \z 3֬E\A~ڸwu3:~ FGʈ  Q@TF"V@@@#W\^L*լ6}ճGnu+2ƖZWtsWcǝeL Fm@@@ (R-J-b]6W.yV-c[÷ƘڸIqyV@TiX@@ ]Ygt=0em+bV<;}њV[3 @G&d#6A@@X.s{+Qmm\,/R۱" Qʱ   q6 Y}φ#@DE*QG@@@ V6Ϲ *% /@ *ރw   z#P|yp6:f(+Qe%~@@@ .cQ|- H^ۿ3/d. 
P:Qbm@@@ o* 8JՒ?J&:E+dP3@J!@ X  __~VT^CW}N-WϋeF-2rf  U-E@@H`ܟ_3gTPwfϸ\3V-^}:5n9o@ *#  y$hsjst=R]\շK>V̚GgΩ .1@@@*#q۴s5wW־m]?ܪE_n77 @6dDe6   @DVZtdݟ^r.:ŞYGO)@R U*.VF@@%ǖs59sKLzD۳ PQ%Qb@@@`=X 7!r D+y @Lo2ke 3*NeI о˝" sQ9'eY`nԨQnҤI&lw_wᇻիWS7@@@"/@ *()S\޽Vݴi4h7    ;;KT կߚ~.sM6^|K\a'TSj   DV^:*^~)ؤr9縣>Or!nĉnܹw쳏_Ə`l  DKh]/j+ /lܸܹs׫W/WZ5?_=(X   N@T,SPӟk ʕ+մVZnN-Z7    Qqd/X`ҥAZlLۄZn*jʕW@@@ȡb)0o޼bGA@@@u#@ jݸsrP<ukҤI9C!   @X5KUN`z]Nݱps_s}]R5v}GQ KզL=;$@FT&!G^jժ ?@@@QQrԻ 6 ]|y0.'e]sl@@@u%@FԺ&L(+/ɸٳgGJq8A@@@ Grn*ƈRIw)VUVI&7vk.3@@@@`D!{ 4pZ|ܜ9sjݿ7n۷VԑGYqO!   qX9P}2 0)=1r9 @@@@`D[GH@'|=CqV&ԥ^>A@@@ ʭ'{%Kfm}   e+@ l};     VNS@@@@(QA@@@@D@@@@E@T0s@@@@Q@@@@r U.@@@@@m{B_Ah]VB+ʕe%@/+Y[QhJPȅ@\} ?~]AAAݮ\pnݺq.\F|M?4h9׾}{WZuF8p{ի[oյm۶غ >]zn/wݺu+3(~x 7gWNV[;v!nh)iXPJƿK矻U&=}flmr> &ց@i۸8k,c?ׯ_ߵk~R9upa9$@A抯˜ @Itq9q >*SNu{l>}{`Y,Ϗuւymuc~]-u 3@i-jժ~v 2=  &*@m</k|tl>?7 ^S 62(XˬrvLVh&+QyZO@]r`Z_*kСCz;ԩ1ջwo??@ 6,gV6\j_kgykܸ;3.NcA{Uf mx@6m\q o̷W;>U9.JY dgWs|p\w1bRom۫ƥ@A 䙀VF2O?{֊GuTqu6`צM~?u?( G4u @־.web{x㍃.SL6^X 6ә;w@:a K 6>{ztP0m"¾HPR@6nB"@ ʷ+ aXʩ4FC==Psf-w˖- 'yQi(ȤZjWIqT4o,qoc]Cu]7~&?(ڷԫWլY3֖ź"9xR&fV@l۸Ne9;e_N:$;c>Nj0m ѕXULbx@ _D˕<eO7;f9y'I=ztpǚfX)Xdv\O;묳ZqccJiӦ9eCp I,J.75~bq|rcbFȶЩr-z?i$wǻ]4q)PS 6njoXŪeZ/$h&+| x[iuz̮Y^)Ы(XԤI`Lz7di?}*^~=/vuGQFv~3!=1-=3Iӊ Ƴfu%Mɓ*\NO2l0)k7 ^S 6~oϞ=aޟ~{GmJ(@<X3oPw<+ wEqk۶{ 7W>Էo_ؓ|K$'" dƕ hI: l7Yxov/ T (Glxf\ƪT&v֭l[o8qJSa>DY vsU IĢ1ڷoD M0}SO;9r}֔Ӏn2S?pA=z8(T@s1Wg$b *P6n]j ?M{= /.6x1fP 6==/]uhtX @׼|y&,ST7SN9%8[ ^1T^{57 h,K+ݬʝwޙtL`+no;g oѢz#m<7(mSRyl>KźȦK0}viqSWߨQ|WvB$rtͫW! 
hpf=5FQni!c,^8جY|uӷ銞Pv5UJ'z>vy̙3]T  4mԏupBߎFuS8$F7 ^+@iڸڽgVVvT'J?3;㙄X Ҵq;hꪫ|z8^gO˳_i"G +F}c J ?=NY7+zP7iѷ*)(X+-[L/{toN;jZV(F@mIYtp_Dߒ[Iޞy6F/0klڸ*}]6m|HsPWl+l9&kYd?ԩSg#WJ-ۨQ#?W_zq٤ Pf''٘Oꞡ,}oF}cl<=MI6lx͝DS4iX|gv'i<랖9ځe'MW})Jݝ2unf[8؅Oc:3t ɏ;0oH#L??UuwǎyJ7 ^ MW_ٰcƌ;E}[SN|`>CL@6m\(aE_v 8MO=DU@TTF`=MҥK݊+12ͤ|sOKt1f̘3Zj^L>~Rի;찤`&H#vrJrd] 6 }Qzdx:@mQ4ޥTh㙄XQ E]@@@@ 0Xy/UG@@@$@ *JW"    G@@@@( բ    @DEQu@@@@ Jt+    aQxT@@@(]-   DX@T/UG@@@$@ *JW"    G@@@@( բ    @DEQu@@@@ Jt+    aQxT@@@(]-   DX@T/UG@@@$@ *JW"    G@@@@( բ    @DEQu@@@@ Jt+    aQxT@@@(]-   DX@T/UG@@@$@ *JW"    G@@@@( բ    @DEQu@@@@ Jt+    aQxT@@@(]-   DX@T/UG@@@$@ *JW"    G@@@@( բ    @DEQu@@@@ Jt+    aQxT@@@(]-   DX@T/UG@@@$@ *JW"    G@@@@( բ "|rWXXX. D믿\QQQx6  @9N@%Kߍ;-Zȵh]xnwLXs|]znĉn7^|4h{G#NnذavĬY֯Zjnݺnwv{{Jr~mvWX˹\,+Wt}u;c\-K]Sm٫W/׬Y;;w|/v[ouY @ +y!빀 ڵss $toѣGLVBXw}w\JQ&SW^y%d3ԩ~r;du&M9{{UqW\}{z)C9L'2kƌ_yg@U@T^Y X (eu?~|@n`O?tv';+ܨQuM7UVʕ+'}G2e~ԯ_?xo?{g[e]uUnsz~I'y."*fkٲe0]߭mYCL>ݯ-2 @ +y!e܈#E]tQ֋1o;s@(.rZ]{+X`em]~C\8kU.tsKx<\AgVv[++iwlkڴiuf~>vwʹN8~wu6ʴ:@[rb 냀. (HYP* +47ʞJ… դIWjդ[~7>N7`__QrVFp? ;2h`uі.]#إ V*DXڬQF)βnmsO:Riܸm*egi^es _@.{"M74eqTMUdY~~؏~6|shU/)Xi붍n&=X>Ce=eao})KOʦ>yE@" W!d%`7˺Ս&UA+={ۇz5lU77pC0FMx.t9rSTƸ}Ë 0{ѷ,R`U{ǏdU;õm6k BQ4E9-S >|vjW݃6az-RKEcYɦ裏.>{|G8ҭ[7\M`׮]s?ppb ‚&rq1ԥUmȊڎ5w]wƴڳ*j[z24? :koZWᢌ&wړHVTfVT eu~ۦmy(kY-XYÙrZ_x S햸O# UQQr@ ƲѠ*zrgqͭ2*P+!X\52lذ DO X摲o'u 1ve?o`Lڂ/^7 \ aOve;)8 (V,`j2SLE7 J)IE}`_jJѶ6Xrr[],gO.;y>ФutN% joE]@2Tt\:uaN2s˿I#׫<[ JYWMeefE\XVZMEA( vm$5V4+B\Ufm__^jΖ M[WpKI?}>A[hE,^Y;c0vyh=u{}V n`Yb@  t3@+駟{tngr>q4 (Hg*H,h:Vb n4U4ZqKRƒGd;1c\Q֊ObX4Xz٢2a„ ٶaWs zV[-WnÁk&ؗm@`WQ&PQI:eh# 0)>يDQQKREE9_y8.~Ak{йY|$+V栊\mૂyE`?>^wp !v1ì}r]{gO˶?Ӷ[|l3gEA_ ټpVֿ닍f.-qG@ UZq @:8+{AEI[E)GeN7Vt e()FӫY#]l]ł4mΌusӍƕр?u% ghn~cP'ώW:i !4PbQ͂l6>,%SLRvUZ|?77\Q zR0DT*z+R8˺)X֭[NAV@ME'')f] V?m}>(}*ڇT? }&Zt{Hd{[% Wb\\g7$uӸE0an|mO,8k%Uf6~yR~5OEdɂWٴuWL&U?;'/SO83h3ߊ=e" u2~? 
@F ^z>}of5_ORQO* (umc ˞кI,:Fx eeY$뮓m{ݐ[ JYGV4Nrc&ڗ)C2ZY^[pUA>=OźiZ GEY["7-ZՅn OΧ9r?ek)kDA a׮]S߬+briq[+GjLԽOId\\9X7R3ѫ) ʪ$fY_^?GmU@'OV۰~,pk@z6Hz0~oR˂ٴu XoOJ, ݩ(`,(F,+Ђq:Vi[m'8@@@ "p" >TpʈOL\vSd%cŲt1_l^սKc5(e7cXwRG is݈[4:ZfM뽊2z9) 2,kHiY-YI8H1{lF᳓n-ী]e%fC)Hͥ O8;gرV-}+dEA [Js)8sl[]={ی^ J]EmeCUl,&Fz`eٓ$]"ame8j?6kXv~88~)vٴu0)n{}>[/U&`nXVieuW'L6jɂWW@.@ *W#и@~|~=K]E b')0(c2(t>~x`ͨQ1k6IJdN~£~ j9S׾t*euELuʹ|R0O]ԕQcKYƌ_]4%9mWuҠ:2PIhuӱL W`Q$OFx, d!q,تUhTZ" @wQy@(k'js'4mv1{B@RPGbIDAT0FT f#   Vydo    )Da6    @nD֓!         [O    B@T f#   V@Tn=    @ Q)`    [Qdo    )Da6    @nD֓!         [O    B@T f#   V@Tn=    @ Q)`    [Qdo    )Da6    @nD֓!         [O    B@T f#   V@Tn=    @ Q)`    [Qdo    )Da6    @nD֓!         [O    B@T f#   V@Tn=    @ Q)`    [Qdo    )Da6    @nD֓!        vS~IENDB`channels-4.0.0/loadtesting/2016-09-06/channels-throughput.PNG000066400000000000000000000602331432260166700232470ustar00rootroot00000000000000PNG  IHDR)&sRGBgAMA a pHYsod`0IDATx^ e~TSuNTʩJUNIVԘI cP48 "7 2BDF #FT˄#0AP@@)7A.m޽z^?OUkZW νfp(L _œZMcT+L ߷Ϲݝ֮\ W!+@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ƍ uzꩧE]uUYlwv:tWm}'NpWvwۻwoٳs=ܯ&Mn/-PׁÇW=sg/pfr>l3'|wuڵ!߳ɓl 29r2צ^miq&.КȌ@~fFAϭe8]O?tZ37Q9O/*l:ȴ׮́<|̞=r͙sg ^۸l76mTkSl8zhMrEで_tV*;v?~2W_}}[rVZUSNgzjb wWkj@~6@޶m[W~m?ٳ_ TT;9#.YPgܹ>(JݻwwCu'=zŋkFr}GH`;N:ROk8z?҇~?__S6>~͙3gk nվ X)-'?ˤ9ZWZ>%m\T[6IZzMkh)p)x#W$}#h|lGש7o޽˾fZ\5j]D?+3TP{ο]cFi}D_~f_]d}!L^ EIגZ0\w ?ZOsh=Dm~{D=~h]W㲫͸s>m|M̺^@0/`}V!Dc_nٲevK::=CS_rҥ۱cGGA=q\Ze5&=I z+GS4D5ouuuСf hT[ИuD1t]Z>ƀcem5鱣IMroz>Z{7}? 'ON|mJOt~B5G?II1^Gڢ%}ä0駟EAXMi\UEXǎ~^) U/R5kTn5?GKXd T{PcV竮oCuN2y#."ED~z)tKҥK}%УTx۷hl _ 47z#F$貍9?g]>aAEӯ [n^.xu?I5$Mj|zz}ZzZ7|ǏUcm6_ˉ':&Qz=SϭeFQO}:=$,˺wCz~L;vt7o0t_=F><ꫯzmӄ<$>̙3ϛ7_'K_փև3eK$KXMa}ۅZdye?A |-Wc]/LAxo5.Z^6Y?g="?,֛;-uhmH==v 뢏@N_h8_^x[vmrVPW0 ӯx)K7%/A6^y7|֐]*}tl .°xQ#23 $^-ePW W0ׯ_yu?N#LDN j7I$F#:%u-}|M_gyv)FB x'~0Ֆ)\}| orԥZ —iы6fr:=RE{G! 
Ce:è!.c9>::@EoF{눂oouCykԓs=~uvp[3y!@ק4&\Xϯ@(Huuyt?]1z76^q9RD'k@tPg]P=nBqҺr ϡ){[Xc=vպToZ}j ).<~ZJ.3N]؟@o퉟A ʡNEG;yY8m.cE p[-¨փg&m~}CemP&Zv-w~~1("9@ Z_AALjD_՗GBMBc/2}0))J4&Map̚ttp ?I1줥LQm4iذa,GY6ISXZwZZpTjG=Ѥ גnu]%ɺ% BvLz j%|㟧- [I4ld}t{ #6~e}q!<6^ FG#,=פl_}uP}}{Pr%ĿlxԘ9u ` F6?dJضӎCeֈƗ141qun/'xI@6:z-رc`^8A`R> zMzN?5zZL4~G0DOסyGE]j|WWG6t|W[WM/z z-M ^4k z_.\q&|>OQ[>#Yb>e":5(t_}' 3:/wP`Fb\= -O Yn3X{DZk>}~Y z~҆t|aZh3fn3kVI p6)r(:9LKpGqo]߾}}H?~{GԩSݡC_]@q `޽nr+VhvyϞ=nnժU~|nV}P8z?~|;w 6rz8)n߾ի C 梿 vp 'N:CG;{w꜀j}i< v4i>} KIWrB` Rzڵ=CK.qwӡk= -k'LkLYϼzzCMmizr }?]w?ۍ7~ӟ'O7]{_CV~_Yѱu)SC3r{hmPS[ZKnyRV5jTp뮻v>yAlڴ <@˗/oT`?iZ@n-jjKksG<)k<ݦ_|ի[xqo?tkֽ[.]xm]|nƌرc\ 4ȵi]}nٲeY(>/*ZUn9r ܯ~WdrdB i5;z{YةS'f͚5:۵k|A#5444~W9sf4Tf֭M ͛_[ ֭[W@LB=V=_IbNM=ׯ__}ܴcgBN >>/Ou-[tn=3?@LB=V=[@$KH*z.=fQV:w~m?/rB i5z" qzCMmHC G&r{hlPS['ȑ [=4rdB i5z" qzCMmHC G&r{hlPS['ȑ [=Ԗ2snڴi?u\~qXqʜt'?]r%駟nQ:А!C\E&MUvrdB i5<|3fۼys烶N@c/K6+V=uTww`t|M<ٟHu9_|_frdB i5r:K+5I י0tucǺŋQ۷owݺusk֬\S{_}tJ _ݻ׍7ΟHYl;ydmtb (+8-+<*Fir z$Ms~r$.OtmRiQ|477 .664L"n` ܖS˭P.H!P@4@SC/B+eAݟB!3@4rdRZy7Hcz,-/Fs]esV}H۠{Ya\ڨ҆BN,NکYU@4rdR@B=[Ej҆_㤤-[~B$-OtעZy YVڲi^ھ:U߹I ΋@4rdR@.I\^(R`|C7-kzt|6d\0VkcAylynȑ@LE=U߼{iʦjF?9=W380"l$Myoh%iE.&-S|meM꜉Nz/iȤ"ճV+Ɩ6^4˓e|{^Cj^My-[4e#\!#=4IGQoe+X$ G4=ptҼe6j;iȄ@n-EzջФPǰteNꁖZXKGQ>]s5okE͛wVL #=4POѢb$z"lȤ [sO_ܐkwt3?]W4drdB g h5phհ,R֬e/ ljk,Ȅ@ni5msybG<)k<ݦ^7nt{vmڴq_|1c;v옟;<ߍ7s=1Zc=zp#Gt\p{ݎ;ܐ!C\}nٲeɓ4﮻J|Z˜'92!Cco ZLStv;q :ur6mr+VpwsGucǎm fΜ9,Y.vW_/1|k׮SPW?_2@LB=V=L 1G't(~'nܹ}>644.\n]׮]ݖ-_sonq W/5ke֭۾}'y^{m)6-sȄ@n-jjKkճ5{.ȇѣGp^{+0_r[o={zvNyy5&MDǟ/mF G&r{hmPS[Z~ yo%qMWr MUVҥK0!#f5$c]oyvG_cӞ/y##=4PO{-Yϴ^z{EC!tm۶^;Xjz/^/+fӝ&~c!4ډ_l|M}-sȄ@n-jjKk}&!א"+Lk^GYAVXY.?CH'A(+ƍ;rٶmz>=_(,:"vm͞2@LB= CV䑌@LB=K8믿0!7oqrdB i5\I/a.:JΝ8pg<~Ԕ0)92!Cco B=@L8B=PO!#=4PO{-iȄ@n-jj D92!C` Rz^M6{M˵[ׯɩ=SSWS9ur />ڑ_'4ǟ&;zCMm)k=u1cC*zϢ #=4PO{-eB ϰ= }8Hڵ' njst Վ[޷o_Os纻 zCMm)c=f $ GǂgqN }wk=3ܣ>ZԜƍdC#Fp{\[,rdB i5T =3ȵS)tEȑ#}oyz̙z~?4fΝ>رP;wȑ {[=Ԗh Y \GmPkZϩ:th(oqz@ G&r{hmPS[ȣ=+NA<ϛ7!OI:sV!Tvm׮]@_rdB i5a'Oz{(Icȣzȳ\ 8p[jzzՃ@n-jj Dy(4G<Qa~~D|W_}[lYcf*+92!Cco b)ֆ 
<ؽ+z:ur6m]v;c׮]:ü|| -}׾}{?O7h 7sL?OЯeF G&r{hmPS[@XCY4VYZBvz'N胶)Yϋ>W<]׭[7Vs zˊ@LB=X mذaH.Ay>L@'>o<⁼Ȅ@n-jjK>a|Wq5Te׮]<)/Xp+g Gu3gtwy)ԇzCMm)c=<`&<:\uZ }9-[]~W_u'OqnƌG\*'92!Cco Rz*hӧqHʙ ӧOw˖-]ǎ/Zȍ9zCMm)k p]nɐ ȵ„ݒ@0?qD3i:th02"#=4PO{-e rNڙSjy! SPp S !9@n-jjKD$8p a%z׮]{u%K!+6l܂18zCMm)c=GY'H=ᎎZ㏣!*kN ԭ[7rJ?O8 )r{hmPS[ZO-By^E088'zCMm)k=cYDgL8wzCMm)s=uTiӦ\2PV)/92!Cco B=@L8B=PO!#=4PO{-iȄ@n-jj D92!C` B=@L8B=PO!#=4PO{-iȄ@n-jj D92!C` B=GkMvVȑ [=ԖV^ƹI뺂!'##=4PO{-ZϏug"]Ҥy(Sj#Cco j<0It:mܸ۵i]|nƌرc~wǏwy^_oͫx/_si9}!u#é*zCMmiz;&9G'4(~'nܹ}> 744.\Cy]׮]ݖ-[oe]z-?O'Nt>[fkNU<_]r{hmPS[ZC\tE[nnѾgZY=毽ѣ*+WSpVH޳g|a7tkҤI͞NU:u/_|f͚-g SB"Cco jc(9kҼ:ǐkhZ_Vw.]i Q.8qex!Woz!C۷C ow}2=4PO{-Zϴ^:{E9C9KX;T*K>)|y| vT'!7n;tρ;v=1 zr^TA?EП}{GpzSEzCMmiz/uG\?ktB*^WVpK.Y ѣ(Hnzxƣ GRe/o2,JV+ر_zjw-HzB= CV5ėYig מ*ȑ#ݷm,J.=4PO{-R=5CJ4ECV4}͕[ @TP~*105l?K{[=Ԗse/ryΝ;qѼpq᚟4C 8B=:i!uֹ~eڷ/mw _]YOO _\EI#vrN/=4PO{-eF!3! tV r[e<ta`_'ٺuσ tŲJTcȵĩA i5r:K+5f z[w54|}f5kA^reÇW=} u{=zvr`N=4PO{-eB aS=лv ȳ6~eu*GM޷o_Os纻tj {[=Ԗ2SAW#z K 1x/y裏V.5:ވ#޽{+זGv!Cco Rz* qg;/ZeuΙ3w֪VcwC:o˲sgy ©=4PO{-e Ѡuyڐchr|(N ahwڤMvuH PV<=4PO{-e g\P9%ST!Ѡlخ]&J7?[~?k E i5w&yyT;ԐarQ˖-s'NY֭ ݓO>14\E *z,=4PO{-egѣjeN,UK:1(I-+W!\;uv-Yh+1/rEEGS1ȣW]ujw^KzCMm)k=՛?S6{3uZP@SOScǎ.s˗/ܢNW=4PO{-e:?MK/u3ZxT!oذVW=4PO{-iJCqaQuw ]i߾2e?LNm߾ݍ;}ˇ.zՃ@n-jj DRY 4cܸq^+auYf5/j ח S}PM\/s n KIWzqzCMm)c=5\Xä@_e :=qb Y=:z~Yy?&>jb팩.7W=4PO{-e(0wyiG o4\COݫ&N设jjLrP!6g^=B=X 뮻^RYQזZgj @.'m^KzCMmȕFf̘/oܸ۵ipC+Эm۶Nu=c?Ai<';v#|]l5Qf2`CeEc-{[=K\tpžW^yŏfԩ۴iڵGӘkXya?@7E'{lhhh6#"4O7h 7sL?OЯeVr H\oqȊ, ZoyQzCMmc尹slPI!;_vy k֬ϕ6Ou.Zc*M זz3'Ovpbug{[=Kȑ#>a${Ee۶m>(+ڟ0z*U t zo>r-Z{[=Ԗs`J&LෆwvyZFvr{hmPS[HS@"~ӟԏK}ۄA i5@4 ?G\:ک@n-jj D/r-~h C i5z"MƐ/Xӽ;\!|n08h=r{hlPS['Ҕ*kSO=scYꔬ˗/@n-jj D'N2~MX^uC])8B=0m41}4J .hܷı_~Ë:qPکWX:w쏃mܸ_~FI::4SVnΞB0o]r{hmPS[ZOe̘1t :ɓ}겮FYGZщ}:W5Z ߺu(8~3g۷oc!~N)t ׎:UD]+^?ԩ[E+:!uzCMm)k= ˡw\ihh4W^U{ɗ.]ꮸ 4@y i=Q:u]O/۷o۷_.Br.~믿]~n=? 
B= :!N/:θڵ'rpPGx@!3gtwy)ԗ4 _|ɒ%@nO^p{1}c*" ›=Ԗ2S hcW )kw#K ߲eڵw^}&pƌG\*Rr;~)RnqiTnZoPS[XO>}4I9S\aznٲerruhEȑ#K}rMz'QU8#}r{8lxLr+$i[ɯ+կ Hwb~=i}i҆߱O_[r[ȣ\[2d%r0!@$+@ڙ4+a,eT@B͛7 4]r%p7]tk׮CvdJ#Sa sd1q}mgS|E٦+*-e rUGvZ;u@lLjeCkJ@Y" \z饾STk@nOQ]6Y8O[#/I*L9GsuJ\_ʋSN T߿^v5j!a%z׮]{uCC!+6l܂1䅠駟z --Xj|4S6d䶔񣬈zÓN <%<8@ݺus+WDA Edy֐MUeQ#\b!zQU|ʛj\o.'0i=E'm A>A{s`_SٕuK˭P^-0i$5ѩOp!Tm廮Ȥh|[5yx)TBkRh',,º+Z;Nj;(܆ g:OyphZy*g46>6v@CYS9ȑIyZ楨ozz5䊨 <(Jc_@U3e 6̋ HC G&E :Fҗo E{H磻u&ջ(yJQ3O-$)ײHvVXMy~ ) 9ȑIzȓpEuJ y9+ZcGk3/FoYaq!# )+(e;ĩQ2heV֦u .¡R>֓֗g ʦY(ʆUZ}@4rdR@ KWS?m,7HAi5-mx& i) 4rdR@.I?$$n-jMq..ү iȤh}w vNqy C` B=@^[lofo0SL-]+W +[=4rdR@ >8So0-`4PO{-iȤP\a<ģӯ/v i5z"ͩV@>[a~`[Y4PO{-iN%BrQ/x4jRWΓ?\azʍr%}h?_ϡ"MB=TJj+\ |k^NC6%t]~=COul1DhPO92Mä <(eҤyynh*iB6ck_\n?)љ'Z?26\*ү1~2i?+W[.u8369Ҝj]d KTn|eYz86IW5KZ.K]ãMLyKZ6mp)Me+>(ڠ Xzg࿰ȑ'@xC:iQR#OI7Lyߴ}4嵁%[^5~3&lyo&gGl B GSXX{n7bwUW\/yFz,5i|yQ&&zS^]i S^t S/Il4ՃNymh%0zӖ- 9Ҝz¢ǏGyM::te]Ɵ\ZDjJJ I,yZWS^-iYS>%:嵱eV^ HZB_Ia Hs VZ/[ogup:ҩTCVμҕ?qD,yQIQ o 7Ə,!={vHs1> n@g|v~5);(!*5U ?/֯_O@_{-[_F=zԇѣGz-{=>@gao߾53jJ_1|qS<0`^{{饗ɓ'|~O/^g4aر?okΝkiѢE~QTC GU|q t17o(4gϞ?>#_7?3[> ׳fr˗/h9+;rn~|FQ U 8TǦO^e@9h1QX2e~$@gߵѺE5rTLSi1O-k grꫯܸq|̖=a?@%ITLk֬=37nɼGvUԐpEG?Ж|f/CgkDŽ #Gnjgȑ^=rzW[812lE9:ɓN n|F@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@܅^;f__!C۴s6yK.=c/ܢGn͚5عsO~&Ll[x;v;vX>#wu eA `l=+״>CD`nĉ>}8Po1c|W88p۴iSeng8̴ V[oU#0A>`!ܸq'p_|߿۰aq^jQ[`C8=zZO$uėUC֧ 4m۶K[cj9s_%$m9Et޼yKP|rq d!]nܪUX*Ο?xn֭~<#Ox7|&e͘1-ZG5rƱ/VX?q{g̏zwpÇ׮)^j{94FT˗^lfCo poرï{ 9ѐݮڲׯe_{nϞ=U3~(9W(ЩX=_B.]_PQF+8 Spzo׮]MvT8U 鹪rpkE5xiE ӉT`]mْy W >ʄ@4A ݱ/zn'X^S” ߡ3=˖-x|*dk {cYGlҥ?}ЌSСC}=zt!5 iutTR+৭^{'l^4Ga[Aڲic"~?lSX@95A4(rmU0TkRE2ˡ6pTh ':$'rz\SU V!uW[S/w{uh WOh{K\ }Z^s4@²jhL,:tƁk'YVOWϩiիq.=lz-[aySI[NrKk"(5jDtzRPu]0z~mwW7D0/:|Cܒ%K|PNj.BBBC="a+?I\楽IY㹓:4^GBQh֑l/iK[6]x'e( 9Ʋ ?Wh\*@9c\  eD Xm0 9)k@81b4߂IENDB`channels-4.0.0/loadtesting/README.md000066400000000000000000000004111432260166700171050ustar00rootroot00000000000000Django Channels Load Testing Results Index =============== [2016-09-06 Results](2016-09-06/README.rst) --------------- 
**Normal Django, WSGI** - Gunicorn (19.6.0) **Django Channels, ASGI** - Redis (0.14.0) and Daphne (0.14.3) - IPC (1.1.0) and Daphne (0.14.3) channels-4.0.0/setup.cfg000066400000000000000000000002731432260166700151400ustar00rootroot00000000000000[flake8] exclude = venv/*,tox/*,docs/*,testproject/*,build/* max-line-length = 88 extend-ignore = E203, W503 [isort] profile = black [tool:pytest] testpaths = tests asyncio_mode = auto channels-4.0.0/setup.py000066400000000000000000000031231432260166700150260ustar00rootroot00000000000000from setuptools import find_packages, setup from channels import __version__ setup( name="channels", version=__version__, url="http://github.com/django/channels", author="Django Software Foundation", author_email="foundation@djangoproject.com", description="Brings async, event-driven capabilities to Django 3.2 and up.", license="BSD", packages=find_packages(exclude=["tests"]), include_package_data=True, python_requires=">=3.7", install_requires=[ "Django>=3.2", "asgiref>=3.5.0,<4", ], extras_require={ "tests": [ "pytest", "pytest-django", "pytest-asyncio", "async-timeout", "coverage~=4.5", ], "daphne": [ "daphne>=4.0.0", ] }, classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Framework :: Django", "Framework :: Django :: 3", "Framework :: Django :: 3.2", "Framework :: Django :: 4", "Framework :: Django :: 4.0", "Framework :: Django :: 4.1", "Topic :: Internet :: WWW/HTTP", ], ) 
channels-4.0.0/tests/000077500000000000000000000000001432260166700144575ustar00rootroot00000000000000channels-4.0.0/tests/__init__.py000066400000000000000000000000001432260166700165560ustar00rootroot00000000000000channels-4.0.0/tests/conftest.py000066400000000000000000000015421432260166700166600ustar00rootroot00000000000000import pytest from django.conf import settings def pytest_configure(): settings.configure( DATABASES={"default": {"ENGINE": "django.db.backends.sqlite3"}}, INSTALLED_APPS=[ "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sessions", "django.contrib.admin", "channels", ], SECRET_KEY="Not_a_secret_key", ) def pytest_generate_tests(metafunc): if "samesite" in metafunc.fixturenames: metafunc.parametrize("samesite", ["Strict", "None"], indirect=True) @pytest.fixture def samesite(request, settings): """Set samesite flag to strict.""" settings.SESSION_COOKIE_SAMESITE = request.param @pytest.fixture def samesite_invalid(settings): """Set samesite flag to strict.""" settings.SESSION_COOKIE_SAMESITE = "Hello" channels-4.0.0/tests/security/000077500000000000000000000000001432260166700163265ustar00rootroot00000000000000channels-4.0.0/tests/security/test_auth.py000066400000000000000000000155551432260166700207130ustar00rootroot00000000000000from importlib import import_module from unittest import mock import pytest from asgiref.sync import sync_to_async from django.conf import settings from django.contrib.auth import ( BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY, get_user_model, user_logged_in, user_logged_out, ) from django.contrib.auth.models import AnonymousUser from channels.auth import get_user, login, logout from channels.db import database_sync_to_async class CatchSignal: """ Capture (and detect) a django signal event. This should be used as a Contextmanager. 
:Example: with CatchSignal(user_logged_in) as handler: # do the django action here that will create the signal assert handler.called :Async Example: async with CatchSignal(user_logged_in) as handler: await ... # the django action the creates the signal assert handler.called """ def __init__(self, signal): self.handler = mock.Mock() self.signal = signal async def __aenter__(self): await sync_to_async(self.signal.connect)(self.handler) return self.handler async def __aexit__(self, exc_type, exc, tb): await sync_to_async(self.signal.disconnect)(self.handler) def __enter__(self): self.signal.connect(self.handler) return self.handler def __exit__(self, exc_type, exc_val, exc_tb): self.signal.disconnect(self.handler) @pytest.fixture def user_bob(): return get_user_model().objects.create(username="bob", email="bob@example.com") @pytest.fixture def user_bill(): return get_user_model().objects.create(username="bill", email="bill@example.com") @pytest.fixture def session(): SessionStore = import_module(settings.SESSION_ENGINE).SessionStore session = SessionStore() session.create() return session async def assert_is_logged_in(scope, user): """ Assert that the provided user is logged in to the session contained within the scope. """ assert "user" in scope assert scope["user"] == user session = scope["session"] # logged in! assert SESSION_KEY in session assert BACKEND_SESSION_KEY in session assert HASH_SESSION_KEY in session assert isinstance( await get_user(scope), await database_sync_to_async(get_user_model)() ) assert await get_user(scope) == user @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_login_no_session_in_scope(): """ Test to ensure that a `ValueError` is raised if when tying to login a user to a scope that has no session. """ msg = ( "Cannot find session in scope. You should wrap your consumer in " "SessionMiddleware." 
) with pytest.raises(ValueError, match=msg): await login(scope={}, user=None) @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_login_no_user_in_scope(session): """ Test the login method to ensure it raises a `ValueError` if no user is passed and is no user in the scope. """ scope = {"session": session} with pytest.raises( ValueError, match="User must be passed as an argument or must be present in the scope.", ): await login(scope, user=None) @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_login_user_as_argument(session, user_bob): """ Test that one can login to a scope that has a session by passing the scope and user as arguments to the login function. """ scope = {"session": session} assert isinstance(await get_user(scope), AnonymousUser) # not logged in assert SESSION_KEY not in session async with CatchSignal(user_logged_in) as handler: assert not handler.called await login(scope, user=user_bob) assert handler.called await assert_is_logged_in(scope, user_bob) @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_login_user_on_scope(session, user_bob): """ Test that in the absence of a user being passed to the `login` function the function will use the user set on the scope. """ scope = {"session": session, "user": user_bob} # check that we are not logged in on the session assert isinstance(await get_user(scope), AnonymousUser) async with CatchSignal(user_logged_in) as handler: assert not handler.called await login(scope, user=None) assert handler.called await assert_is_logged_in(scope, user_bob) @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_login_change_user(session, user_bob, user_bill): """ Test logging in a second user into a scope were another user is already logged in. 
""" scope = {"session": session} # check that we are not logged in on the session assert isinstance(await get_user(scope), AnonymousUser) async with CatchSignal(user_logged_in) as handler: assert not handler.called await login(scope, user=user_bob) assert handler.called await assert_is_logged_in(scope, user_bob) session_key = session[SESSION_KEY] assert session_key async with CatchSignal(user_logged_in) as handler: assert not handler.called await login(scope, user=user_bill) assert handler.called await assert_is_logged_in(scope, user_bill) assert session_key != session[SESSION_KEY] @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_logout(session, user_bob): """ Test that one can logout a user from a logged in session. """ scope = {"session": session} # check that we are not logged in on the session assert isinstance(await get_user(scope), AnonymousUser) async with CatchSignal(user_logged_in) as handler: assert not handler.called await login(scope, user=user_bob) assert handler.called await assert_is_logged_in(scope, user_bob) assert SESSION_KEY in session session_key = session[SESSION_KEY] assert session_key async with CatchSignal(user_logged_out) as handler: assert not handler.called await logout(scope) assert handler.called assert isinstance(await get_user(scope), AnonymousUser) assert isinstance(scope["user"], AnonymousUser) assert SESSION_KEY not in session @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_logout_not_logged_in(session): """ Test that the `logout` function does nothing in the case were there is no user logged in. 
""" scope = {"session": session} # check that we are not logged in on the session assert isinstance(await get_user(scope), AnonymousUser) async with CatchSignal(user_logged_out) as handler: assert not handler.called await logout(scope) assert not handler.called assert "user" not in scope assert isinstance(await get_user(scope), AnonymousUser) channels-4.0.0/tests/security/test_websocket.py000066400000000000000000000074041432260166700217320ustar00rootroot00000000000000import pytest from channels.generic.websocket import AsyncWebsocketConsumer from channels.security.websocket import OriginValidator from channels.testing import WebsocketCommunicator @pytest.mark.asyncio async def test_origin_validator(): """ Tests that OriginValidator correctly allows/denies connections. """ # Make our test application application = OriginValidator(AsyncWebsocketConsumer(), ["allowed-domain.com"]) # Test a normal connection communicator = WebsocketCommunicator( application, "/", headers=[(b"origin", b"http://allowed-domain.com")] ) connected, _ = await communicator.connect() assert connected await communicator.disconnect() # Test a bad connection communicator = WebsocketCommunicator( application, "/", headers=[(b"origin", b"http://bad-domain.com")] ) connected, _ = await communicator.connect() assert not connected await communicator.disconnect() # Make our test application, bad pattern application = OriginValidator(AsyncWebsocketConsumer(), ["*.allowed-domain.com"]) # Test a bad connection communicator = WebsocketCommunicator( application, "/", headers=[(b"origin", b"http://allowed-domain.com")] ) connected, _ = await communicator.connect() assert not connected await communicator.disconnect() # Make our test application, good pattern application = OriginValidator(AsyncWebsocketConsumer(), [".allowed-domain.com"]) # Test a normal connection communicator = WebsocketCommunicator( application, "/", headers=[(b"origin", b"http://www.allowed-domain.com")] ) connected, _ = await 
communicator.connect() assert connected await communicator.disconnect() # Make our test application, with scheme://domain[:port] for http application = OriginValidator( AsyncWebsocketConsumer(), ["http://allowed-domain.com"] ) # Test a normal connection communicator = WebsocketCommunicator( application, "/", headers=[(b"origin", b"http://allowed-domain.com")] ) connected, _ = await communicator.connect() assert connected await communicator.disconnect() # Test a bad connection communicator = WebsocketCommunicator( application, "/", headers=[(b"origin", b"https://bad-domain.com:443")] ) connected, _ = await communicator.connect() assert not connected await communicator.disconnect() # Make our test application, with all hosts allowed application = OriginValidator(AsyncWebsocketConsumer(), ["*"]) # Test a connection without any headers communicator = WebsocketCommunicator(application, "/", headers=[]) connected, _ = await communicator.connect() assert connected await communicator.disconnect() # Make our test application, with no hosts allowed application = OriginValidator(AsyncWebsocketConsumer(), []) # Test a connection without any headers communicator = WebsocketCommunicator(application, "/", headers=[]) connected, _ = await communicator.connect() assert not connected await communicator.disconnect() # Test bug with subdomain and empty origin header application = OriginValidator(AsyncWebsocketConsumer(), [".allowed-domain.com"]) communicator = WebsocketCommunicator(application, "/", headers=[(b"origin", b"")]) connected, _ = await communicator.connect() assert not connected await communicator.disconnect() # Test bug with subdomain and invalid origin header application = OriginValidator(AsyncWebsocketConsumer(), [".allowed-domain.com"]) communicator = WebsocketCommunicator( application, "/", headers=[(b"origin", b"something-invalid")] ) connected, _ = await communicator.connect() assert not connected await communicator.disconnect() 
channels-4.0.0/tests/test_generic_http.py000066400000000000000000000100641432260166700205440ustar00rootroot00000000000000import asyncio import json import time import pytest from channels.generic.http import AsyncHttpConsumer from channels.testing import HttpCommunicator @pytest.mark.asyncio async def test_async_http_consumer(): """ Tests that AsyncHttpConsumer is implemented correctly. """ class TestConsumer(AsyncHttpConsumer): async def handle(self, body): data = json.loads(body.decode("utf-8")) await self.send_response( 200, json.dumps({"value": data["value"]}).encode("utf-8"), headers={b"Content-Type": b"application/json"}, ) app = TestConsumer() # Open a connection communicator = HttpCommunicator( app, method="POST", path="/test/", body=json.dumps({"value": 42, "anything": False}).encode("utf-8"), ) response = await communicator.get_response() assert response["body"] == b'{"value": 42}' assert response["status"] == 200 assert response["headers"] == [(b"Content-Type", b"application/json")] @pytest.mark.asyncio async def test_per_scope_consumers(): """ Tests that a distinct consumer is used per scope, with AsyncHttpConsumer as the example consumer class. """ class TestConsumer(AsyncHttpConsumer): def __init__(self): super().__init__() self.time = time.time() async def handle(self, body): body = f"{self.__class__.__name__} {id(self)} {self.time}" await self.send_response( 200, body.encode("utf-8"), headers={b"Content-Type": b"text/plain"}, ) app = TestConsumer.as_asgi() # Open a connection communicator = HttpCommunicator(app, method="GET", path="/test/") response = await communicator.get_response() assert response["status"] == 200 # And another one. 
communicator = HttpCommunicator(app, method="GET", path="/test2/") second_response = await communicator.get_response() assert second_response["status"] == 200 assert response["body"] != second_response["body"] @pytest.mark.asyncio async def test_async_http_consumer_future(): """ Regression test for channels accepting only coroutines. The ASGI specification states that the `receive` and `send` arguments to an ASGI application should be "awaitable callable" objects. That includes non-coroutine functions that return Futures. """ class TestConsumer(AsyncHttpConsumer): async def handle(self, body): await self.send_response( 200, b"42", headers={b"Content-Type": b"text/plain"}, ) app = TestConsumer() # Ensure the passed functions are specifically coroutines. async def coroutine_app(scope, receive, send): async def receive_coroutine(): return await asyncio.ensure_future(receive()) async def send_coroutine(*args, **kwargs): return await asyncio.ensure_future(send(*args, **kwargs)) await app(scope, receive_coroutine, send_coroutine) communicator = HttpCommunicator(coroutine_app, method="GET", path="/") response = await communicator.get_response() assert response["body"] == b"42" assert response["status"] == 200 assert response["headers"] == [(b"Content-Type", b"text/plain")] # Ensure the passed functions are "Awaitable Callables" and NOT coroutines. 
async def awaitable_callable_app(scope, receive, send): def receive_awaitable_callable(): return asyncio.ensure_future(receive()) def send_awaitable_callable(*args, **kwargs): return asyncio.ensure_future(send(*args, **kwargs)) await app(scope, receive_awaitable_callable, send_awaitable_callable) # Open a connection communicator = HttpCommunicator(awaitable_callable_app, method="GET", path="/") response = await communicator.get_response() assert response["body"] == b"42" assert response["status"] == 200 assert response["headers"] == [(b"Content-Type", b"text/plain")] channels-4.0.0/tests/test_generic_websocket.py000066400000000000000000000332301432260166700215530ustar00rootroot00000000000000import pytest from django.test import override_settings from channels.generic.websocket import ( AsyncJsonWebsocketConsumer, AsyncWebsocketConsumer, JsonWebsocketConsumer, WebsocketConsumer, ) from channels.layers import get_channel_layer from channels.sessions import SessionMiddlewareStack from channels.testing import WebsocketCommunicator @pytest.mark.django_db @pytest.mark.asyncio async def test_websocket_consumer(): """ Tests that WebsocketConsumer is implemented correctly. 
""" results = {} class TestConsumer(WebsocketConsumer): def connect(self): results["connected"] = True self.accept() def receive(self, text_data=None, bytes_data=None): results["received"] = (text_data, bytes_data) self.send(text_data=text_data, bytes_data=bytes_data) def disconnect(self, code): results["disconnected"] = code app = TestConsumer() # Test a normal connection communicator = WebsocketCommunicator(app, "/testws/") connected, _ = await communicator.connect() assert connected assert "connected" in results # Test sending text await communicator.send_to(text_data="hello") response = await communicator.receive_from() assert response == "hello" assert results["received"] == ("hello", None) # Test sending bytes await communicator.send_to(bytes_data=b"w\0\0\0") response = await communicator.receive_from() assert response == b"w\0\0\0" assert results["received"] == (None, b"w\0\0\0") # Close out await communicator.disconnect() assert "disconnected" in results @pytest.mark.django_db @pytest.mark.asyncio async def test_multiple_websocket_consumers_with_sessions(): """ Tests that multiple consumers use the correct scope when using SessionMiddleware. """ class TestConsumer(WebsocketConsumer): def connect(self): self.accept() def receive(self, text_data=None, bytes_data=None): path = self.scope["path"] self.send(text_data=path) app = SessionMiddlewareStack(TestConsumer.as_asgi()) # Create to communicators. 
communicator = WebsocketCommunicator(app, "/first/") second_communicator = WebsocketCommunicator(app, "/second/") connected, _ = await communicator.connect() assert connected connected, _ = await second_communicator.connect() assert connected # Test out of order await second_communicator.send_to(text_data="Echo Path") response = await second_communicator.receive_from() assert response == "/second/" await communicator.send_to(text_data="Echo Path") response = await communicator.receive_from() assert response == "/first/" # Close out await communicator.disconnect() await second_communicator.disconnect() @pytest.mark.django_db @pytest.mark.asyncio async def test_websocket_consumer_subprotocol(): """ Tests that WebsocketConsumer correctly handles subprotocols. """ class TestConsumer(WebsocketConsumer): def connect(self): assert self.scope["subprotocols"] == ["subprotocol1", "subprotocol2"] self.accept("subprotocol2") app = TestConsumer() # Test a normal connection with subprotocols communicator = WebsocketCommunicator( app, "/testws/", subprotocols=["subprotocol1", "subprotocol2"] ) connected, subprotocol = await communicator.connect() assert connected assert subprotocol == "subprotocol2" @pytest.mark.django_db @pytest.mark.asyncio async def test_websocket_consumer_groups(): """ Tests that WebsocketConsumer adds and removes channels from groups. 
""" results = {} class TestConsumer(WebsocketConsumer): groups = ["chat"] def receive(self, text_data=None, bytes_data=None): results["received"] = (text_data, bytes_data) self.send(text_data=text_data, bytes_data=bytes_data) app = TestConsumer() channel_layers_setting = { "default": {"BACKEND": "channels.layers.InMemoryChannelLayer"} } with override_settings(CHANNEL_LAYERS=channel_layers_setting): communicator = WebsocketCommunicator(app, "/testws/") await communicator.connect() channel_layer = get_channel_layer() # Test that the websocket channel was added to the group on connect message = {"type": "websocket.receive", "text": "hello"} await channel_layer.group_send("chat", message) response = await communicator.receive_from() assert response == "hello" assert results["received"] == ("hello", None) # Test that the websocket channel was discarded from the group on disconnect await communicator.disconnect() assert channel_layer.groups == {} @pytest.mark.asyncio async def test_async_websocket_consumer(): """ Tests that AsyncWebsocketConsumer is implemented correctly. 
""" results = {} class TestConsumer(AsyncWebsocketConsumer): async def connect(self): results["connected"] = True await self.accept() async def receive(self, text_data=None, bytes_data=None): results["received"] = (text_data, bytes_data) await self.send(text_data=text_data, bytes_data=bytes_data) async def disconnect(self, code): results["disconnected"] = code app = TestConsumer() # Test a normal connection communicator = WebsocketCommunicator(app, "/testws/") connected, _ = await communicator.connect() assert connected assert "connected" in results # Test sending text await communicator.send_to(text_data="hello") response = await communicator.receive_from() assert response == "hello" assert results["received"] == ("hello", None) # Test sending bytes await communicator.send_to(bytes_data=b"w\0\0\0") response = await communicator.receive_from() assert response == b"w\0\0\0" assert results["received"] == (None, b"w\0\0\0") # Close out await communicator.disconnect() assert "disconnected" in results @pytest.mark.asyncio async def test_async_websocket_consumer_subprotocol(): """ Tests that AsyncWebsocketConsumer correctly handles subprotocols. """ class TestConsumer(AsyncWebsocketConsumer): async def connect(self): assert self.scope["subprotocols"] == ["subprotocol1", "subprotocol2"] await self.accept("subprotocol2") app = TestConsumer() # Test a normal connection with subprotocols communicator = WebsocketCommunicator( app, "/testws/", subprotocols=["subprotocol1", "subprotocol2"] ) connected, subprotocol = await communicator.connect() assert connected assert subprotocol == "subprotocol2" @pytest.mark.asyncio async def test_async_websocket_consumer_groups(): """ Tests that AsyncWebsocketConsumer adds and removes channels from groups. 
""" results = {} class TestConsumer(AsyncWebsocketConsumer): groups = ["chat"] async def receive(self, text_data=None, bytes_data=None): results["received"] = (text_data, bytes_data) await self.send(text_data=text_data, bytes_data=bytes_data) app = TestConsumer() channel_layers_setting = { "default": {"BACKEND": "channels.layers.InMemoryChannelLayer"} } with override_settings(CHANNEL_LAYERS=channel_layers_setting): communicator = WebsocketCommunicator(app, "/testws/") await communicator.connect() channel_layer = get_channel_layer() # Test that the websocket channel was added to the group on connect message = {"type": "websocket.receive", "text": "hello"} await channel_layer.group_send("chat", message) response = await communicator.receive_from() assert response == "hello" assert results["received"] == ("hello", None) # Test that the websocket channel was discarded from the group on disconnect await communicator.disconnect() assert channel_layer.groups == {} @pytest.mark.asyncio async def test_async_websocket_consumer_specific_channel_layer(): """ Tests that AsyncWebsocketConsumer uses the specified channel layer. 
""" results = {} class TestConsumer(AsyncWebsocketConsumer): channel_layer_alias = "testlayer" async def receive(self, text_data=None, bytes_data=None): results["received"] = (text_data, bytes_data) await self.send(text_data=text_data, bytes_data=bytes_data) app = TestConsumer() channel_layers_setting = { "testlayer": {"BACKEND": "channels.layers.InMemoryChannelLayer"} } with override_settings(CHANNEL_LAYERS=channel_layers_setting): communicator = WebsocketCommunicator(app, "/testws/") await communicator.connect() channel_layer = get_channel_layer("testlayer") # Test that the specific channel layer is retrieved assert channel_layer is not None channel_name = list(channel_layer.channels.keys())[0] message = {"type": "websocket.receive", "text": "hello"} await channel_layer.send(channel_name, message) response = await communicator.receive_from() assert response == "hello" assert results["received"] == ("hello", None) await communicator.disconnect() @pytest.mark.django_db @pytest.mark.asyncio async def test_json_websocket_consumer(): """ Tests that JsonWebsocketConsumer is implemented correctly. """ results = {} class TestConsumer(JsonWebsocketConsumer): def connect(self): self.accept() def receive_json(self, data=None): results["received"] = data self.send_json(data) app = TestConsumer() # Open a connection communicator = WebsocketCommunicator(app, "/testws/") connected, _ = await communicator.connect() assert connected # Test sending await communicator.send_json_to({"hello": "world"}) response = await communicator.receive_json_from() assert response == {"hello": "world"} assert results["received"] == {"hello": "world"} # Test sending bytes breaks it await communicator.send_to(bytes_data=b"w\0\0\0") with pytest.raises(ValueError): await communicator.wait() @pytest.mark.asyncio async def test_async_json_websocket_consumer(): """ Tests that AsyncJsonWebsocketConsumer is implemented correctly. 
""" results = {} class TestConsumer(AsyncJsonWebsocketConsumer): async def connect(self): await self.accept() async def receive_json(self, data=None): results["received"] = data await self.send_json(data) app = TestConsumer() # Open a connection communicator = WebsocketCommunicator(app, "/testws/") connected, _ = await communicator.connect() assert connected # Test sending await communicator.send_json_to({"hello": "world"}) response = await communicator.receive_json_from() assert response == {"hello": "world"} assert results["received"] == {"hello": "world"} # Test sending bytes breaks it await communicator.send_to(bytes_data=b"w\0\0\0") with pytest.raises(ValueError): await communicator.wait() @pytest.mark.asyncio async def test_block_underscored_type_function_call(): """ Test that consumer prevent calling private functions as handler """ class TestConsumer(AsyncWebsocketConsumer): channel_layer_alias = "testlayer" async def _my_private_handler(self, _): await self.send(text_data="should never be called") app = TestConsumer() channel_layers_setting = { "testlayer": {"BACKEND": "channels.layers.InMemoryChannelLayer"} } with override_settings(CHANNEL_LAYERS=channel_layers_setting): communicator = WebsocketCommunicator(app, "/testws/") await communicator.connect() channel_layer = get_channel_layer("testlayer") # Test that the specific channel layer is retrieved assert channel_layer is not None channel_name = list(channel_layer.channels.keys())[0] # Should block call to private functions handler and raise ValueError message = {"type": "_my_private_handler", "text": "hello"} await channel_layer.send(channel_name, message) with pytest.raises( ValueError, match=r"Malformed type in message \(leading underscore\)" ): await communicator.receive_from() @pytest.mark.asyncio async def test_block_leading_dot_type_function_call(): """ Test that consumer prevent calling private functions as handler """ class TestConsumer(AsyncWebsocketConsumer): channel_layer_alias = "testlayer" 
async def _my_private_handler(self, _): await self.send(text_data="should never be called") app = TestConsumer() channel_layers_setting = { "testlayer": {"BACKEND": "channels.layers.InMemoryChannelLayer"} } with override_settings(CHANNEL_LAYERS=channel_layers_setting): communicator = WebsocketCommunicator(app, "/testws/") await communicator.connect() channel_layer = get_channel_layer("testlayer") # Test that the specific channel layer is retrieved assert channel_layer is not None channel_name = list(channel_layer.channels.keys())[0] # Should not replace dot by underscore and call private function (see # issue: #1430) message = {"type": ".my_private_handler", "text": "hello"} await channel_layer.send(channel_name, message) with pytest.raises( ValueError, match=r"Malformed type in message \(leading underscore\)" ): await communicator.receive_from() channels-4.0.0/tests/test_http.py000066400000000000000000000070411432260166700170510ustar00rootroot00000000000000import re import pytest from channels.consumer import AsyncConsumer from channels.db import database_sync_to_async from channels.sessions import SessionMiddlewareStack from channels.testing import HttpCommunicator class SimpleHttpApp(AsyncConsumer): """ Barebones HTTP ASGI app for testing. 
""" async def http_request(self, event): await database_sync_to_async(self.scope["session"].save)() assert self.scope["path"] == "/test/" assert self.scope["method"] == "GET" await self.send({"type": "http.response.start", "status": 200, "headers": []}) await self.send({"type": "http.response.body", "body": b"test response"}) @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_sessions(): app = SimpleHttpApp() communicator = HttpCommunicator(SessionMiddlewareStack(app), "GET", "/test/") response = await communicator.get_response() headers = response.get("headers", []) assert len(headers) == 1 name, value = headers[0] assert name == b"Set-Cookie" value = value.decode("utf-8") assert re.compile(r"sessionid=").search(value) is not None assert re.compile(r"expires=").search(value) is not None assert re.compile(r"HttpOnly").search(value) is not None assert re.compile(r"Max-Age").search(value) is not None assert re.compile(r"Path").search(value) is not None samesite = re.compile(r"SameSite=(\w+)").search(value) assert samesite is not None assert samesite.group(1) == "Lax" @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_session_samesite(samesite, settings): app = SimpleHttpApp() communicator = HttpCommunicator(SessionMiddlewareStack(app), "GET", "/test/") response = await communicator.get_response() headers = response.get("headers", []) assert len(headers) == 1 name, value = headers[0] assert name == b"Set-Cookie" value = value.decode("utf-8") samesite = re.compile(r"SameSite=(\w+)").search(value) assert samesite is not None assert samesite.group(1) == settings.SESSION_COOKIE_SAMESITE @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_session_samesite_invalid(samesite_invalid): app = SimpleHttpApp() communicator = HttpCommunicator(SessionMiddlewareStack(app), "GET", "/test/") with pytest.raises(AssertionError): await communicator.get_response() @pytest.mark.django_db(transaction=True) 
@pytest.mark.asyncio async def test_muliple_sessions(): """ Create two application instances and test then out of order to verify that separate scopes are used. """ async def inner(scope, receive, send): send(scope["path"]) class SimpleHttpApp(AsyncConsumer): async def http_request(self, event): await database_sync_to_async(self.scope["session"].save)() assert self.scope["method"] == "GET" await self.send( {"type": "http.response.start", "status": 200, "headers": []} ) await self.send( {"type": "http.response.body", "body": self.scope["path"].encode()} ) app = SessionMiddlewareStack(SimpleHttpApp.as_asgi()) first_communicator = HttpCommunicator(app, "GET", "/first/") second_communicator = HttpCommunicator(app, "GET", "/second/") second_response = await second_communicator.get_response() assert second_response["body"] == b"/second/" first_response = await first_communicator.get_response() assert first_response["body"] == b"/first/" channels-4.0.0/tests/test_inmemorychannel.py000066400000000000000000000140541432260166700212640ustar00rootroot00000000000000import asyncio import async_timeout import pytest from channels.exceptions import ChannelFull from channels.layers import InMemoryChannelLayer @pytest.fixture() async def channel_layer(): """ Channel layer fixture that flushes automatically. """ channel_layer = InMemoryChannelLayer(capacity=3) yield channel_layer await channel_layer.flush() await channel_layer.close() @pytest.mark.asyncio async def test_send_receive(channel_layer): """ Makes sure we can send a message to a normal channel then receive it. """ await channel_layer.send( "test-channel-1", {"type": "test.message", "text": "Ahoy-hoy!"} ) message = await channel_layer.receive("test-channel-1") assert message["type"] == "test.message" assert message["text"] == "Ahoy-hoy!" 
@pytest.mark.asyncio async def test_send_capacity(channel_layer): """ Makes sure we get ChannelFull when we hit the send capacity """ await channel_layer.send("test-channel-1", {"type": "test.message"}) await channel_layer.send("test-channel-1", {"type": "test.message"}) await channel_layer.send("test-channel-1", {"type": "test.message"}) with pytest.raises(ChannelFull): await channel_layer.send("test-channel-1", {"type": "test.message"}) @pytest.mark.asyncio async def test_process_local_send_receive(channel_layer): """ Makes sure we can send a message to a process-local channel then receive it. """ channel_name = await channel_layer.new_channel() await channel_layer.send( channel_name, {"type": "test.message", "text": "Local only please"} ) message = await channel_layer.receive(channel_name) assert message["type"] == "test.message" assert message["text"] == "Local only please" @pytest.mark.asyncio async def test_multi_send_receive(channel_layer): """ Tests overlapping sends and receives, and ordering. """ channel_layer = InMemoryChannelLayer() await channel_layer.send("test-channel-3", {"type": "message.1"}) await channel_layer.send("test-channel-3", {"type": "message.2"}) await channel_layer.send("test-channel-3", {"type": "message.3"}) assert (await channel_layer.receive("test-channel-3"))["type"] == "message.1" assert (await channel_layer.receive("test-channel-3"))["type"] == "message.2" assert (await channel_layer.receive("test-channel-3"))["type"] == "message.3" @pytest.mark.asyncio async def test_groups_basic(channel_layer): """ Tests basic group operation. 
""" channel_layer = InMemoryChannelLayer() await channel_layer.group_add("test-group", "test-gr-chan-1") await channel_layer.group_add("test-group", "test-gr-chan-2") await channel_layer.group_add("test-group", "test-gr-chan-3") await channel_layer.group_discard("test-group", "test-gr-chan-2") await channel_layer.group_send("test-group", {"type": "message.1"}) # Make sure we get the message on the two channels that were in async with async_timeout.timeout(1): assert (await channel_layer.receive("test-gr-chan-1"))["type"] == "message.1" assert (await channel_layer.receive("test-gr-chan-3"))["type"] == "message.1" # Make sure the removed channel did not get the message with pytest.raises(asyncio.TimeoutError): async with async_timeout.timeout(1): await channel_layer.receive("test-gr-chan-2") @pytest.mark.asyncio async def test_groups_channel_full(channel_layer): """ Tests that group_send ignores ChannelFull """ channel_layer = InMemoryChannelLayer() await channel_layer.group_add("test-group", "test-gr-chan-1") await channel_layer.group_send("test-group", {"type": "message.1"}) await channel_layer.group_send("test-group", {"type": "message.1"}) await channel_layer.group_send("test-group", {"type": "message.1"}) await channel_layer.group_send("test-group", {"type": "message.1"}) await channel_layer.group_send("test-group", {"type": "message.1"}) @pytest.mark.asyncio async def test_expiry_single(): """ Tests that a message can expire. """ channel_layer = InMemoryChannelLayer(expiry=0.1) await channel_layer.send("test-channel-1", {"type": "message.1"}) assert len(channel_layer.channels) == 1 await asyncio.sleep(0.1) # Message should have expired and been dropped. with pytest.raises(asyncio.TimeoutError): async with async_timeout.timeout(0.5): await channel_layer.receive("test-channel-1") # Channel should be cleaned up. 
assert len(channel_layer.channels) == 0 @pytest.mark.asyncio async def test_expiry_unread(): """ Tests that a message on a channel can expire and be cleaned up even if the channel is not read from again. """ channel_layer = InMemoryChannelLayer(expiry=0.1) await channel_layer.send("test-channel-1", {"type": "message.1"}) await asyncio.sleep(0.1) await channel_layer.send("test-channel-2", {"type": "message.2"}) assert len(channel_layer.channels) == 2 assert (await channel_layer.receive("test-channel-2"))["type"] == "message.2" # Both channels should be cleaned up. assert len(channel_layer.channels) == 0 @pytest.mark.asyncio async def test_expiry_multi(): """ Tests that multiple messages can expire. """ channel_layer = InMemoryChannelLayer(expiry=0.1) await channel_layer.send("test-channel-1", {"type": "message.1"}) await channel_layer.send("test-channel-1", {"type": "message.2"}) await channel_layer.send("test-channel-1", {"type": "message.3"}) assert (await channel_layer.receive("test-channel-1"))["type"] == "message.1" await asyncio.sleep(0.1) await channel_layer.send("test-channel-1", {"type": "message.4"}) assert (await channel_layer.receive("test-channel-1"))["type"] == "message.4" # The second and third message should have expired and been dropped. with pytest.raises(asyncio.TimeoutError): async with async_timeout.timeout(0.5): await channel_layer.receive("test-channel-1") # Channel should be cleaned up. 
assert len(channel_layer.channels) == 0 channels-4.0.0/tests/test_layers.py000066400000000000000000000047761432260166700174050ustar00rootroot00000000000000import unittest import pytest from django.test import override_settings from channels import DEFAULT_CHANNEL_LAYER from channels.exceptions import InvalidChannelLayerError from channels.layers import ( BaseChannelLayer, InMemoryChannelLayer, channel_layers, get_channel_layer, ) class TestChannelLayerManager(unittest.TestCase): @override_settings( CHANNEL_LAYERS={"default": {"BACKEND": "channels.layers.InMemoryChannelLayer"}} ) def test_config_error(self): """ If channel layer doesn't specify TEST_CONFIG, `make_test_backend` should result into error. """ with self.assertRaises(InvalidChannelLayerError): channel_layers.make_test_backend(DEFAULT_CHANNEL_LAYER) @override_settings( CHANNEL_LAYERS={ "default": { "BACKEND": "channels.layers.InMemoryChannelLayer", "TEST_CONFIG": {"expiry": 100500}, } } ) def test_config_instance(self): """ If channel layer provides TEST_CONFIG, `make_test_backend` should return channel layer instance appropriate for testing. """ layer = channel_layers.make_test_backend(DEFAULT_CHANNEL_LAYER) self.assertEqual(layer.expiry, 100500) def test_override_settings(self): """ The channel layers cache is reset when the CHANNEL_LAYERS setting changes. 
""" with override_settings( CHANNEL_LAYERS={ "default": {"BACKEND": "channels.layers.InMemoryChannelLayer"} } ): self.assertEqual(channel_layers.backends, {}) get_channel_layer() self.assertNotEqual(channel_layers.backends, {}) self.assertEqual(channel_layers.backends, {}) # In-memory layer tests @pytest.mark.asyncio async def test_send_receive(): layer = InMemoryChannelLayer() message = {"type": "test.message"} await layer.send("test.channel", message) assert message == await layer.receive("test.channel") @pytest.mark.parametrize( "method", [BaseChannelLayer().valid_channel_name, BaseChannelLayer().valid_group_name], ) @pytest.mark.parametrize( "channel_name,expected_valid", [("¯\\_(ツ)_/¯", False), ("chat", True), ("chat" * 100, False)], ) def test_channel_and_group_name_validation(method, channel_name, expected_valid): if expected_valid: method(channel_name) else: with pytest.raises(TypeError): method(channel_name) channels-4.0.0/tests/test_routing.py000066400000000000000000000213201432260166700175550ustar00rootroot00000000000000import pytest from django.core.exceptions import ImproperlyConfigured from django.urls import path, re_path from channels.routing import ChannelNameRouter, ProtocolTypeRouter, URLRouter class MockApplication: call_args = None def __init__(self, return_value): self.return_value = return_value super().__init__() async def __call__(self, scope, receive, send): self.call_args = ((scope, receive, send), None) return self.return_value @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore::DeprecationWarning") async def test_protocol_type_router(): """ Tests the ProtocolTypeRouter """ # Test basic operation router = ProtocolTypeRouter( { "websocket": MockApplication(return_value="ws"), "http": MockApplication(return_value="http"), } ) assert await router({"type": "websocket"}, None, None) == "ws" assert await router({"type": "http"}, None, None) == "http" # Test an unmatched type with pytest.raises(ValueError): await router({"type": "aprs"}, 
None, None) # Test a scope with no type with pytest.raises(KeyError): await router({"tyyyype": "http"}, None, None) @pytest.mark.asyncio async def test_channel_name_router(): """ Tests the ChannelNameRouter """ # Test basic operation router = ChannelNameRouter( { "test": MockApplication(return_value=1), "other_test": MockApplication(return_value=2), } ) assert await router({"channel": "test"}, None, None) == 1 assert await router({"channel": "other_test"}, None, None) == 2 # Test an unmatched channel with pytest.raises(ValueError): await router({"channel": "chat"}, None, None) # Test a scope with no channel with pytest.raises(ValueError): await router({"type": "http"}, None, None) @pytest.mark.asyncio async def test_url_router(): """ Tests the URLRouter """ posarg_app = MockApplication(return_value=4) kwarg_app = MockApplication(return_value=5) defaultkwarg_app = MockApplication(return_value=6) router = URLRouter( [ path("", MockApplication(return_value=1)), path("foo/", MockApplication(return_value=2)), re_path(r"bar", MockApplication(return_value=3)), re_path(r"^posarg/(\d+)/$", posarg_app), path("kwarg//", kwarg_app), path("defaultkwargs/", defaultkwarg_app, kwargs={"default": 42}), ] ) # Valid basic matches assert await router({"type": "http", "path": "/"}, None, None) == 1 assert await router({"type": "http", "path": "/foo/"}, None, None) == 2 assert await router({"type": "http", "path": "/bar/"}, None, None) == 3 assert await router({"type": "http", "path": "/bar/baz/"}, None, None) == 3 # Valid positional matches assert await router({"type": "http", "path": "/posarg/123/"}, None, None) == 4 assert posarg_app.call_args[0][0]["url_route"] == {"args": ("123",), "kwargs": {}} assert await router({"type": "http", "path": "/posarg/456/"}, None, None) == 4 assert posarg_app.call_args[0][0]["url_route"] == {"args": ("456",), "kwargs": {}} # Valid keyword argument matches assert await router({"type": "http", "path": "/kwarg/hello/"}, None, None) == 5 assert 
kwarg_app.call_args[0][0]["url_route"] == { "args": tuple(), "kwargs": {"name": "hello"}, } assert await router({"type": "http", "path": "/kwarg/hellothere/"}, None, None) == 5 assert kwarg_app.call_args[0][0]["url_route"] == { "args": tuple(), "kwargs": {"name": "hellothere"}, } # Valid default keyword arguments assert await router({"type": "http", "path": "/defaultkwargs/"}, None, None) == 6 assert defaultkwarg_app.call_args[0][0]["url_route"] == { "args": tuple(), "kwargs": {"default": 42}, } # Invalid matches with pytest.raises(ValueError): await router({"type": "http", "path": "/nonexistent/"}, None, None) @pytest.mark.asyncio async def test_url_router_nesting(): """ Tests that nested URLRouters add their keyword captures together. """ test_app = MockApplication(return_value=1) inner_router = URLRouter( [ re_path(r"^book/(?P[\w\-]+)/page/(?P\d+)/$", test_app), re_path(r"^test/(\d+)/$", test_app), ] ) outer_router = URLRouter( [ re_path( r"^universe/(?P\d+)/author/(?P\w+)/", inner_router ), re_path(r"^positional/(\w+)/", inner_router), ] ) assert ( await outer_router( { "type": "http", "path": "/universe/42/author/andrewgodwin/book/channels-guide/page/10/", }, None, None, ) == 1 ) assert test_app.call_args[0][0]["url_route"] == { "args": (), "kwargs": { "book": "channels-guide", "author": "andrewgodwin", "page": "10", "universe": "42", }, } assert ( await outer_router( {"type": "http", "path": "/positional/foo/test/3/"}, None, None ) == 1 ) assert test_app.call_args[0][0]["url_route"] == {"args": ("foo", "3"), "kwargs": {}} @pytest.mark.asyncio async def test_url_router_nesting_path(): """ Tests that nested URLRouters add their keyword captures together when used with path(). 
""" from django.urls import path test_app = MockApplication(return_value=1) inner_router = URLRouter([path("test//", test_app)]) def asgi_middleware(inner): # Some middleware which hides the fact that we have an inner URLRouter async def app(scope, receive, send): return await inner(scope, receive, send) app._path_routing = True return app outer_router = URLRouter( [path("number//", asgi_middleware(inner_router))] ) assert await inner_router({"type": "http", "path": "/test/3/"}, None, None) == 1 assert ( await outer_router({"type": "http", "path": "/number/42/test/3/"}, None, None) == 1 ) assert test_app.call_args[0][0]["url_route"] == { "args": (), "kwargs": {"number": 42, "page": 3}, } with pytest.raises(ValueError): assert await outer_router( {"type": "http", "path": "/number/42/test/3/bla/"}, None, None ) with pytest.raises(ValueError): assert await outer_router( {"type": "http", "path": "/number/42/blub/"}, None, None ) @pytest.mark.asyncio async def test_url_router_path(): """ Tests that URLRouter also works with path() """ from django.urls import path kwarg_app = MockApplication(return_value=3) router = URLRouter( [ path("", MockApplication(return_value=1)), path("foo/", MockApplication(return_value=2)), path("author//", kwarg_app), path("year//", kwarg_app), ] ) # Valid basic matches assert await router({"type": "http", "path": "/"}, None, None) == 1 assert await router({"type": "http", "path": "/foo/"}, None, None) == 2 # Named without typecasting assert ( await router({"type": "http", "path": "/author/andrewgodwin/"}, None, None) == 3 ) assert kwarg_app.call_args[0][0]["url_route"] == { "args": tuple(), "kwargs": {"name": "andrewgodwin"}, } # Named with typecasting assert await router({"type": "http", "path": "/year/2012/"}, None, None) == 3 assert kwarg_app.call_args[0][0]["url_route"] == { "args": tuple(), "kwargs": {"year": 2012}, } # Invalid matches with pytest.raises(ValueError): await router({"type": "http", "path": "/nonexistent/"}, None, None) 
@pytest.mark.asyncio async def test_path_remaining(): """ Resolving continues in outer router if an inner router has no matching routes """ inner_router = URLRouter([path("no-match/", MockApplication(return_value=1))]) test_app = MockApplication(return_value=2) outer_router = URLRouter( [path("prefix/", inner_router), path("prefix/stuff/", test_app)] ) outermost_router = URLRouter([path("", outer_router)]) assert ( await outermost_router({"type": "http", "path": "/prefix/stuff/"}, None, None) == 2 ) def test_invalid_routes(): """ Test URLRouter route validation """ from django.urls import include with pytest.raises(ImproperlyConfigured) as exc: URLRouter([path("", include([]))]) assert "include() is not supported in URLRouter." in str(exc) channels-4.0.0/tests/test_testing.py000066400000000000000000000120661432260166700175520ustar00rootroot00000000000000import asyncio from urllib.parse import unquote import pytest from django.urls import path from channels.consumer import AsyncConsumer from channels.generic.websocket import WebsocketConsumer from channels.routing import URLRouter from channels.testing import HttpCommunicator, WebsocketCommunicator class SimpleHttpApp(AsyncConsumer): """ Barebones HTTP ASGI app for testing. """ async def http_request(self, event): assert self.scope["path"] == "/test/" assert self.scope["method"] == "GET" assert self.scope["query_string"] == b"foo=bar" await self.send({"type": "http.response.start", "status": 200, "headers": []}) await self.send({"type": "http.response.body", "body": b"test response"}) @pytest.mark.asyncio async def test_http_communicator(): """ Tests that the HTTP communicator class works at a basic level. """ communicator = HttpCommunicator(SimpleHttpApp(), "GET", "/test/?foo=bar") response = await communicator.get_response() assert response["body"] == b"test response" assert response["status"] == 200 class SimpleWebsocketApp(WebsocketConsumer): """ Barebones WebSocket ASGI app for testing. 
""" def connect(self): assert self.scope["path"] == "/testws/" self.accept() def receive(self, text_data=None, bytes_data=None): self.send(text_data=text_data, bytes_data=bytes_data) class ErrorWebsocketApp(WebsocketConsumer): """ Barebones WebSocket ASGI app for error testing. """ def receive(self, text_data=None, bytes_data=None): pass class KwargsWebSocketApp(WebsocketConsumer): """ WebSocket ASGI app used for testing the kwargs arguments in the url_route. """ def connect(self): self.accept() self.send(text_data=self.scope["url_route"]["kwargs"]["message"]) @pytest.mark.django_db @pytest.mark.asyncio async def test_websocket_communicator(): """ Tests that the WebSocket communicator class works at a basic level. """ communicator = WebsocketCommunicator(SimpleWebsocketApp(), "/testws/") # Test connection connected, subprotocol = await communicator.connect() assert connected assert subprotocol is None # Test sending text await communicator.send_to(text_data="hello") response = await communicator.receive_from() assert response == "hello" # Test sending bytes await communicator.send_to(bytes_data=b"w\0\0\0") response = await communicator.receive_from() assert response == b"w\0\0\0" # Test sending JSON await communicator.send_json_to({"hello": "world"}) response = await communicator.receive_json_from() assert response == {"hello": "world"} # Close out await communicator.disconnect() @pytest.mark.django_db @pytest.mark.asyncio async def test_websocket_application(): """ Tests that the WebSocket communicator class works with the URLRoute application. 
""" application = URLRouter([path("testws//", KwargsWebSocketApp())]) communicator = WebsocketCommunicator(application, "/testws/test/") connected, subprotocol = await communicator.connect() # Test connection assert connected assert subprotocol is None message = await communicator.receive_from() assert message == "test" await communicator.disconnect() @pytest.mark.django_db @pytest.mark.asyncio async def test_timeout_disconnect(): """ Tests that disconnect() still works after a timeout. """ communicator = WebsocketCommunicator(ErrorWebsocketApp(), "/testws/") # Test connection connected, subprotocol = await communicator.connect() assert connected assert subprotocol is None # Test sending text (will error internally) await communicator.send_to(text_data="hello") with pytest.raises(asyncio.TimeoutError): await communicator.receive_from() # Close out await communicator.disconnect() class ConnectionScopeValidator(WebsocketConsumer): """ Tests ASGI specification for the connection scope. """ def connect(self): assert self.scope["type"] == "websocket" # check if path is a unicode string assert isinstance(self.scope["path"], str) # check if path has percent escapes decoded assert self.scope["path"] == unquote(self.scope["path"]) # check if query_string is a bytes sequence assert isinstance(self.scope["query_string"], bytes) self.accept() paths = [ "user:pass@example.com:8080/p/a/t/h?query=string#hash", "wss://user:pass@example.com:8080/p/a/t/h?query=string#hash", ( "ws://www.example.com/%E9%A6%96%E9%A1%B5/index.php?" "foo=%E9%A6%96%E9%A1%B5&spam=eggs" ), ] @pytest.mark.django_db @pytest.mark.asyncio @pytest.mark.parametrize("path", paths) async def test_connection_scope(path): """ Tests ASGI specification for the the connection scope. 
""" communicator = WebsocketCommunicator(ConnectionScopeValidator(), path) connected, _ = await communicator.connect() assert connected await communicator.disconnect() channels-4.0.0/tox.ini000066400000000000000000000007771432260166700146430ustar00rootroot00000000000000[tox] envlist = py{37,38,39,310}-dj32 py{38,39,310}-dj{40,41,main} qa [testenv] usedevelop = true extras = tests, daphne commands = pytest -v {posargs} deps = dj32: Django>=3.2.9,<4.0 dj40: Django>=4.0,<4.1 dj41: Django>=4.1b1,<4.2 djmain: https://github.com/django/django/archive/main.tar.gz [testenv:qa] skip_install=true deps = black flake8 isort commands = flake8 channels tests black --check channels tests isort --check-only --diff channels tests