django-redis-5.2.0/AUTHORS.rst
==============================

Andrei Antoukh / niwibe
Sean Bleier
Matt Dennewitz
Jannis Leidel
S. Angel / Twidi
Noah Kantrowitz / coderanger
Martin Mahner / bartTC
Timothée Peignier / cyberdelia
Lior Sion / liorsion
Ales Zoulek / aleszoulek
James Aylett / jaylett
Todd Boland / boland
David Zderic / dzderic
Kirill Zaitsev / teferi
Jon Dufresne
Anès Foufa

django-redis-5.2.0/LICENSE
==========================

Copyright (c) 2011-2016 Andrey Antukh
Copyright (c) 2011 Sean Bleier

All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
   derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS`` AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

django-redis-5.2.0/MANIFEST.in
==============================

include LICENSE
include AUTHORS.rst
include README.rst
include CHANGES.txt
recursive-include tests README.txt *.py
recursive-include doc Makefile *.adoc *.html

django-redis-5.2.0/PKG-INFO
===========================

Metadata-Version: 2.1
Name: django-redis
Version: 5.2.0
Summary: Full featured redis cache backend for Django.
Home-page: https://github.com/jazzband/django-redis
Author: Andrei Antoukh
Author-email: niwi@niwi.nz
License: BSD-3-Clause
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Framework :: Django
Classifier: Framework :: Django :: 2.2
Classifier: Framework :: Django :: 3.1
Classifier: Framework :: Django :: 3.2
Classifier: Framework :: Django :: 4.0
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Topic :: Software Development :: Libraries
Classifier: Topic :: Utilities
Requires-Python: >=3.6
Description-Content-Type: text/x-rst
Provides-Extra: hiredis
License-File: LICENSE
License-File: AUTHORS.rst

==============================
Redis cache backend for Django
==============================

.. image:: https://jazzband.co/static/img/badge.svg
   :target: https://jazzband.co/
   :alt: Jazzband

.. image:: https://github.com/jazzband/django-redis/actions/workflows/ci.yml/badge.svg
   :target: https://github.com/jazzband/django-redis/actions/workflows/ci.yml
   :alt: GitHub Actions

.. image:: https://codecov.io/gh/jazzband/django-redis/branch/master/graph/badge.svg
   :target: https://codecov.io/gh/jazzband/django-redis
   :alt: Coverage

.. image:: https://img.shields.io/pypi/v/django-redis.svg?style=flat
   :target: https://pypi.org/project/django-redis/

This is a `Jazzband <https://jazzband.co/>`_ project. By contributing you
agree to abide by the `Contributor Code of Conduct
<https://jazzband.co/about/conduct>`_ and follow the `guidelines
<https://jazzband.co/about/guidelines>`_.

Introduction
------------

django-redis is a BSD licensed, full featured Redis cache and session backend
for Django.

Why use django-redis?
~~~~~~~~~~~~~~~~~~~~~

- Uses native redis-py URL notation connection strings
- Pluggable clients
- Pluggable parsers
- Pluggable serializers
- Primary/secondary support in the default client
- Comprehensive test suite
- Used in production in several projects as cache and session storage
- Supports infinite timeouts
- Facilities for raw access to the Redis client/connection pool
- Highly configurable (can emulate memcached exception behavior, for example)
- Unix sockets supported by default

Requirements
~~~~~~~~~~~~

- `Python`_ 3.6+
- `Django`_ 2.2+
- `redis-py`_ 3.0+
- `Redis server`_ 2.8+

.. _Python: https://www.python.org/downloads/
.. _Django: https://www.djangoproject.com/download/
.. _redis-py: https://pypi.org/project/redis/
.. _Redis server: https://redis.io/download

User guide
----------

Installation
~~~~~~~~~~~~

Install with pip:

.. code-block:: console

    $ python -m pip install django-redis

Configure as cache backend
~~~~~~~~~~~~~~~~~~~~~~~~~~

To start using django-redis, change your Django cache settings to something
like:

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://127.0.0.1:6379/1",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
            }
        }
    }

django-redis uses the redis-py native URL notation for connection strings,
which allows better interoperability and keeps the connection string in a more
"standard" form.

Some examples:

- ``redis://[[username]:[password]]@localhost:6379/0``
- ``rediss://[[username]:[password]]@localhost:6379/0``
- ``unix://[[username]:[password]]@/path/to/socket.sock?db=0``

Three URL schemes are supported:

- ``redis://``: creates a normal TCP socket connection
- ``rediss://``: creates an SSL wrapped TCP socket connection
- ``unix://``: creates a Unix Domain Socket connection

There are several ways to specify a database number:

- A ``db`` querystring option, e.g. ``redis://localhost?db=0``
- If using the ``redis://`` scheme, the path argument of the URL, e.g.
  ``redis://localhost/0``

When using `Redis' ACLs <https://redis.io/topics/acl>`_, you will need to add
the username to the URL (and provide the password with the cache ``OPTIONS``).
The login for the user ``django`` would look like this:

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://django@localhost:6379/0",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
                "PASSWORD": "mysecret"
            }
        }
    }

An alternative is to write both username and password into the URL:

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://django:mysecret@localhost:6379/0",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
            }
        }
    }

In some circumstances the password you need to connect to Redis is not
URL-safe; in that case you can escape it, or simply use the convenience
``PASSWORD`` option in the ``OPTIONS`` dict:

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://127.0.0.1:6379/1",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
                "PASSWORD": "mysecret"
            }
        }
    }

Note that this option does not overwrite a password already given in the URI,
so if you have set the password in the URI, this setting will be ignored.
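
If you would rather keep an unsafe password in the URL itself, one way to
escape it is ``urllib.parse.quote`` from the standard library. A minimal
sketch (the password shown is a made-up example, and ``LOCATION`` stands for
the value you would put in ``CACHES``):

.. code-block:: python

    from urllib.parse import quote

    # Percent-encode every reserved character so the URL parses correctly.
    password = quote("p@ss/word with spaces", safe="")
    LOCATION = f"redis://django:{password}@localhost:6379/0"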

Configure as session backend
~~~~~~~~~~~~~~~~~~~~~~~~~~~~

By default, Django can use any cache backend as a session backend; you benefit
from this by using django-redis as the backend for session storage without
installing any additional packages:

.. code-block:: python

    SESSION_ENGINE = "django.contrib.sessions.backends.cache"
    SESSION_CACHE_ALIAS = "default"

Testing with django-redis
~~~~~~~~~~~~~~~~~~~~~~~~~

django-redis supports customizing the underlying Redis client (see "Pluggable
clients"). This can be used for testing purposes.

If you want to flush all data from the cache after a test, add the following
lines to your test class:

.. code-block:: python

    from django_redis import get_redis_connection

    def tearDown(self):
        get_redis_connection("default").flushall()

Advanced usage
--------------

Pickle version
~~~~~~~~~~~~~~

For almost all values, django-redis uses pickle to serialize objects.

The ``pickle.DEFAULT_PROTOCOL`` version of pickle is used by default to ensure
safe upgrades and compatibility across Python versions. If you want to set a
specific version, you can do so using the ``PICKLE_VERSION`` option:

.. code-block:: python

    CACHES = {
        "default": {
            # ...
            "OPTIONS": {
                "PICKLE_VERSION": -1  # Will use highest protocol version available
            }
        }
    }

Socket timeout
~~~~~~~~~~~~~~

Socket timeouts can be set using the ``SOCKET_TIMEOUT`` and
``SOCKET_CONNECT_TIMEOUT`` options:

.. code-block:: python

    CACHES = {
        "default": {
            # ...
            "OPTIONS": {
                "SOCKET_CONNECT_TIMEOUT": 5,  # seconds
                "SOCKET_TIMEOUT": 5,  # seconds
            }
        }
    }

``SOCKET_CONNECT_TIMEOUT`` is the timeout for the connection to be established
and ``SOCKET_TIMEOUT`` is the timeout for read and write operations after the
connection is established.

Compression support
~~~~~~~~~~~~~~~~~~~

django-redis comes with compression support out of the box, but it is
deactivated by default. You can activate it by configuring a compressor
backend:

.. code-block:: python

    CACHES = {
        "default": {
            # ...
            "OPTIONS": {
                "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor",
            }
        }
    }

Here is an example of how to make it work with the *lzma* compression format:

.. code-block:: python

    import lzma

    CACHES = {
        "default": {
            # ...
            "OPTIONS": {
                "COMPRESSOR": "django_redis.compressors.lzma.LzmaCompressor",
            }
        }
    }

*Lz4* compression support (requires the lz4 library):

.. code-block:: python

    import lz4

    CACHES = {
        "default": {
            # ...
            "OPTIONS": {
                "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor",
            }
        }
    }

*Zstandard (zstd)* compression support (requires the pyzstd library):

.. code-block:: python

    import pyzstd

    CACHES = {
        "default": {
            # ...
            "OPTIONS": {
                "COMPRESSOR": "django_redis.compressors.zstd.ZStdCompressor",
            }
        }
    }
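
You can also plug in a compressor of your own. A minimal sketch, assuming the
``BaseCompressor`` interface (``compress``/``decompress`` over ``bytes``)
mirrors the bundled compressors — check ``django_redis/compressors`` in your
installed version before relying on it; the class name here is hypothetical:

.. code-block:: python

    import zlib

    from django_redis.compressors.base import BaseCompressor

    class FastZlibCompressor(BaseCompressor):
        """Hypothetical compressor trading ratio for speed (zlib level 1)."""

        def compress(self, value: bytes) -> bytes:
            return zlib.compress(value, 1)

        def decompress(self, value: bytes) -> bytes:
            # Handling of values stored before compression was enabled
            # is omitted in this sketch.
            return zlib.decompress(value)

Then point ``"COMPRESSOR"`` at the dotted path of your class, exactly as with
the bundled compressors.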
"OPTIONS": { "SOCKET_CONNECT_TIMEOUT": 5, # seconds "SOCKET_TIMEOUT": 5, # seconds } } } ``SOCKET_CONNECT_TIMEOUT`` is the timeout for the connection to be established and ``SOCKET_TIMEOUT`` is the timeout for read and write operations after the connection is established. Compression support ~~~~~~~~~~~~~~~~~~~ django-redis comes with compression support out of the box, but is deactivated by default. You can activate it setting up a concrete backend: .. code-block:: python CACHES = { "default": { # ... "OPTIONS": { "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor", } } } Let see an example, of how make it work with *lzma* compression format: .. code-block:: python import lzma CACHES = { "default": { # ... "OPTIONS": { "COMPRESSOR": "django_redis.compressors.lzma.LzmaCompressor", } } } *Lz4* compression support (requires the lz4 library): .. code-block:: python import lz4 CACHES = { "default": { # ... "OPTIONS": { "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor", } } } *Zstandard (zstd)* compression support (requires the pyzstd library): .. code-block:: python import pyzstd CACHES = { "default": { # ... "OPTIONS": { "COMPRESSOR": "django_redis.compressors.zstd.ZStdCompressor", } } } Memcached exceptions behavior ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In some situations, when Redis is only used for cache, you do not want exceptions when Redis is down. This is default behavior in the memcached backend and it can be emulated in django-redis. For setup memcached like behaviour (ignore connection exceptions), you should set ``IGNORE_EXCEPTIONS`` settings on your cache configuration: .. code-block:: python CACHES = { "default": { # ... "OPTIONS": { "IGNORE_EXCEPTIONS": True, } } } Also, you can apply the same settings to all configured caches, you can set the global flag in your settings: .. code-block:: python DJANGO_REDIS_IGNORE_EXCEPTIONS = True Log Ignored Exceptions ~~~~~~~~~~~~~~~~~~~~~~ When ignoring exceptions with ``IGNORE_EXCEPTIONS`` or ``DJANGO_REDIS_IGNORE_EXCEPTIONS``, you may optionally log exceptions using the global variable ``DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS`` in your settings file:: DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS = True If you wish to specify the logger in which the exceptions are output, simply set the global variable ``DJANGO_REDIS_LOGGER`` to the string name and/or path of the desired logger. This will default to ``__name__`` if no logger is specified and ``DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS`` is ``True``:: DJANGO_REDIS_LOGGER = 'some.specified.logger' Infinite timeout ~~~~~~~~~~~~~~~~ django-redis comes with infinite timeouts support out of the box. And it behaves in same way as django backend contract specifies: - ``timeout=0`` expires the value immediately. - ``timeout=None`` infinite timeout .. code-block:: python cache.set("key", "value", timeout=None) Get ttl (time-to-live) from key ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ With Redis, you can access to ttl of any stored key, for it, django-redis exposes ``ttl`` function. It returns: - 0 if key does not exists (or already expired). - None for keys that exists but does not have any expiration. - ttl value for any volatile key (any key that has expiration). .. code-block:: pycon >>> from django.core.cache import cache >>> cache.set("foo", "value", timeout=25) >>> cache.ttl("foo") 25 >>> cache.ttl("not-existent") 0 With Redis, you can access to ttl of any stored key in milliseconds, for it, django-redis exposes ``pttl`` function. .. 

Scan & Delete keys in bulk
~~~~~~~~~~~~~~~~~~~~~~~~~~

django-redis comes with some additional methods that help with searching or
deleting keys using glob patterns.

.. code-block:: pycon

    >>> from django.core.cache import cache
    >>> cache.keys("foo_*")
    ["foo_1", "foo_2"]

A simple search like this will return all matched values. In databases with a
large number of keys this isn't a suitable method. Instead, you can use the
``iter_keys`` function, which works like the ``keys`` function but uses Redis
server-side cursors. Calling ``iter_keys`` will return a generator that you
can then iterate over efficiently.

.. code-block:: pycon

    >>> from django.core.cache import cache
    >>> cache.iter_keys("foo_*")
    <generator object at 0x...>
    >>> next(cache.iter_keys("foo_*"))
    "foo_1"

For deleting keys, you should use ``delete_pattern``, which has the same glob
pattern syntax as the ``keys`` function and returns the number of deleted
keys.

.. code-block:: pycon

    >>> from django.core.cache import cache
    >>> cache.delete_pattern("foo_*")
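
``delete_pattern`` also accepts an ``itersize`` keyword argument controlling
the batch size of the underlying server-side scan (it defaults to the
``DJANGO_REDIS_SCAN_ITERSIZE`` setting, as the ``cache.py`` source later in
this archive shows). For example:

.. code-block:: pycon

    >>> from django.core.cache import cache
    >>> cache.delete_pattern("foo_*", itersize=100_000)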

Redis native commands
~~~~~~~~~~~~~~~~~~~~~

django-redis has limited support for some Redis atomic operations, such as the
``SETNX`` and ``INCR`` commands.

You can use the ``SETNX`` command through the backend ``set()`` method with
the ``nx`` parameter:

.. code-block:: pycon

    >>> from django.core.cache import cache
    >>> cache.set("key", "value1", nx=True)
    True
    >>> cache.set("key", "value2", nx=True)
    False
    >>> cache.get("key")
    "value1"

Also, the ``incr`` and ``decr`` methods use Redis atomic operations when the
value stored at a key is suitable for them.
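
For instance, an integer counter stored through the cache API can be adjusted
atomically. An illustrative session (the key name is made up):

.. code-block:: pycon

    >>> from django.core.cache import cache
    >>> cache.set("counter", 1, timeout=None)
    >>> cache.incr("counter")
    2
    >>> cache.decr("counter", 2)
    0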

Raw client access
~~~~~~~~~~~~~~~~~

In some situations your application requires access to a raw Redis client to
use some advanced features that aren't exposed by the Django cache interface.
To avoid storing another setting for creating a raw connection, django-redis
exposes a function with which you can obtain a raw client reusing the cache
connection string: ``get_redis_connection(alias)``.

.. code-block:: pycon

    >>> from django_redis import get_redis_connection
    >>> con = get_redis_connection("default")
    >>> con
    <redis.client.Redis object at 0x...>

WARNING: Not all pluggable clients support this feature.
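
With the raw client in hand you can call any redis-py command directly; note
that keys written this way bypass django-redis key prefixing and
serialization. An illustrative sketch (key and field names are made up):

.. code-block:: python

    from django_redis import get_redis_connection

    con = get_redis_connection("default")
    con.ping()                            # check that the connection is alive
    con.hset("profile:1", "name", "ana")  # Redis hash, not exposed by the cache API
    con.expire("profile:1", 3600)         # raw expiry in seconds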

Connection pools
~~~~~~~~~~~~~~~~

Behind the scenes, django-redis uses the underlying redis-py connection pool
implementation, and exposes a simple way to configure it. Alternatively, you
can directly customize connection/connection pool creation for a backend.

The default redis-py behavior is to not close connections, recycling them when
possible.

Configure default connection pool
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The default connection pool is simple. For example, you can customize the
maximum number of connections in the pool by setting
``CONNECTION_POOL_KWARGS`` in the ``CACHES`` setting:

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            # ...
            "OPTIONS": {
                "CONNECTION_POOL_KWARGS": {"max_connections": 100}
            }
        }
    }

You can verify how many connections the pool has opened with the following
snippet:

.. code-block:: python

    from django_redis import get_redis_connection

    # Use the name you have defined for Redis in settings.CACHES
    r = get_redis_connection("default")
    connection_pool = r.connection_pool
    print("Created connections so far: %d" % connection_pool._created_connections)

Since the default connection pool passes all keyword arguments it doesn't use
to its connections, you can also customize the connections that the pool makes
by adding those options to ``CONNECTION_POOL_KWARGS``:

.. code-block:: python

    CACHES = {
        "default": {
            # ...
            "OPTIONS": {
                "CONNECTION_POOL_KWARGS": {"max_connections": 100, "retry_on_timeout": True}
            }
        }
    }

Use your own connection pool subclass
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Sometimes you want to use your own subclass of the connection pool. This is
possible with django-redis using the ``CONNECTION_POOL_CLASS`` parameter in
the backend options.

.. code-block:: python

    from redis.connection import ConnectionPool

    class MyOwnPool(ConnectionPool):
        # Just doing nothing, only for example purposes
        pass

.. code-block:: python

    # Omitting all backend declaration boilerplate code.
    "OPTIONS": {
        "CONNECTION_POOL_CLASS": "myproj.mypool.MyOwnPool",
    }

Customize connection factory
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If none of the previous methods satisfies you, you can get in the middle of
the django-redis connection factory process and customize or completely
rewrite it.

By default, django-redis creates connections through the
``django_redis.pool.ConnectionFactory`` class that is specified in the global
Django setting ``DJANGO_REDIS_CONNECTION_FACTORY``.

.. code-block:: python

    class ConnectionFactory(object):
        def get_connection_pool(self, params: dict):
            # Given connection parameters in the `params` argument, return a
            # new connection pool. It should be overwritten if you want to do
            # something before/after creating the connection pool, or return
            # your own connection pool.
            pass

        def get_connection(self, params: dict):
            # Given connection parameters in the `params` argument, return a
            # new connection. It should be overwritten if you want to do
            # something before/after creating a new connection. The default
            # implementation uses `get_connection_pool` to obtain a pool and
            # create a new connection in the newly obtained pool.
            pass

        def get_or_create_connection_pool(self, params: dict):
            # This is a high layer on top of `get_connection_pool` for
            # implementing a cache of created connection pools. It should be
            # overwritten if you want to change the default behavior.
            pass

        def make_connection_params(self, url: str) -> dict:
            # The responsibility of this method is to convert basic connection
            # parameters and other settings into fully connection-pool-ready
            # connection parameters.
            pass

        def connect(self, url: str):
            # This is really a public API and entry point for this factory
            # class. This encapsulates the main logic of creating the
            # previously mentioned `params` using `make_connection_params` and
            # creating a new connection using the `get_connection` method.
            pass
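
As a sketch of the kind of customization this enables — not an official
recipe — you could subclass the factory to inspect or adjust parameters before
the pool is created (module and class names here are hypothetical):

.. code-block:: python

    # myproj/factory.py
    import logging

    from django_redis.pool import ConnectionFactory

    logger = logging.getLogger(__name__)

    class LoggingConnectionFactory(ConnectionFactory):
        def make_connection_params(self, url):
            # Delegate to the stock implementation, then log the target URL.
            params = super().make_connection_params(url)
            logger.debug("Building connection params for %s", url)
            return params

Enable it by pointing the global setting at the dotted path:

.. code-block:: python

    DJANGO_REDIS_CONNECTION_FACTORY = "myproj.factory.LoggingConnectionFactory"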

Use the sentinel connection factory
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

In order to facilitate using `Redis Sentinels`_, django-redis comes with a
built-in sentinel connection factory, which creates sentinel connection
pools. In order to enable this functionality you should add the following:

.. code-block:: python

    # Enable the alternate connection factory.
    DJANGO_REDIS_CONNECTION_FACTORY = 'django_redis.pool.SentinelConnectionFactory'

    # These sentinels are shared between all the examples, and are passed
    # directly to redis Sentinel. These can also be defined inline.
    SENTINELS = [
        ('sentinel-1', 26379),
        ('sentinel-2', 26379),
        ('sentinel-3', 26379),
    ]

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            # The hostname in LOCATION is the primary (service / master) name
            "LOCATION": "redis://service_name/db",
            "OPTIONS": {
                # While the default client will work, this will check you
                # have configured things correctly, and also create a
                # primary and replica pool for the service specified by
                # LOCATION rather than requiring two URLs.
                "CLIENT_CLASS": "django_redis.client.SentinelClient",

                # Sentinels which are passed directly to redis Sentinel.
                "SENTINELS": SENTINELS,

                # kwargs for redis Sentinel (optional).
                "SENTINEL_KWARGS": {},

                # You can still override the connection pool (optional).
                "CONNECTION_POOL_CLASS": "redis.sentinel.SentinelConnectionPool",
            },
        },

        # A minimal example using the SentinelClient.
        "minimal": {
            "BACKEND": "django_redis.cache.RedisCache",

            # The SentinelClient will use this location for both the primaries
            # and replicas.
            "LOCATION": "redis://minimal_service_name/db",

            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.SentinelClient",
                "SENTINELS": SENTINELS,
            },
        },

        # A minimal example using the DefaultClient.
        "other": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": [
                # The DefaultClient is [primary, replicas...], but with the
                # SentinelConnectionPool it only requires one "is_master=0".
                "redis://other_service_name/db?is_master=1",
                "redis://other_service_name/db?is_master=0",
            ],
            "OPTIONS": {"SENTINELS": SENTINELS},
        },

        # A minimal example using only replicas in read-only mode (and
        # the DefaultClient).
        "readonly": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://readonly_service_name/db?is_master=0",
            "OPTIONS": {"SENTINELS": SENTINELS},
        },
    }

.. _Redis Sentinels: https://redis.io/topics/sentinel

Pluggable parsers
~~~~~~~~~~~~~~~~~

redis-py (the Python Redis client used by django-redis) comes with a pure
Python Redis parser that works very well for most common tasks, but if you
want some performance boost, you can use hiredis.

hiredis is a Redis client written in C, and it has its own parser that can be
used with django-redis.

.. code-block:: python

    "OPTIONS": {
        "PARSER_CLASS": "redis.connection.HiredisParser",
    }

Pluggable clients
~~~~~~~~~~~~~~~~~

django-redis is designed to be very flexible and highly configurable. To that
end, it exposes pluggable client backends that make it easy to extend the
default behavior, and it ships with a few of them out of the box.

Default client
^^^^^^^^^^^^^^

Almost everything about the default client has already been covered, with one
exception: the default client comes with replication support.

To connect to a Redis replication setup, you should change the ``LOCATION`` to
something like:

.. code-block:: python

    "LOCATION": [
        "redis://127.0.0.1:6379/1",
        "redis://127.0.0.1:6378/1",
    ]

The first connection string represents the primary server and the rest the
replica servers.

WARNING: Replication setup is not heavily tested in production environments.

Shard client
^^^^^^^^^^^^

This pluggable client implements client-side sharding. It inherits almost all
functionality from the default client. To use it, change your cache settings
to something like this:

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": [
                "redis://127.0.0.1:6379/1",
                "redis://127.0.0.1:6379/2",
            ],
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.ShardClient",
            }
        }
    }

WARNING: The shard client is still experimental, so be careful when using it
in production environments.

Herd client
^^^^^^^^^^^

This pluggable client helps deal with the thundering herd problem. You can
read more about it on `Wikipedia
<https://en.wikipedia.org/wiki/Thundering_herd_problem>`_.

Like the previous pluggable clients, it inherits all functionality from the
default client, adding some additional methods for getting/setting keys.

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://127.0.0.1:6379/1",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.HerdClient",
            }
        }
    }

This client exposes an additional setting:

- ``CACHE_HERD_TIMEOUT``: Set the default herd timeout. (Default value: 60s)

Pluggable serializer
~~~~~~~~~~~~~~~~~~~~

The pluggable clients serialize data before sending it to the server. By
default, django-redis serializes the data using the Python ``pickle`` module.
This is very flexible and can handle a large range of object types.

To serialize using JSON instead, the serializer ``JSONSerializer`` is also
available.

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://127.0.0.1:6379/1",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
                "SERIALIZER": "django_redis.serializers.json.JSONSerializer",
            }
        }
    }

There's also support for serialization using `MsgPack`_ (that requires the
msgpack library):

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://127.0.0.1:6379/1",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
                "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer",
            }
        }
    }

.. _MsgPack: http://msgpack.org/

Pluggable Redis client
~~~~~~~~~~~~~~~~~~~~~~

django-redis uses the Redis client ``redis.client.StrictRedis`` by default. It
is possible to use an alternative client.

You can customize the client used by setting ``REDIS_CLIENT_CLASS`` in the
``CACHES`` setting. Optionally, you can provide arguments to this class by
setting ``REDIS_CLIENT_KWARGS``.

.. code-block:: python

    CACHES = {
        "default": {
            "OPTIONS": {
                "REDIS_CLIENT_CLASS": "my.module.ClientClass",
                "REDIS_CLIENT_KWARGS": {"some_setting": True},
            }
        }
    }

Closing Connections
~~~~~~~~~~~~~~~~~~~

The default django-redis behavior on ``close()`` is to keep the connections to
the Redis server.

You can change this behaviour for all caches by setting
``DJANGO_REDIS_CLOSE_CONNECTION = True`` in the Django settings (globally), or
per cache by setting ``CLOSE_CONNECTION: True`` in the ``OPTIONS`` of each
configured cache. Setting it to True will instruct django-redis to close all
the connections (since v4.12.2), irrespective of their current usage.

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://127.0.0.1:6379/1",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
                "CLOSE_CONNECTION": True,
            }
        }
    }

SSL/TLS and Self-Signed certificates
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

In case you encounter a Redis server offering a TLS connection using a
self-signed certificate, you may disable certificate verification with the
following:

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "rediss://127.0.0.1:6379/1",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
                "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": None}
            }
        }
    }

License
-------

.. code-block:: text

    Copyright (c) 2011-2015 Andrey Antukh
    Copyright (c) 2011 Sean Bleier

    All rights reserved.

    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions
    are met:

    1. Redistributions of source code must retain the above copyright
       notice, this list of conditions and the following disclaimer.
    2. Redistributions in binary form must reproduce the above copyright
       notice, this list of conditions and the following disclaimer in the
       documentation and/or other materials provided with the distribution.
    3. The name of the author may not be used to endorse or promote
       products derived from this software without specific prior written
       permission.

    THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS`` AND ANY EXPRESS OR
    IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
    WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
    DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
    INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
    (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
    SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
    IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
    POSSIBILITY OF SUCH DAMAGE.

django-redis-5.2.0/django_redis/__init__.py
===========================================

VERSION = (5, 2, 0)
__version__ = ".".join(map(str, VERSION))


def get_redis_connection(alias="default", write=True):
    """
    Helper used for obtaining a raw redis client.
    """
    from django.core.cache import caches

    cache = caches[alias]

    if not hasattr(cache, "client"):
        raise NotImplementedError("This backend does not support this feature")

    if not hasattr(cache.client, "get_client"):
        raise NotImplementedError("This backend does not support this feature")

    return cache.client.get_client(write)
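
As the signature above shows, ``get_redis_connection`` accepts a ``write``
flag. In a replication or Sentinel setup, passing ``write=False`` should hand
back a client bound to a read (replica) server — a hedged illustration, worth
verifying against your configured client:

.. code-block:: python

    from django_redis import get_redis_connection

    read_client = get_redis_connection("default", write=False)  # replica, when configured
    write_client = get_redis_connection("default")              # primary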
""" if method is None: return functools.partial(omit_exception, return_value=return_value) @functools.wraps(method) def _decorator(self, *args, **kwargs): try: return method(self, *args, **kwargs) except ConnectionInterrupted as e: if self._ignore_exceptions: if self._log_ignored_exceptions: self.logger.error(str(e)) return return_value raise e.__cause__ return _decorator class RedisCache(BaseCache): def __init__(self, server: str, params: Dict[str, Any]) -> None: super().__init__(params) self._server = server self._params = params options = params.get("OPTIONS", {}) self._client_cls = options.get( "CLIENT_CLASS", "django_redis.client.DefaultClient" ) self._client_cls = import_string(self._client_cls) self._client = None self._ignore_exceptions = options.get( "IGNORE_EXCEPTIONS", getattr(settings, "DJANGO_REDIS_IGNORE_EXCEPTIONS", False), ) self._log_ignored_exceptions = getattr( settings, "DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS", False ) self.logger = ( logging.getLogger(getattr(settings, "DJANGO_REDIS_LOGGER", __name__)) if self._log_ignored_exceptions else None ) @property def client(self): """ Lazy client connection property. """ if self._client is None: self._client = self._client_cls(self._server, self._params, self) return self._client @omit_exception def set(self, *args, **kwargs): return self.client.set(*args, **kwargs) @omit_exception def incr_version(self, *args, **kwargs): return self.client.incr_version(*args, **kwargs) @omit_exception def add(self, *args, **kwargs): return self.client.add(*args, **kwargs) def get(self, key, default=None, version=None, client=None): value = self._get(key, default, version, client) if value is CONNECTION_INTERRUPTED: value = default return value @omit_exception(return_value=CONNECTION_INTERRUPTED) def _get(self, key, default, version, client): return self.client.get(key, default=default, version=version, client=client) @omit_exception def delete(self, *args, **kwargs): """returns a boolean instead of int since django version 3.1""" result = self.client.delete(*args, **kwargs) return bool(result) if DJANGO_VERSION >= (3, 1, 0) else result @omit_exception def delete_pattern(self, *args, **kwargs): kwargs["itersize"] = kwargs.get("itersize", DJANGO_REDIS_SCAN_ITERSIZE) return self.client.delete_pattern(*args, **kwargs) @omit_exception def delete_many(self, *args, **kwargs): return self.client.delete_many(*args, **kwargs) @omit_exception def clear(self): return self.client.clear() @omit_exception(return_value={}) def get_many(self, *args, **kwargs): return self.client.get_many(*args, **kwargs) @omit_exception def set_many(self, *args, **kwargs): return self.client.set_many(*args, **kwargs) @omit_exception def incr(self, *args, **kwargs): return self.client.incr(*args, **kwargs) @omit_exception def decr(self, *args, **kwargs): return self.client.decr(*args, **kwargs) @omit_exception def has_key(self, *args, **kwargs): return self.client.has_key(*args, **kwargs) @omit_exception def keys(self, *args, **kwargs): return self.client.keys(*args, **kwargs) @omit_exception def iter_keys(self, *args, **kwargs): return self.client.iter_keys(*args, **kwargs) @omit_exception def ttl(self, *args, **kwargs): return self.client.ttl(*args, **kwargs) @omit_exception def pttl(self, *args, **kwargs): return self.client.pttl(*args, **kwargs) @omit_exception def persist(self, *args, **kwargs): return self.client.persist(*args, **kwargs) @omit_exception def expire(self, *args, **kwargs): return self.client.expire(*args, **kwargs) @omit_exception def expire_at(self, *args, 
**kwargs): return self.client.expire_at(*args, **kwargs) @omit_exception def pexpire(self, *args, **kwargs): return self.client.pexpire(*args, **kwargs) @omit_exception def pexpire_at(self, *args, **kwargs): return self.client.pexpire_at(*args, **kwargs) @omit_exception def lock(self, *args, **kwargs): return self.client.lock(*args, **kwargs) @omit_exception def close(self, **kwargs): self.client.close(**kwargs) @omit_exception def touch(self, *args, **kwargs): return self.client.touch(*args, **kwargs) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1640185721.0984051 django-redis-5.2.0/django_redis/client/0000755000175100001710000000000000000000000020572 5ustar00runnerdocker00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/client/__init__.py0000644000175100001710000000032200000000000022700 0ustar00runnerdocker00000000000000from .default import DefaultClient from .herd import HerdClient from .sentinel import SentinelClient from .sharded import ShardClient __all__ = ["DefaultClient", "HerdClient", "SentinelClient", "ShardClient"] ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/client/default.py0000644000175100001710000005732500000000000022604 0ustar00runnerdocker00000000000000import random import re import socket from collections import OrderedDict from datetime import datetime from typing import Any, Dict, Iterator, List, Optional, Union from django.conf import settings from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache, get_key_func from django.core.exceptions import ImproperlyConfigured from django.utils.module_loading import import_string from redis import Redis from redis.exceptions import ConnectionError, ResponseError, TimeoutError from .. 
import pool
from ..exceptions import CompressorError, ConnectionInterrupted
from ..util import CacheKey

_main_exceptions = (TimeoutError, ResponseError, ConnectionError, socket.timeout)

special_re = re.compile("([*?[])")


def glob_escape(s: str) -> str:
    return special_re.sub(r"[\1]", s)


class DefaultClient:
    def __init__(self, server, params: Dict[str, Any], backend: BaseCache) -> None:
        self._backend = backend
        self._server = server
        self._params = params

        self.reverse_key = get_key_func(
            params.get("REVERSE_KEY_FUNCTION")
            or "django_redis.util.default_reverse_key"
        )

        if not self._server:
            raise ImproperlyConfigured("Missing connection string")

        if not isinstance(self._server, (list, tuple, set)):
            self._server = self._server.split(",")

        self._clients: List[Optional[Redis]] = [None] * len(self._server)
        self._options = params.get("OPTIONS", {})
        self._replica_read_only = self._options.get("REPLICA_READ_ONLY", True)

        serializer_path = self._options.get(
            "SERIALIZER", "django_redis.serializers.pickle.PickleSerializer"
        )
        serializer_cls = import_string(serializer_path)

        compressor_path = self._options.get(
            "COMPRESSOR", "django_redis.compressors.identity.IdentityCompressor"
        )
        compressor_cls = import_string(compressor_path)

        self._serializer = serializer_cls(options=self._options)
        self._compressor = compressor_cls(options=self._options)

        self.connection_factory = pool.get_connection_factory(options=self._options)

    def __contains__(self, key: Any) -> bool:
        return self.has_key(key)

    def get_next_client_index(
        self, write: bool = True, tried: Optional[List[int]] = None
    ) -> int:
        """
        Return the index of the next client to use for a read.

        This implements the default strategy for picking a read client in a
        replication setup. Override this method if you need a different
        behaviour.
        """
        if tried is None:
            tried = list()

        if tried and len(tried) < len(self._server):
            not_tried = [i for i in range(0, len(self._server)) if i not in tried]
            return random.choice(not_tried)

        if write or len(self._server) == 1:
            return 0

        return random.randint(1, len(self._server) - 1)

    def get_client(
        self,
        write: bool = True,
        tried: Optional[List[int]] = None,
        show_index: bool = False,
    ):
        """
        Obtain a raw redis client.

        This method is used by almost all cache backend operations to obtain
        a native redis client/connection instance.
        """
        index = self.get_next_client_index(write=write, tried=tried)

        if self._clients[index] is None:
            self._clients[index] = self.connect(index)

        if show_index:
            return self._clients[index], index
        else:
            return self._clients[index]

    def connect(self, index: int = 0) -> Redis:
        """
        Given a connection index, return a new raw redis client/connection
        instance. The index selects which connection string is used in
        replication setups; in normal setups it is 0.
        """
        return self.connection_factory.connect(self._server[index])

    def disconnect(self, index=0, client=None):
        """Delegates to the connection factory to disconnect the client."""
        if not client:
            client = self._clients[index]
        return self.connection_factory.disconnect(client) if client else None

    def set(
        self,
        key: Any,
        value: Any,
        timeout: Optional[float] = DEFAULT_TIMEOUT,
        version: Optional[int] = None,
        client: Optional[Redis] = None,
        nx: bool = False,
        xx: bool = False,
    ) -> bool:
        """
        Persist a value to the cache, and set an optional expiration time.

        Also supports an optional ``nx`` parameter: if ``True``, Redis SETNX
        semantics are used and the key is only written when it does not
        already exist.
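        The ``xx`` flag is the inverse: the key is only written when it
        already exists. A ``timeout`` of ``None`` stores the key without
        expiration, while a timeout of zero or less deletes the key (or,
        with ``nx=True``, leaves any existing value untouched).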
""" nkey = self.make_key(key, version=version) nvalue = self.encode(value) if timeout is DEFAULT_TIMEOUT: timeout = self._backend.default_timeout original_client = client tried: List[int] = [] while True: try: if client is None: client, index = self.get_client( write=True, tried=tried, show_index=True ) if timeout is not None: # Convert to milliseconds timeout = int(timeout * 1000) if timeout <= 0: if nx: # Using negative timeouts when nx is True should # not expire (in our case delete) the value if it exists. # Obviously expire not existent value is noop. return not self.has_key(key, version=version, client=client) else: # redis doesn't support negative timeouts in ex flags # so it seems that it's better to just delete the key # than to set it and than expire in a pipeline return bool( self.delete(key, client=client, version=version) ) return bool(client.set(nkey, nvalue, nx=nx, px=timeout, xx=xx)) except _main_exceptions as e: if ( not original_client and not self._replica_read_only and len(tried) < len(self._server) ): tried.append(index) client = None continue raise ConnectionInterrupted(connection=client) from e def incr_version( self, key: Any, delta: int = 1, version: Optional[int] = None, client: Optional[Redis] = None, ) -> int: """ Adds delta to the cache version for the supplied key. Returns the new version. """ if client is None: client = self.get_client(write=True) if version is None: version = self._backend.version old_key = self.make_key(key, version) value = self.get(old_key, version=version, client=client) try: ttl = self.ttl(old_key, version=version, client=client) except _main_exceptions as e: raise ConnectionInterrupted(connection=client) from e if value is None: raise ValueError("Key '%s' not found" % key) if isinstance(key, CacheKey): new_key = self.make_key(key.original_key(), version=version + delta) else: new_key = self.make_key(key, version=version + delta) self.set(new_key, value, timeout=ttl, client=client) self.delete(old_key, client=client) return version + delta def add( self, key: Any, value: Any, timeout: Any = DEFAULT_TIMEOUT, version: Optional[Any] = None, client: Optional[Redis] = None, ) -> bool: """ Add a value to the cache, failing if the key already exists. Returns ``True`` if the object was added, ``False`` if not. """ return self.set(key, value, timeout, version=version, client=client, nx=True) def get( self, key: Any, default=None, version: Optional[int] = None, client: Optional[Redis] = None, ) -> Any: """ Retrieve a value from the cache. Returns decoded value if key is found, the default if not. 
""" if client is None: client = self.get_client(write=False) key = self.make_key(key, version=version) try: value = client.get(key) except _main_exceptions as e: raise ConnectionInterrupted(connection=client) from e if value is None: return default return self.decode(value) def persist( self, key: Any, version: Optional[int] = None, client: Optional[Redis] = None ) -> bool: if client is None: client = self.get_client(write=True) key = self.make_key(key, version=version) return client.persist(key) def expire( self, key: Any, timeout, version: Optional[int] = None, client: Optional[Redis] = None, ) -> bool: if client is None: client = self.get_client(write=True) key = self.make_key(key, version=version) return client.expire(key, timeout) def pexpire(self, key, timeout, version=None, client=None) -> bool: if client is None: client = self.get_client(write=True) key = self.make_key(key, version=version) # Temporary casting until https://github.com/redis/redis-py/issues/1664 # is fixed. return bool(client.pexpire(key, timeout)) def pexpire_at( self, key: Any, when: Union[datetime, int], version: Optional[int] = None, client: Optional[Redis] = None, ) -> bool: """ Set an expire flag on a ``key`` to ``when``, which can be represented as an integer indicating unix time or a Python datetime object. """ if client is None: client = self.get_client(write=True) key = self.make_key(key, version=version) return bool(client.pexpireat(key, when)) def expire_at( self, key: Any, when: Union[datetime, int], version: Optional[int] = None, client: Optional[Redis] = None, ) -> bool: """ Set an expire flag on a ``key`` to ``when``, which can be represented as an integer indicating unix time or a Python datetime object. """ if client is None: client = self.get_client(write=True) key = self.make_key(key, version=version) return client.expireat(key, when) def lock( self, key, version: Optional[int] = None, timeout=None, sleep=0.1, blocking_timeout=None, client: Optional[Redis] = None, thread_local=True, ): if client is None: client = self.get_client(write=True) key = self.make_key(key, version=version) return client.lock( key, timeout=timeout, sleep=sleep, blocking_timeout=blocking_timeout, thread_local=thread_local, ) def delete( self, key: Any, version: Optional[int] = None, prefix: Optional[str] = None, client: Optional[Redis] = None, ) -> int: """ Remove a key from the cache. """ if client is None: client = self.get_client(write=True) try: return client.delete(self.make_key(key, version=version, prefix=prefix)) except _main_exceptions as e: raise ConnectionInterrupted(connection=client) from e def delete_pattern( self, pattern: str, version: Optional[int] = None, prefix: Optional[str] = None, client: Optional[Redis] = None, itersize: Optional[int] = None, ) -> int: """ Remove all keys matching pattern. """ if client is None: client = self.get_client(write=True) pattern = self.make_pattern(pattern, version=version, prefix=prefix) try: count = 0 for key in client.scan_iter(match=pattern, count=itersize): client.delete(key) count += 1 return count except _main_exceptions as e: raise ConnectionInterrupted(connection=client) from e def delete_many( self, keys, version: Optional[int] = None, client: Optional[Redis] = None ): """ Remove multiple keys at once. 
""" if client is None: client = self.get_client(write=True) keys = [self.make_key(k, version=version) for k in keys] if not keys: return try: return client.delete(*keys) except _main_exceptions as e: raise ConnectionInterrupted(connection=client) from e def clear(self, client: Optional[Redis] = None) -> None: """ Flush all cache keys. """ if client is None: client = self.get_client(write=True) try: client.flushdb() except _main_exceptions as e: raise ConnectionInterrupted(connection=client) from e def decode(self, value: Union[bytes, int]) -> Any: """ Decode the given value. """ try: value = int(value) except (ValueError, TypeError): try: value = self._compressor.decompress(value) except CompressorError: # Handle little values, chosen to be not compressed pass value = self._serializer.loads(value) return value def encode(self, value: Any) -> Union[bytes, Any]: """ Encode the given value. """ if isinstance(value, bool) or not isinstance(value, int): value = self._serializer.dumps(value) value = self._compressor.compress(value) return value return value def get_many( self, keys, version: Optional[int] = None, client: Optional[Redis] = None ) -> OrderedDict: """ Retrieve many keys. """ if client is None: client = self.get_client(write=False) if not keys: return OrderedDict() recovered_data = OrderedDict() map_keys = OrderedDict((self.make_key(k, version=version), k) for k in keys) try: results = client.mget(*map_keys) except _main_exceptions as e: raise ConnectionInterrupted(connection=client) from e for key, value in zip(map_keys, results): if value is None: continue recovered_data[map_keys[key]] = self.decode(value) return recovered_data def set_many( self, data: Dict[Any, Any], timeout: Optional[float] = DEFAULT_TIMEOUT, version: Optional[int] = None, client: Optional[Redis] = None, ) -> None: """ Set a bunch of values in the cache at once from a dict of key/value pairs. This is much more efficient than calling set() multiple times. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. """ if client is None: client = self.get_client(write=True) try: pipeline = client.pipeline() for key, value in data.items(): self.set(key, value, timeout, version=version, client=pipeline) pipeline.execute() except _main_exceptions as e: raise ConnectionInterrupted(connection=client) from e def _incr( self, key: Any, delta: int = 1, version: Optional[int] = None, client: Optional[Redis] = None, ignore_key_check: bool = False, ) -> int: if client is None: client = self.get_client(write=True) key = self.make_key(key, version=version) try: try: # if key expired after exists check, then we get # key with wrong value and ttl -1. # use lua script for atomicity if not ignore_key_check: lua = """ local exists = redis.call('EXISTS', KEYS[1]) if (exists == 1) then return redis.call('INCRBY', KEYS[1], ARGV[1]) else return false end """ else: lua = """ return redis.call('INCRBY', KEYS[1], ARGV[1]) """ value = client.eval(lua, 1, key, delta) if value is None: raise ValueError("Key '%s' not found" % key) except ResponseError: # if cached value or total value is greater than 64 bit signed # integer. # elif int is encoded. so redis sees the data as string. 
                # In these situations redis raises a ResponseError.

                # Try to keep the TTL of the key.
                timeout = self.ttl(key, version=version, client=client)

                # TTL returns -2 if the key does not exist, meaning it has
                # already expired.
                if timeout == -2:
                    raise ValueError("Key '%s' not found" % key)
                value = self.get(key, version=version, client=client) + delta
                self.set(key, value, version=version, timeout=timeout, client=client)
        except _main_exceptions as e:
            raise ConnectionInterrupted(connection=client) from e
        return value

    def incr(
        self,
        key: Any,
        delta: int = 1,
        version: Optional[int] = None,
        client: Optional[Redis] = None,
        ignore_key_check: bool = False,
    ) -> int:
        """
        Add delta to the value in the cache. If the key does not exist, a
        ValueError is raised. If ``ignore_key_check=True`` then the key will
        be created and set to the delta value by default.
        """
        return self._incr(
            key=key,
            delta=delta,
            version=version,
            client=client,
            ignore_key_check=ignore_key_check,
        )

    def decr(
        self,
        key: Any,
        delta: int = 1,
        version: Optional[int] = None,
        client: Optional[Redis] = None,
    ) -> int:
        """
        Decrease the value in the cache by delta. If the key does not exist,
        a ValueError is raised.
        """
        return self._incr(key=key, delta=-delta, version=version, client=client)

    def ttl(
        self, key: Any, version: Optional[int] = None, client: Optional[Redis] = None
    ) -> Optional[int]:
        """
        Execute the TTL redis command and return the "time-to-live" of the
        specified key: 0 if the key does not exist (or has already expired),
        None if the key exists but is not volatile.
        """
        if client is None:
            client = self.get_client(write=False)

        key = self.make_key(key, version=version)
        if not client.exists(key):
            return 0

        t = client.ttl(key)

        if t >= 0:
            return t
        elif t == -1:
            return None
        elif t == -2:
            return 0
        else:
            # Should never reach here
            return None

    def pttl(self, key, version=None, client=None):
        """
        Execute the PTTL redis command and return the "time-to-live" of the
        specified key in milliseconds, with the same semantics as ``ttl``.
        """
        if client is None:
            client = self.get_client(write=False)

        key = self.make_key(key, version=version)
        if not client.exists(key):
            return 0

        t = client.pttl(key)

        if t >= 0:
            return t
        elif t == -1:
            return None
        elif t == -2:
            return 0
        else:
            # Should never reach here
            return None

    def has_key(
        self, key: Any, version: Optional[int] = None, client: Optional[Redis] = None
    ) -> bool:
        """
        Test if key exists.
        """
        if client is None:
            client = self.get_client(write=False)

        key = self.make_key(key, version=version)
        try:
            return client.exists(key) == 1
        except _main_exceptions as e:
            raise ConnectionInterrupted(connection=client) from e

    def iter_keys(
        self,
        search: str,
        itersize: Optional[int] = None,
        client: Optional[Redis] = None,
        version: Optional[int] = None,
    ) -> Iterator[str]:
        """
        Same as keys, but uses Redis (>= 2.8) server-side cursors for a
        memory-efficient iteration over matching keys.
        """
        if client is None:
            client = self.get_client(write=False)

        pattern = self.make_pattern(search, version=version)
        for item in client.scan_iter(match=pattern, count=itersize):
            yield self.reverse_key(item.decode())

    def keys(
        self, search: str, version: Optional[int] = None, client: Optional[Redis] = None
    ) -> List[Any]:
        """
        Execute the KEYS command and return the matched results.

        Warning: this can return a huge number of results; in that case it is
        strongly recommended to use ``iter_keys`` instead.
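        The match is performed server-side with the Redis ``KEYS`` command,
        which blocks the server while the whole keyspace is scanned.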
""" if client is None: client = self.get_client(write=False) pattern = self.make_pattern(search, version=version) try: return [self.reverse_key(k.decode()) for k in client.keys(pattern)] except _main_exceptions as e: raise ConnectionInterrupted(connection=client) from e def make_key( self, key: Any, version: Optional[Any] = None, prefix: Optional[str] = None ) -> CacheKey: if isinstance(key, CacheKey): return key if prefix is None: prefix = self._backend.key_prefix if version is None: version = self._backend.version return CacheKey(self._backend.key_func(key, prefix, version)) def make_pattern( self, pattern: str, version: Optional[int] = None, prefix: Optional[str] = None ) -> CacheKey: if isinstance(pattern, CacheKey): return pattern if prefix is None: prefix = self._backend.key_prefix prefix = glob_escape(prefix) if version is None: version = self._backend.version version_str = glob_escape(str(version)) return CacheKey(self._backend.key_func(pattern, prefix, version_str)) def close(self, **kwargs): close_flag = self._options.get( "CLOSE_CONNECTION", getattr(settings, "DJANGO_REDIS_CLOSE_CONNECTION", False), ) if close_flag: self.do_close_clients() def do_close_clients(self): """default implementation: Override in custom client""" num_clients = len(self._clients) for idx in range(num_clients): self.disconnect(index=idx) self._clients = [None] * num_clients def touch( self, key: Any, timeout: Optional[float] = DEFAULT_TIMEOUT, version: Optional[int] = None, client: Optional[Redis] = None, ) -> bool: """ Sets a new expiration for a key. """ if timeout is DEFAULT_TIMEOUT: timeout = self._backend.default_timeout if client is None: client = self.get_client(write=True) key = self.make_key(key, version=version) if timeout is None: return bool(client.persist(key)) else: # Convert to milliseconds timeout = int(timeout * 1000) return bool(client.pexpire(key, timeout)) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/client/herd.py0000644000175100001710000001104400000000000022066 0ustar00runnerdocker00000000000000import random import socket import time from collections import OrderedDict from django.conf import settings from redis.exceptions import ConnectionError, ResponseError, TimeoutError from ..exceptions import ConnectionInterrupted from .default import DEFAULT_TIMEOUT, DefaultClient _main_exceptions = (ConnectionError, ResponseError, TimeoutError, socket.timeout) class Marker: """ Dummy class for use as marker for herded keys. 
""" pass CACHE_HERD_TIMEOUT = getattr(settings, "CACHE_HERD_TIMEOUT", 60) def _is_expired(x): if x >= CACHE_HERD_TIMEOUT: return True val = x + random.randint(1, CACHE_HERD_TIMEOUT) if val >= CACHE_HERD_TIMEOUT: return True return False class HerdClient(DefaultClient): def __init__(self, *args, **kwargs): self._marker = Marker() super().__init__(*args, **kwargs) def _pack(self, value, timeout): herd_timeout = (timeout or self._backend.default_timeout) + int(time.time()) return self._marker, value, herd_timeout def _unpack(self, value): try: marker, unpacked, herd_timeout = value except (ValueError, TypeError): return value, False if not isinstance(marker, Marker): return value, False now = int(time.time()) if herd_timeout < now: x = now - herd_timeout return unpacked, _is_expired(x) return unpacked, False def set( self, key, value, timeout=DEFAULT_TIMEOUT, version=None, client=None, nx=False, xx=False, ): if timeout is DEFAULT_TIMEOUT: timeout = self._backend.default_timeout if timeout is None or timeout <= 0: return super().set( key, value, timeout=timeout, version=version, client=client, nx=nx, xx=xx, ) packed = self._pack(value, timeout) real_timeout = timeout + CACHE_HERD_TIMEOUT return super().set( key, packed, timeout=real_timeout, version=version, client=client, nx=nx ) def get(self, key, default=None, version=None, client=None): packed = super().get(key, default=default, version=version, client=client) val, refresh = self._unpack(packed) if refresh: return default return val def get_many(self, keys, version=None, client=None): if client is None: client = self.get_client(write=False) if not keys: return {} recovered_data = OrderedDict() new_keys = [self.make_key(key, version=version) for key in keys] map_keys = dict(zip(new_keys, keys)) try: results = client.mget(*new_keys) except _main_exceptions as e: raise ConnectionInterrupted(connection=client) from e for key, value in zip(new_keys, results): if value is None: continue val, refresh = self._unpack(self.decode(value)) recovered_data[map_keys[key]] = None if refresh else val return recovered_data def set_many( self, data, timeout=DEFAULT_TIMEOUT, version=None, client=None, herd=True ): """ Set a bunch of values in the cache at once from a dict of key/value pairs. This is much more efficient than calling set() multiple times. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. 
""" if client is None: client = self.get_client(write=True) set_function = self.set if herd else super().set try: pipeline = client.pipeline() for key, value in data.items(): set_function(key, value, timeout, version=version, client=pipeline) pipeline.execute() except _main_exceptions as e: raise ConnectionInterrupted(connection=client) from e def incr(self, *args, **kwargs): raise NotImplementedError() def decr(self, *args, **kwargs): raise NotImplementedError() def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None, client=None): if client is None: client = self.get_client(write=True) value = self.get(key, version=version, client=client) if value is None: return False self.set(key, value, timeout=timeout, version=version, client=client) return True ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/client/sentinel.py0000644000175100001710000000300700000000000022765 0ustar00runnerdocker00000000000000from urllib.parse import parse_qs, urlencode, urlparse, urlunparse from django.core.exceptions import ImproperlyConfigured from redis.sentinel import SentinelConnectionPool from .default import DefaultClient def replace_query(url, query): return urlunparse((*url[:4], urlencode(query, doseq=True), url[5])) class SentinelClient(DefaultClient): """ Sentinel client which uses the single redis URL specified by the CACHE's LOCATION to create a LOCATION configuration for two connection pools; One pool for the primaries and another pool for the replicas, and upon connecting ensures the connection pool factory is configured correctly. """ def __init__(self, server, params, backend): if isinstance(server, str): url = urlparse(server) primary_query = parse_qs(url.query, keep_blank_values=True) replica_query = dict(primary_query) primary_query["is_master"] = [1] replica_query["is_master"] = [0] server = [replace_query(url, i) for i in (primary_query, replica_query)] super().__init__(server, params, backend) def connect(self, *args, **kwargs): connection = super().connect(*args, **kwargs) if not isinstance(connection.connection_pool, SentinelConnectionPool): raise ImproperlyConfigured( "Settings DJANGO_REDIS_CONNECTION_FACTORY or " "CACHE[].OPTIONS.CONNECTION_POOL_CLASS is not configured correctly." 
) return connection ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/client/sharded.py0000644000175100001710000002536700000000000022573 0ustar00runnerdocker00000000000000import re from collections import OrderedDict from datetime import datetime from typing import Union from redis.exceptions import ConnectionError from ..exceptions import ConnectionInterrupted from ..hash_ring import HashRing from ..util import CacheKey from .default import DEFAULT_TIMEOUT, DefaultClient class ShardClient(DefaultClient): _findhash = re.compile(r".*\{(.*)\}.*", re.I) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) if not isinstance(self._server, (list, tuple)): self._server = [self._server] self._ring = HashRing(self._server) self._serverdict = self.connect() def get_client(self, *args, **kwargs): raise NotImplementedError def connect(self, index=0): connection_dict = {} for name in self._server: connection_dict[name] = self.connection_factory.connect(name) return connection_dict def get_server_name(self, _key): key = str(_key) g = self._findhash.match(key) if g is not None and len(g.groups()) > 0: key = g.groups()[0] name = self._ring.get_node(key) return name def get_server(self, key): name = self.get_server_name(key) return self._serverdict[name] def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super().add( key=key, value=value, version=version, client=client, timeout=timeout ) def get(self, key, default=None, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super().get(key=key, default=default, version=version, client=client) def get_many(self, keys, version=None): if not keys: return {} recovered_data = OrderedDict() new_keys = [self.make_key(key, version=version) for key in keys] map_keys = dict(zip(new_keys, keys)) for key in new_keys: client = self.get_server(key) value = self.get(key=key, version=version, client=client) if value is None: continue recovered_data[map_keys[key]] = value return recovered_data def set( self, key, value, timeout=DEFAULT_TIMEOUT, version=None, client=None, nx=False ): """ Persist a value to the cache, and set an optional expiration time. """ if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super().set( key=key, value=value, timeout=timeout, version=version, client=client, nx=nx ) def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None): """ Set a bunch of values in the cache at once from a dict of key/value pairs. This is much more efficient than calling set() multiple times. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. """ for key, value in data.items(): self.set(key, value, timeout, version=version) def has_key(self, key, version=None, client=None): """ Test if key exists. 
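        The key is first routed to the shard that owns it via the hash ring
        before the ``EXISTS`` call is issued.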
""" if client is None: key = self.make_key(key, version=version) client = self.get_server(key) key = self.make_key(key, version=version) try: return client.exists(key) == 1 except ConnectionError as e: raise ConnectionInterrupted(connection=client) from e def delete(self, key, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super().delete(key=key, version=version, client=client) def ttl(self, key, version=None, client=None): """ Executes TTL redis command and return the "time-to-live" of specified key. If key is a non volatile key, it returns None. """ if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super().ttl(key=key, version=version, client=client) def pttl(self, key, version=None, client=None): """ Executes PTTL redis command and return the "time-to-live" of specified key in milliseconds. If key is a non volatile key, it returns None. """ if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super().pttl(key=key, version=version, client=client) def persist(self, key, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super().persist(key=key, version=version, client=client) def expire(self, key, timeout, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super().expire(key=key, timeout=timeout, version=version, client=client) def pexpire(self, key, timeout, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super().pexpire(key=key, timeout=timeout, version=version, client=client) def pexpire_at(self, key, when: Union[datetime, int], version=None, client=None): """ Set an expire flag on a ``key`` to ``when`` on a shard client. ``when`` which can be represented as an integer indicating unix time or a Python datetime object. """ if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super().pexpire_at(key=key, when=when, version=version, client=client) def expire_at(self, key, when: Union[datetime, int], version=None, client=None): """ Set an expire flag on a ``key`` to ``when`` on a shard client. ``when`` which can be represented as an integer indicating unix time or a Python datetime object. """ if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super().expire_at(key=key, when=when, version=version, client=client) def lock( self, key, version=None, timeout=None, sleep=0.1, blocking_timeout=None, client=None, thread_local=True, ): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) key = self.make_key(key, version=version) return super().lock( key, timeout=timeout, sleep=sleep, client=client, blocking_timeout=blocking_timeout, thread_local=thread_local, ) def delete_many(self, keys, version=None): """ Remove multiple keys at once. 
""" res = 0 for key in [self.make_key(k, version=version) for k in keys]: client = self.get_server(key) res += self.delete(key, client=client) return res def incr_version(self, key, delta=1, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) if version is None: version = self._backend.version old_key = self.make_key(key, version) value = self.get(old_key, version=version, client=client) try: ttl = self.ttl(old_key, version=version, client=client) except ConnectionError as e: raise ConnectionInterrupted(connection=client) from e if value is None: raise ValueError("Key '%s' not found" % key) if isinstance(key, CacheKey): new_key = self.make_key(key.original_key(), version=version + delta) else: new_key = self.make_key(key, version=version + delta) self.set(new_key, value, timeout=ttl, client=self.get_server(new_key)) self.delete(old_key, client=client) return version + delta def incr(self, key, delta=1, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super().incr(key=key, delta=delta, version=version, client=client) def decr(self, key, delta=1, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super().decr(key=key, delta=delta, version=version, client=client) def iter_keys(self, key, version=None): raise NotImplementedError("iter_keys not supported on sharded client") def keys(self, search, version=None): pattern = self.make_pattern(search, version=version) keys = [] try: for server, connection in self._serverdict.items(): keys.extend(connection.keys(pattern)) except ConnectionError as e: # FIXME: technically all clients should be passed as `connection`. client = self.get_server(pattern) raise ConnectionInterrupted(connection=client) from e return [self.reverse_key(k.decode()) for k in keys] def delete_pattern( self, pattern, version=None, client=None, itersize=None, prefix=None ): """ Remove all keys matching pattern. 
""" pattern = self.make_pattern(pattern, version=version, prefix=prefix) kwargs = {"match": pattern} if itersize: kwargs["count"] = itersize keys = [] for server, connection in self._serverdict.items(): keys.extend(key for key in connection.scan_iter(**kwargs)) res = 0 if keys: for server, connection in self._serverdict.items(): res += connection.delete(*keys) return res def do_close_clients(self): for client in self._serverdict.values(): self.disconnect(client=client) def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super().touch(key=key, timeout=timeout, version=version, client=client) def clear(self, client=None): for connection in self._serverdict.values(): connection.flushdb() ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1640185721.0984051 django-redis-5.2.0/django_redis/compressors/0000755000175100001710000000000000000000000021673 5ustar00runnerdocker00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/compressors/__init__.py0000644000175100001710000000000000000000000023772 0ustar00runnerdocker00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/compressors/base.py0000644000175100001710000000037500000000000023164 0ustar00runnerdocker00000000000000class BaseCompressor: def __init__(self, options): self._options = options def compress(self, value: bytes) -> bytes: raise NotImplementedError def decompress(self, value: bytes) -> bytes: raise NotImplementedError ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/compressors/identity.py0000644000175100001710000000033000000000000024072 0ustar00runnerdocker00000000000000from .base import BaseCompressor class IdentityCompressor(BaseCompressor): def compress(self, value: bytes) -> bytes: return value def decompress(self, value: bytes) -> bytes: return value ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/compressors/lz4.py0000644000175100001710000000103500000000000022755 0ustar00runnerdocker00000000000000from lz4.frame import compress as _compress from lz4.frame import decompress as _decompress from ..exceptions import CompressorError from .base import BaseCompressor class Lz4Compressor(BaseCompressor): min_length = 15 def compress(self, value: bytes) -> bytes: if len(value) > self.min_length: return _compress(value) return value def decompress(self, value: bytes) -> bytes: try: return _decompress(value) except Exception as e: raise CompressorError(e) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/compressors/lzma.py0000644000175100001710000000077700000000000023223 0ustar00runnerdocker00000000000000import lzma from ..exceptions import CompressorError from .base import BaseCompressor class LzmaCompressor(BaseCompressor): min_length = 100 preset = 4 def compress(self, value: bytes) -> bytes: if len(value) > self.min_length: return lzma.compress(value, preset=self.preset) return value def decompress(self, value: bytes) -> bytes: try: return lzma.decompress(value) except lzma.LZMAError as e: raise 
CompressorError(e)
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/compressors/zlib.py0000644000175100001710000000076300000000000023213 0ustar00runnerdocker00000000000000import zlib

from ..exceptions import CompressorError
from .base import BaseCompressor


class ZlibCompressor(BaseCompressor):
    min_length = 15
    preset = 6

    def compress(self, value: bytes) -> bytes:
        if len(value) > self.min_length:
            return zlib.compress(value, self.preset)
        return value

    def decompress(self, value: bytes) -> bytes:
        try:
            return zlib.decompress(value)
        except zlib.error as e:
            raise CompressorError(e)
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/compressors/zstd.py0000644000175100001710000000074300000000000023235 0ustar00runnerdocker00000000000000import pyzstd

from ..exceptions import CompressorError
from .base import BaseCompressor


class ZStdCompressor(BaseCompressor):
    min_length = 15

    def compress(self, value: bytes) -> bytes:
        if len(value) > self.min_length:
            return pyzstd.compress(value)
        return value

    def decompress(self, value: bytes) -> bytes:
        try:
            return pyzstd.decompress(value)
        except pyzstd.ZstdError as e:
            raise CompressorError(e)
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/exceptions.py0000644000175100001710000000052700000000000022053 0ustar00runnerdocker00000000000000class ConnectionInterrupted(Exception):
    def __init__(self, connection, parent=None):
        self.connection = connection

    def __str__(self) -> str:
        error_type = type(self.__cause__).__name__
        error_msg = str(self.__cause__)
        return f"Redis {error_type}: {error_msg}"


class CompressorError(Exception):
    pass
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/hash_ring.py0000644000175100001710000000343100000000000021631 0ustar00runnerdocker00000000000000import bisect
import hashlib
from typing import Dict, Iterable, Iterator, List, Optional, Tuple


class HashRing:
    def __init__(self, nodes: Iterable[str] = (), replicas: int = 128) -> None:
        # Per-instance node list (a class-level mutable default here would be
        # shared, and mutated, across all HashRing instances).
        self.nodes: List[str] = []
        self.replicas: int = replicas
        self.ring: Dict[str, str] = {}
        self.sorted_keys: List[str] = []

        for node in nodes:
            self.add_node(node)

    def add_node(self, node: str) -> None:
        self.nodes.append(node)

        for x in range(self.replicas):
            _key = f"{node}:{x}"
            _hash = hashlib.sha256(_key.encode()).hexdigest()

            self.ring[_hash] = node
            self.sorted_keys.append(_hash)

        self.sorted_keys.sort()

    def remove_node(self, node: str) -> None:
        self.nodes.remove(node)

        for x in range(self.replicas):
            _hash = hashlib.sha256(f"{node}:{x}".encode()).hexdigest()

            del self.ring[_hash]
            self.sorted_keys.remove(_hash)

    def get_node(self, key: str) -> Optional[str]:
        n, i = self.get_node_pos(key)
        return n

    def get_node_pos(self, key: str) -> Tuple[Optional[str], Optional[int]]:
        if len(self.ring) == 0:
            return None, None

        _hash = hashlib.sha256(key.encode()).hexdigest()
        idx = bisect.bisect(self.sorted_keys, _hash)
        idx = min(idx - 1, (self.replicas * len(self.nodes)) - 1)
        return self.ring[self.sorted_keys[idx]], idx

    def iter_nodes(self, key: str) -> Iterator[Tuple[Optional[str], Optional[str]]]:
        if len(self.ring) == 0:
            yield None, None

        node, pos = self.get_node_pos(key)
        for k in self.sorted_keys[pos:]:
            yield k, self.ring[k]

    def __call__(self, key: str) -> Optional[str]:
        return
self.get_node(key) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/pool.py0000644000175100001710000001436700000000000020652 0ustar00runnerdocker00000000000000from typing import Dict from urllib.parse import parse_qs, urlparse from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.utils.module_loading import import_string from redis import Redis from redis.connection import DefaultParser, to_bool from redis.sentinel import Sentinel class ConnectionFactory: # Store connection pool by cache backend options. # # _pools is a process-global, as otherwise _pools is cleared every time # ConnectionFactory is instantiated, as Django creates new cache client # (DefaultClient) instance for every request. _pools: Dict[str, Redis] = {} def __init__(self, options): pool_cls_path = options.get( "CONNECTION_POOL_CLASS", "redis.connection.ConnectionPool" ) self.pool_cls = import_string(pool_cls_path) self.pool_cls_kwargs = options.get("CONNECTION_POOL_KWARGS", {}) redis_client_cls_path = options.get("REDIS_CLIENT_CLASS", "redis.client.Redis") self.redis_client_cls = import_string(redis_client_cls_path) self.redis_client_cls_kwargs = options.get("REDIS_CLIENT_KWARGS", {}) self.options = options def make_connection_params(self, url): """ Given a main connection parameters, build a complete dict of connection parameters. """ kwargs = { "url": url, "parser_class": self.get_parser_cls(), } password = self.options.get("PASSWORD", None) if password: kwargs["password"] = password socket_timeout = self.options.get("SOCKET_TIMEOUT", None) if socket_timeout: assert isinstance( socket_timeout, (int, float) ), "Socket timeout should be float or integer" kwargs["socket_timeout"] = socket_timeout socket_connect_timeout = self.options.get("SOCKET_CONNECT_TIMEOUT", None) if socket_connect_timeout: assert isinstance( socket_connect_timeout, (int, float) ), "Socket connect timeout should be float or integer" kwargs["socket_connect_timeout"] = socket_connect_timeout return kwargs def connect(self, url: str) -> Redis: """ Given a basic connection parameters, return a new connection. """ params = self.make_connection_params(url) connection = self.get_connection(params) return connection def disconnect(self, connection): """ Given a not null client connection it disconnect from the Redis server. The default implementation uses a pool to hold connections. """ connection.connection_pool.disconnect() def get_connection(self, params): """ Given a now preformatted params, return a new connection. The default implementation uses a cached pools for create new connection. """ pool = self.get_or_create_connection_pool(params) return self.redis_client_cls( connection_pool=pool, **self.redis_client_cls_kwargs ) def get_parser_cls(self): cls = self.options.get("PARSER_CLASS", None) if cls is None: return DefaultParser return import_string(cls) def get_or_create_connection_pool(self, params): """ Given a connection parameters and return a new or cached connection pool for them. Reimplement this method if you want distinct connection pool instance caching behavior. """ key = params["url"] if key not in self._pools: self._pools[key] = self.get_connection_pool(params) return self._pools[key] def get_connection_pool(self, params): """ Given a connection parameters, return a new connection pool for them. Overwrite this method if you want a custom behavior on creating connection pool. 
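        The pool is built with ``pool_cls.from_url`` using the connection
        parameters merged with ``CONNECTION_POOL_KWARGS``.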
""" cp_params = dict(params) cp_params.update(self.pool_cls_kwargs) pool = self.pool_cls.from_url(**cp_params) if pool.connection_kwargs.get("password", None) is None: pool.connection_kwargs["password"] = params.get("password", None) pool.reset() return pool class SentinelConnectionFactory(ConnectionFactory): def __init__(self, options): # allow overriding the default SentinelConnectionPool class options.setdefault( "CONNECTION_POOL_CLASS", "redis.sentinel.SentinelConnectionPool" ) super().__init__(options) sentinels = options.get("SENTINELS") if not sentinels: raise ImproperlyConfigured( "SENTINELS must be provided as a list of (host, port)." ) # provide the connection pool kwargs to the sentinel in case it # needs to use the socket options for the sentinels themselves connection_kwargs = self.make_connection_params(None) connection_kwargs.pop("url") connection_kwargs.update(self.pool_cls_kwargs) self._sentinel = Sentinel( sentinels, sentinel_kwargs=options.get("SENTINEL_KWARGS"), **connection_kwargs, ) def get_connection_pool(self, params): """ Given a connection parameters, return a new sentinel connection pool for them. """ url = urlparse(params["url"]) # explicitly set service_name and sentinel_manager for the # SentinelConnectionPool constructor since will be called by from_url cp_params = dict(params) cp_params.update(service_name=url.hostname, sentinel_manager=self._sentinel) pool = super().get_connection_pool(cp_params) # convert "is_master" to a boolean if set on the URL, otherwise if not # provided it defaults to True. is_master = parse_qs(url.query).get("is_master") if is_master: pool.is_master = to_bool(is_master[0]) return pool def get_connection_factory(path=None, options=None): if path is None: path = getattr( settings, "DJANGO_REDIS_CONNECTION_FACTORY", "django_redis.pool.ConnectionFactory", ) cls = import_string(path) return cls(options or {}) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1640185721.0984051 django-redis-5.2.0/django_redis/serializers/0000755000175100001710000000000000000000000021650 5ustar00runnerdocker00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/serializers/__init__.py0000644000175100001710000000000000000000000023747 0ustar00runnerdocker00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/serializers/base.py0000644000175100001710000000036700000000000023142 0ustar00runnerdocker00000000000000from typing import Any class BaseSerializer: def __init__(self, options): pass def dumps(self, value: Any) -> bytes: raise NotImplementedError def loads(self, value: bytes) -> Any: raise NotImplementedError ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/serializers/json.py0000644000175100001710000000062100000000000023172 0ustar00runnerdocker00000000000000import json from typing import Any from django.core.serializers.json import DjangoJSONEncoder from .base import BaseSerializer class JSONSerializer(BaseSerializer): encoder_class = DjangoJSONEncoder def dumps(self, value: Any) -> bytes: return json.dumps(value, cls=self.encoder_class).encode() def loads(self, value: bytes) -> Any: return json.loads(value.decode()) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 
django-redis-5.2.0/django_redis/serializers/msgpack.py0000644000175100001710000000043400000000000023650 0ustar00runnerdocker00000000000000from typing import Any

import msgpack

from .base import BaseSerializer


class MSGPackSerializer(BaseSerializer):
    def dumps(self, value: Any) -> bytes:
        return msgpack.dumps(value)

    def loads(self, value: bytes) -> Any:
        return msgpack.loads(value, raw=False)
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/serializers/pickle.py0000644000175100001710000000214000000000000023470 0ustar00runnerdocker00000000000000import pickle
from typing import Any

from django.core.exceptions import ImproperlyConfigured

from .base import BaseSerializer


class PickleSerializer(BaseSerializer):
    def __init__(self, options) -> None:
        self._pickle_version = pickle.DEFAULT_PROTOCOL
        self.setup_pickle_version(options)

        super().__init__(options=options)

    def setup_pickle_version(self, options) -> None:
        if "PICKLE_VERSION" in options:
            try:
                self._pickle_version = int(options["PICKLE_VERSION"])
                if self._pickle_version > pickle.HIGHEST_PROTOCOL:
                    raise ImproperlyConfigured(
                        f"PICKLE_VERSION can't be higher than pickle.HIGHEST_PROTOCOL:"
                        f" {pickle.HIGHEST_PROTOCOL}"
                    )
            except (ValueError, TypeError):
                raise ImproperlyConfigured("PICKLE_VERSION value must be an integer")

    def dumps(self, value: Any) -> bytes:
        return pickle.dumps(value, self._pickle_version)

    def loads(self, value: bytes) -> Any:
        return pickle.loads(value)
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/django_redis/util.py0000644000175100001710000000041200000000000020640 0ustar00runnerdocker00000000000000class CacheKey(str):
    """
    A stub string class that we can use to check if a key was created already.
    """

    def original_key(self) -> str:
        return self.rsplit(":", 1)[1]


def default_reverse_key(key: str) -> str:
    return key.split(":", 2)[2]
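# Note: ``default_reverse_key`` assumes keys built by the default Django key
# function, "<prefix>:<version>:<original-key>", and strips the first two
# segments to recover the caller's original key.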
Home-page: https://github.com/jazzband/django-redis Author: Andrei Antoukh Author-email: niwi@niwi.nz License: BSD-3-Clause Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Environment :: Web Environment Classifier: Framework :: Django Classifier: Framework :: Django :: 2.2 Classifier: Framework :: Django :: 3.1 Classifier: Framework :: Django :: 3.2 Classifier: Framework :: Django :: 4.0 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: BSD License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3 :: Only Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Classifier: Topic :: Software Development :: Libraries Classifier: Topic :: Utilities Requires-Python: >=3.6 Description-Content-Type: text/x-rst Provides-Extra: hiredis License-File: LICENSE License-File: AUTHORS.rst ============================== Redis cache backend for Django ============================== .. image:: https://jazzband.co/static/img/badge.svg :target: https://jazzband.co/ :alt: Jazzband .. image:: https://github.com/jazzband/django-redis/actions/workflows/ci.yml/badge.svg :target: https://github.com/jazzband/django-redis/actions/workflows/ci.yml :alt: GitHub Actions .. image:: https://codecov.io/gh/jazzband/django-redis/branch/master/graph/badge.svg :target: https://codecov.io/gh/jazzband/django-redis :alt: Coverage .. image:: https://img.shields.io/pypi/v/django-redis.svg?style=flat :target: https://pypi.org/project/django-redis/ This is a `Jazzband `_ project. By contributing you agree to abide by the `Contributor Code of Conduct `_ and follow the `guidelines `_. Introduction ------------ django-redis is a BSD licensed, full featured Redis cache and session backend for Django. Why use django-redis? ~~~~~~~~~~~~~~~~~~~~~ - Uses native redis-py url notation connection strings - Pluggable clients - Pluggable parsers - Pluggable serializers - Primary/secondary support in the default client - Comprehensive test suite - Used in production in several projects as cache and session storage - Supports infinite timeouts - Facilities for raw access to Redis client/connection pool - Highly configurable (can emulate memcached exception behavior, for example) - Unix sockets supported by default Requirements ~~~~~~~~~~~~ - `Python`_ 3.6+ - `Django`_ 2.2+ - `redis-py`_ 3.0+ - `Redis server`_ 2.8+ .. _Python: https://www.python.org/downloads/ .. _Django: https://www.djangoproject.com/download/ .. _redis-py: https://pypi.org/project/redis/ .. _Redis server: https://redis.io/download User guide ---------- Installation ~~~~~~~~~~~~ Install with pip: .. code-block:: console $ python -m pip install django-redis Configure as cache backend ~~~~~~~~~~~~~~~~~~~~~~~~~~ To start using django-redis, you should change your Django cache settings to something like: .. code-block:: python CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379/1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", } } } django-redis uses the redis-py native URL notation for connection strings, it allows better interoperability and has a connection string in more "standard" way. 
Some examples:

- ``redis://[[username]:[password]]@localhost:6379/0``
- ``rediss://[[username]:[password]]@localhost:6379/0``
- ``unix://[[username]:[password]]@/path/to/socket.sock?db=0``

Three URL schemes are supported:

- ``redis://``: creates a normal TCP socket connection
- ``rediss://``: creates an SSL wrapped TCP socket connection
- ``unix://``: creates a Unix Domain Socket connection

There are several ways to specify a database number:

- A ``db`` querystring option, e.g. ``redis://localhost?db=0``
- If using the ``redis://`` scheme, the path argument of the URL, e.g. ``redis://localhost/0``

When using `Redis' ACLs <https://redis.io/topics/acl>`_, you will need to add the username to the URL (and provide the password with the Cache ``OPTIONS``). The login for the user ``django`` would look like this:

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://django@localhost:6379/0",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
                "PASSWORD": "mysecret"
            }
        }
    }

An alternative would be to write both the username and the password into the URL:

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://django:mysecret@localhost:6379/0",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
            }
        }
    }

In some circumstances the password you should use to connect to Redis is not URL-safe. In this case you can escape it, or just use the convenience option in the ``OPTIONS`` dict:

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://127.0.0.1:6379/1",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
                "PASSWORD": "mysecret"
            }
        }
    }

Note that this option does not overwrite the password in the URI, so if you have set the password in the URI, this setting will be ignored.

Configure as session backend
~~~~~~~~~~~~~~~~~~~~~~~~~~~~

By default, Django can use any cache backend as a session backend. You can benefit from this by using django-redis as the backend for session storage without installing anything additional:

.. code-block:: python

    SESSION_ENGINE = "django.contrib.sessions.backends.cache"
    SESSION_CACHE_ALIAS = "default"

Testing with django-redis
~~~~~~~~~~~~~~~~~~~~~~~~~

django-redis supports customizing the underlying Redis client (see "Pluggable clients"). This can be used for testing purposes. If you want to flush all data from the cache after a test, add the following lines to your test class:

.. code-block:: python

    from django_redis import get_redis_connection

    def tearDown(self):
        get_redis_connection("default").flushall()
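If you run the test suite with pytest and pytest-django instead of unittest-style test classes, the same cleanup can be expressed as a fixture. The following is only a minimal sketch (the fixture name is arbitrary, and it assumes pytest-django is already configured):

.. code-block:: python

    # conftest.py -- a minimal sketch, assuming pytest-django is set up.
    from typing import Iterable

    import pytest
    from django.core.cache import cache as default_cache


    @pytest.fixture
    def cache() -> Iterable:
        # Hand the configured default cache to the test, then clear it
        # afterwards so state never leaks between tests.
        yield default_cache
        default_cache.clear()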
"OPTIONS": { "SOCKET_CONNECT_TIMEOUT": 5, # seconds "SOCKET_TIMEOUT": 5, # seconds } } } ``SOCKET_CONNECT_TIMEOUT`` is the timeout for the connection to be established and ``SOCKET_TIMEOUT`` is the timeout for read and write operations after the connection is established. Compression support ~~~~~~~~~~~~~~~~~~~ django-redis comes with compression support out of the box, but is deactivated by default. You can activate it setting up a concrete backend: .. code-block:: python CACHES = { "default": { # ... "OPTIONS": { "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor", } } } Let see an example, of how make it work with *lzma* compression format: .. code-block:: python import lzma CACHES = { "default": { # ... "OPTIONS": { "COMPRESSOR": "django_redis.compressors.lzma.LzmaCompressor", } } } *Lz4* compression support (requires the lz4 library): .. code-block:: python import lz4 CACHES = { "default": { # ... "OPTIONS": { "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor", } } } *Zstandard (zstd)* compression support (requires the pyzstd library): .. code-block:: python import pyzstd CACHES = { "default": { # ... "OPTIONS": { "COMPRESSOR": "django_redis.compressors.zstd.ZStdCompressor", } } } Memcached exceptions behavior ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In some situations, when Redis is only used for cache, you do not want exceptions when Redis is down. This is default behavior in the memcached backend and it can be emulated in django-redis. For setup memcached like behaviour (ignore connection exceptions), you should set ``IGNORE_EXCEPTIONS`` settings on your cache configuration: .. code-block:: python CACHES = { "default": { # ... "OPTIONS": { "IGNORE_EXCEPTIONS": True, } } } Also, you can apply the same settings to all configured caches, you can set the global flag in your settings: .. code-block:: python DJANGO_REDIS_IGNORE_EXCEPTIONS = True Log Ignored Exceptions ~~~~~~~~~~~~~~~~~~~~~~ When ignoring exceptions with ``IGNORE_EXCEPTIONS`` or ``DJANGO_REDIS_IGNORE_EXCEPTIONS``, you may optionally log exceptions using the global variable ``DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS`` in your settings file:: DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS = True If you wish to specify the logger in which the exceptions are output, simply set the global variable ``DJANGO_REDIS_LOGGER`` to the string name and/or path of the desired logger. This will default to ``__name__`` if no logger is specified and ``DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS`` is ``True``:: DJANGO_REDIS_LOGGER = 'some.specified.logger' Infinite timeout ~~~~~~~~~~~~~~~~ django-redis comes with infinite timeouts support out of the box. And it behaves in same way as django backend contract specifies: - ``timeout=0`` expires the value immediately. - ``timeout=None`` infinite timeout .. code-block:: python cache.set("key", "value", timeout=None) Get ttl (time-to-live) from key ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ With Redis, you can access to ttl of any stored key, for it, django-redis exposes ``ttl`` function. It returns: - 0 if key does not exists (or already expired). - None for keys that exists but does not have any expiration. - ttl value for any volatile key (any key that has expiration). .. code-block:: pycon >>> from django.core.cache import cache >>> cache.set("foo", "value", timeout=25) >>> cache.ttl("foo") 25 >>> cache.ttl("not-existent") 0 With Redis, you can access to ttl of any stored key in milliseconds, for it, django-redis exposes ``pttl`` function. .. 
Scan & Delete keys in bulk
~~~~~~~~~~~~~~~~~~~~~~~~~~

django-redis comes with some additional methods that help with searching or deleting keys using glob patterns.

.. code-block:: pycon

    >>> from django.core.cache import cache
    >>> cache.keys("foo_*")
    ["foo_1", "foo_2"]

A simple search like this will return all matched values. In databases with a large number of keys this isn't a suitable method. Instead, you can use the ``iter_keys`` function, which works like the ``keys`` function but uses Redis server-side cursors. Calling ``iter_keys`` will return a generator that you can then iterate over efficiently.

.. code-block:: pycon

    >>> from django.core.cache import cache
    >>> cache.iter_keys("foo_*")
    <generator object iter_keys at ...>
    >>> next(cache.iter_keys("foo_*"))
    "foo_1"

For deleting keys, you should use ``delete_pattern``, which has the same glob pattern syntax as the ``keys`` function and returns the number of deleted keys.

.. code-block:: pycon

    >>> from django.core.cache import cache
    >>> cache.delete_pattern("foo_*")
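Both ``iter_keys`` and ``delete_pattern`` accept an ``itersize`` argument that controls how many keys each server-side ``SCAN`` step requests; the project-wide default can be changed with the ``DJANGO_REDIS_SCAN_ITERSIZE`` setting. A short sketch (the pattern and batch size are illustrative only):

.. code-block:: python

    from django.core.cache import cache

    # Bigger batches mean fewer round trips at the cost of larger replies.
    for key in cache.iter_keys("session:*", itersize=1000):
        print(key)

    # delete_pattern returns the number of deleted keys.
    deleted = cache.delete_pattern("session:*", itersize=1000)
    print("removed %d keys" % deleted)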
Redis native commands
~~~~~~~~~~~~~~~~~~~~~

django-redis has limited support for some Redis atomic operations, such as the commands ``SETNX`` and ``INCR``.

You can use the ``SETNX`` command through the backend ``set()`` method with the ``nx`` parameter:

.. code-block:: pycon

    >>> from django.core.cache import cache
    >>> cache.set("key", "value1", nx=True)
    True
    >>> cache.set("key", "value2", nx=True)
    False
    >>> cache.get("key")
    "value1"

Also, the ``incr`` and ``decr`` methods use Redis atomic operations when the value that a key contains is suitable for it.

Raw client access
~~~~~~~~~~~~~~~~~

In some situations your application requires access to a raw Redis client to use some advanced features that aren't exposed by the Django cache interface. To avoid storing another setting for creating a raw connection, django-redis exposes a function with which you can obtain a raw client reusing the cache connection string: ``get_redis_connection(alias)``.

.. code-block:: pycon

    >>> from django_redis import get_redis_connection
    >>> con = get_redis_connection("default")
    >>> con
    <redis.client.Redis object at ...>

WARNING: Not all pluggable clients support this feature.
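For example, the raw client gives you access to Redis data structures that the cache API does not cover, such as hashes. A minimal sketch (the key and fields are illustrative, and the ``mapping`` keyword of ``hset`` assumes redis-py 3.5 or newer); note that keys written this way bypass the django-redis key prefixing, serialization and compression machinery:

.. code-block:: python

    from django_redis import get_redis_connection

    con = get_redis_connection("default")

    # Plain redis-py calls: values are stored as raw bytes, not through the
    # configured serializer or compressor.
    con.hset("player:42", mapping={"name": "alice", "score": 10})
    con.hincrby("player:42", "score", 5)
    print(con.hgetall("player:42"))  # {b'name': b'alice', b'score': b'15'}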
Connection pools
~~~~~~~~~~~~~~~~

Behind the scenes, django-redis uses the underlying redis-py connection pool implementation and exposes a simple way to configure it. Alternatively, you can directly customize connection/connection pool creation for a backend.

The default redis-py behavior is to not close connections, recycling them when possible.

Configure default connection pool
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The default connection pool is simple. For example, you can customize the maximum number of connections in the pool by setting ``CONNECTION_POOL_KWARGS`` in the ``CACHES`` setting:

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            # ...
            "OPTIONS": {
                "CONNECTION_POOL_KWARGS": {"max_connections": 100}
            }
        }
    }

You can verify how many connections the pool has opened with the following snippet:

.. code-block:: python

    from django_redis import get_redis_connection

    r = get_redis_connection("default")  # Use the name you have defined for Redis in settings.CACHES
    connection_pool = r.connection_pool
    print("Created connections so far: %d" % connection_pool._created_connections)

Since the default connection pool passes all keyword arguments it doesn't use to its connections, you can also customize the connections that the pool makes by adding those options to ``CONNECTION_POOL_KWARGS``:

.. code-block:: python

    CACHES = {
        "default": {
            # ...
            "OPTIONS": {
                "CONNECTION_POOL_KWARGS": {"max_connections": 100, "retry_on_timeout": True}
            }
        }
    }

Use your own connection pool subclass
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Sometimes you want to use your own subclass of the connection pool. This is possible with django-redis using the ``CONNECTION_POOL_CLASS`` parameter in the backend options.

.. code-block:: python

    from redis.connection import ConnectionPool

    class MyOwnPool(ConnectionPool):
        # Just doing nothing, only for example purposes
        pass

.. code-block:: python

    # Omitting all backend declaration boilerplate code.
    "OPTIONS": {
        "CONNECTION_POOL_CLASS": "myproj.mypool.MyOwnPool",
    }

Customize connection factory
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If none of the previous methods satisfies your needs, you can step into the middle of the django-redis connection factory process and customize or completely rewrite it.

By default, django-redis creates connections through the ``django_redis.pool.ConnectionFactory`` class that is specified in the global Django setting ``DJANGO_REDIS_CONNECTION_FACTORY``.

.. code-block:: python

    class ConnectionFactory(object):
        def get_connection_pool(self, params: dict):
            # Given connection parameters in the `params` argument, return a
            # new connection pool. It should be overwritten if you want to do
            # something before/after creating the connection pool, or return
            # your own connection pool.
            pass

        def get_connection(self, params: dict):
            # Given connection parameters in the `params` argument, return a
            # new connection. It should be overwritten if you want to do
            # something before/after creating a new connection. The default
            # implementation uses `get_connection_pool` to obtain a pool and
            # create a new connection in the newly obtained pool.
            pass

        def get_or_create_connection_pool(self, params: dict):
            # This is a higher-level layer on top of `get_connection_pool` for
            # implementing a cache of created connection pools. It should be
            # overwritten if you want to change the default behavior.
            pass

        def make_connection_params(self, url: str) -> dict:
            # The responsibility of this method is to convert basic connection
            # parameters and other settings into fully connection-pool-ready
            # connection parameters.
            pass

        def connect(self, url: str):
            # This is the public API and entry point of this factory class.
            # It encapsulates the main logic of creating the previously
            # mentioned `params` using `make_connection_params` and creating
            # a new connection using the `get_connection` method.
            pass
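As a concrete illustration, here is a minimal sketch of a factory subclass that only wraps ``get_connection`` to log every new connection; the class and logger names are arbitrary, and everything else is delegated to the default factory:

.. code-block:: python

    # myproj/factory.py -- a minimal sketch, not library code.
    import logging

    from django_redis.pool import ConnectionFactory

    logger = logging.getLogger(__name__)


    class LoggingConnectionFactory(ConnectionFactory):
        def get_connection(self, params: dict):
            # Delegate the real work to the default implementation and
            # simply observe the result.
            connection = super().get_connection(params)
            logger.debug("Opened new Redis connection: %r", params)
            return connection

It can then be enabled globally with ``DJANGO_REDIS_CONNECTION_FACTORY = "myproj.factory.LoggingConnectionFactory"``.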
Use the sentinel connection factory
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

In order to facilitate using `Redis Sentinels`_, django-redis comes with a built-in sentinel connection factory, which creates sentinel connection pools. In order to enable this functionality you should add the following:

.. code-block:: python

    # Enable the alternate connection factory.
    DJANGO_REDIS_CONNECTION_FACTORY = 'django_redis.pool.SentinelConnectionFactory'

    # These sentinels are shared between all the examples, and are passed
    # directly to redis Sentinel. These can also be defined inline.
    SENTINELS = [
        ('sentinel-1', 26379),
        ('sentinel-2', 26379),
        ('sentinel-3', 26379),
    ]

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            # The hostname in LOCATION is the primary (service / master) name
            "LOCATION": "redis://service_name/db",
            "OPTIONS": {
                # While the default client will work, this will check you
                # have configured things correctly, and also create a
                # primary and replica pool for the service specified by
                # LOCATION rather than requiring two URLs.
                "CLIENT_CLASS": "django_redis.client.SentinelClient",

                # Sentinels which are passed directly to redis Sentinel.
                "SENTINELS": SENTINELS,

                # kwargs for redis Sentinel (optional).
                "SENTINEL_KWARGS": {},

                # You can still override the connection pool (optional).
                "CONNECTION_POOL_CLASS": "redis.sentinel.SentinelConnectionPool",
            },
        },

        # A minimal example using the SentinelClient.
        "minimal": {
            "BACKEND": "django_redis.cache.RedisCache",

            # The SentinelClient will use this location for both the primaries
            # and replicas.
            "LOCATION": "redis://minimal_service_name/db",

            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.SentinelClient",
                "SENTINELS": SENTINELS,
            },
        },

        # A minimal example using the DefaultClient.
        "other": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": [
                # The DefaultClient is [primary, replicas...], but with the
                # SentinelConnectionPool it only requires one "is_master=0".
                "redis://other_service_name/db?is_master=1",
                "redis://other_service_name/db?is_master=0",
            ],
            "OPTIONS": {"SENTINELS": SENTINELS},
        },

        # A minimal example using only replicas in read-only mode (and
        # the DefaultClient).
        "readonly": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://readonly_service_name/db?is_master=0",
            "OPTIONS": {"SENTINELS": SENTINELS},
        },
    }

.. _Redis Sentinels: https://redis.io/topics/sentinel

Pluggable parsers
~~~~~~~~~~~~~~~~~

redis-py (the Python Redis client used by django-redis) comes with a pure Python Redis parser that works very well for most common tasks, but if you want some performance boost, you can use hiredis.

hiredis is a Redis client written in C, and it has its own parser that can be used with django-redis.

.. code-block:: python

    "OPTIONS": {
        "PARSER_CLASS": "redis.connection.HiredisParser",
    }

Pluggable clients
~~~~~~~~~~~~~~~~~

django-redis is designed to be very flexible and highly configurable. To that end, it exposes pluggable clients that make it easy to extend the default behavior, and it comes with a few of them out of the box.

Default client
^^^^^^^^^^^^^^

Almost everything about the default client has already been covered, with one exception: the default client comes with replication support.

To connect to a Redis replication setup, you should change the ``LOCATION`` to something like:

.. code-block:: python

    "LOCATION": [
        "redis://127.0.0.1:6379/1",
        "redis://127.0.0.1:6378/1",
    ]

The first connection string represents the primary server and the rest represent the replica servers.

WARNING: Replication setup is not heavily tested in production environments.

Shard client
^^^^^^^^^^^^

This pluggable client implements client-side sharding. It inherits almost all functionality from the default client. To use it, change your cache settings to something like this:

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": [
                "redis://127.0.0.1:6379/1",
                "redis://127.0.0.1:6379/2",
            ],
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.ShardClient",
            }
        }
    }

WARNING: The shard client is still experimental, so be careful when using it in production environments.

Herd client
^^^^^^^^^^^

This pluggable client helps deal with the thundering herd problem. You can read more about it on `Wikipedia <https://en.wikipedia.org/wiki/Thundering_herd_problem>`_.

Like the previous pluggable clients, it inherits all functionality from the default client, adding some additional methods for getting/setting keys.

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://127.0.0.1:6379/1",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.HerdClient",
            }
        }
    }

This client exposes an additional setting:

- ``CACHE_HERD_TIMEOUT``: Sets the default herd timeout. (Default value: 60s)

Pluggable serializer
~~~~~~~~~~~~~~~~~~~~

The pluggable clients serialize data before sending it to the server. By default, django-redis serializes the data using the Python ``pickle`` module. This is very flexible and can handle a large range of object types.

To serialize using JSON instead, the serializer ``JSONSerializer`` is also available.

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://127.0.0.1:6379/1",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
                "SERIALIZER": "django_redis.serializers.json.JSONSerializer",
            }
        }
    }

There's also support for serialization using `MsgPack`_ (this requires the msgpack library):

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://127.0.0.1:6379/1",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
                "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer",
            }
        }
    }

.. _MsgPack: http://msgpack.org/
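You can also plug in a serializer of your own. The sketch below assumes only the interface visible in the bundled serializers (subclass ``django_redis.serializers.base.BaseSerializer`` and implement ``dumps`` and ``loads``); the module and class names are arbitrary:

.. code-block:: python

    # myproj/serializers.py -- a minimal sketch of a custom serializer.
    import json
    from typing import Any

    from django_redis.serializers.base import BaseSerializer


    class PrettyJSONSerializer(BaseSerializer):
        def dumps(self, value: Any) -> bytes:
            # Indented JSON is easier to inspect with redis-cli, at the
            # cost of a slightly larger payload.
            return json.dumps(value, indent=2).encode()

        def loads(self, value: bytes) -> Any:
            return json.loads(value.decode())

Point the ``SERIALIZER`` option at ``"myproj.serializers.PrettyJSONSerializer"`` to activate it.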
Pluggable Redis client
~~~~~~~~~~~~~~~~~~~~~~

django-redis uses the Redis client ``redis.client.StrictRedis`` by default. It is possible to use an alternative client.

You can customize the client used by setting ``REDIS_CLIENT_CLASS`` in the ``CACHES`` setting. Optionally, you can provide arguments to this class by setting ``REDIS_CLIENT_KWARGS``.

.. code-block:: python

    CACHES = {
        "default": {
            "OPTIONS": {
                "REDIS_CLIENT_CLASS": "my.module.ClientClass",
                "REDIS_CLIENT_KWARGS": {"some_setting": True},
            }
        }
    }

Closing Connections
~~~~~~~~~~~~~~~~~~~

The default django-redis behavior on ``close()`` is to keep the connections to the Redis server open.

You can change this default behavior for all caches by setting ``DJANGO_REDIS_CLOSE_CONNECTION = True`` in the Django settings (globally), or per cache by setting ``CLOSE_CONNECTION: True`` in the ``OPTIONS`` of each configured cache. Setting ``True`` as the value will instruct django-redis to close all the connections (since v. 4.12.2), irrespective of their current usage.

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://127.0.0.1:6379/1",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
                "CLOSE_CONNECTION": True,
            }
        }
    }

SSL/TLS and Self-Signed certificates
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

In case you encounter a Redis server offering a TLS connection using a self-signed certificate, you may disable certificate verification with the following:

.. code-block:: python

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "rediss://127.0.0.1:6379/1",
            "OPTIONS": {
                "CLIENT_CLASS": "django_redis.client.DefaultClient",
                "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": None}
            }
        }
    }

License
-------

.. code-block:: text

    Copyright (c) 2011-2015 Andrey Antukh
    Copyright (c) 2011 Sean Bleier

    All rights reserved.

    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions
    are met:
    1. Redistributions of source code must retain the above copyright
       notice, this list of conditions and the following disclaimer.
    2. Redistributions in binary form must reproduce the above copyright
       notice, this list of conditions and the following disclaimer in the
       documentation and/or other materials provided with the distribution.
    3. The name of the author may not be used to endorse or promote products
       derived from this software without specific prior written permission.

    THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS`` AND ANY EXPRESS OR
    IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
    WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
    DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
    INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
    (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
    SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
    IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
    POSSIBILITY OF SUCH DAMAGE.
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185721.0 django-redis-5.2.0/django_redis.egg-info/SOURCES.txt0000644000175100001710000000270700000000000022700 0ustar00runnerdocker00000000000000AUTHORS.rst LICENSE MANIFEST.in README.rst pyproject.toml setup.cfg setup.py django_redis/__init__.py django_redis/cache.py django_redis/exceptions.py django_redis/hash_ring.py django_redis/pool.py django_redis/util.py django_redis.egg-info/PKG-INFO django_redis.egg-info/SOURCES.txt django_redis.egg-info/dependency_links.txt django_redis.egg-info/requires.txt django_redis.egg-info/top_level.txt django_redis/client/__init__.py django_redis/client/default.py django_redis/client/herd.py django_redis/client/sentinel.py django_redis/client/sharded.py django_redis/compressors/__init__.py django_redis/compressors/base.py django_redis/compressors/identity.py django_redis/compressors/lz4.py django_redis/compressors/lzma.py django_redis/compressors/zlib.py django_redis/compressors/zstd.py django_redis/serializers/__init__.py django_redis/serializers/base.py django_redis/serializers/json.py django_redis/serializers/msgpack.py django_redis/serializers/pickle.py tests/__init__.py tests/conftest.py tests/test_backend.py tests/test_cache_options.py tests/test_client.py tests/test_connection_string.py tests/test_hashring.py tests/test_serializers.py tests/test_session.py tests/settings/sqlite.py tests/settings/sqlite_herd.py tests/settings/sqlite_json.py tests/settings/sqlite_lz4.py tests/settings/sqlite_msgpack.py tests/settings/sqlite_sentinel.py tests/settings/sqlite_sharding.py tests/settings/sqlite_usock.py tests/settings/sqlite_zlib.py tests/settings/sqlite_zstd.py././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185721.0 django-redis-5.2.0/django_redis.egg-info/dependency_links.txt0000644000175100001710000000000100000000000025054 0ustar00runnerdocker00000000000000 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185721.0 django-redis-5.2.0/django_redis.egg-info/requires.txt0000644000175100001710000000012200000000000023401 0ustar00runnerdocker00000000000000Django>=2.2 redis!=4.0.0,!=4.0.1,>=3 [hiredis] redis[hiredis]!=4.0.0,!=4.0.1,>=3 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185721.0 django-redis-5.2.0/django_redis.egg-info/top_level.txt0000644000175100001710000000001500000000000023534 0ustar00runnerdocker00000000000000django_redis ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/pyproject.toml0000644000175100001710000000117300000000000017602 0ustar00runnerdocker00000000000000[tool.towncrier] directory = "changelog.d" filename = "CHANGELOG.rst" issue_format = "`#{issue} `_" name = "django-redis" package = "django_redis" type = [ { name = "Features", directory = "feature", showcontent = true }, { name = "Bug Fixes", directory = "bugfix", showcontent = true }, { name = "Miscellaneous", directory = "misc", showcontent = true }, { name = "Documentation", directory = "doc", showcontent = true }, { name = "Deprecations and Removals", directory = "removal", showcontent = true }, ] ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1640185721.1024053 django-redis-5.2.0/setup.cfg0000644000175100001710000001067600000000000016517 0ustar00runnerdocker00000000000000[metadata] name = django-redis 
version = attr: django_redis.__version__ url = https://github.com/jazzband/django-redis author = Andrei Antoukh author_email = niwi@niwi.nz description = Full featured redis cache backend for Django. long_description = file: README.rst long_description_content_type = text/x-rst license = BSD-3-Clause classifiers = Development Status :: 5 - Production/Stable Environment :: Web Environment Framework :: Django Framework :: Django :: 2.2 Framework :: Django :: 3.1 Framework :: Django :: 3.2 Framework :: Django :: 4.0 Intended Audience :: Developers License :: OSI Approved :: BSD License Operating System :: OS Independent Programming Language :: Python Programming Language :: Python :: 3 Programming Language :: Python :: 3 :: Only Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Topic :: Software Development :: Libraries Topic :: Utilities [options] python_requires = >=3.6 packages = django_redis django_redis.client django_redis.serializers django_redis.compressors install_requires = Django>=2.2 redis>=3,!=4.0.0,!=4.0.1 [options.extras_require] hiredis = redis[hiredis]>=3,!=4.0.0,!=4.0.1 [coverage:run] omit = tests/*, [coverage:report] precision = 1 skip_covered = true [flake8] ignore = W503 W601 E203 max-line-length = 88 [isort] profile = black multi_line_output = 3 [tox:tox] minversion = 3.15.0 envlist = black flake8 isort mypy py{36,37,38,39,310}-dj{22,31,32,40}-redislatest py310-dj40-redismaster py310-djmain-redis{latest,master} [gh-actions] python = 3.6: py36 3.7: py37 3.8: py38, black, flake8, isort, mypy 3.9: py39 3.10: py310 [gh-actions:env] DJANGO = 2.2: dj22 3.1: dj31 3.2: dj32 4.0: dj40 main: djmain REDIS = latest: redislatest master: redismaster [testenv] passenv = CI GITHUB* commands = {envpython} -m pytest --cov-report= --ds=settings.sqlite {posargs} {envpython} -m pytest --cov-append --cov-report= --ds=settings.sqlite_json {posargs} {envpython} -m pytest --cov-append --cov-report= --ds=settings.sqlite_lz4 {posargs} {envpython} -m pytest --cov-append --cov-report= --ds=settings.sqlite_msgpack {posargs} {envpython} -m pytest --cov-append --cov-report= --ds=settings.sqlite_sentinel {posargs} {envpython} -m pytest --cov-append --cov-report= --ds=settings.sqlite_sharding {posargs} {envpython} -m pytest --cov-append --cov-report= --ds=settings.sqlite_usock {posargs} {envpython} -m pytest --cov-append --cov-report= --ds=settings.sqlite_zlib {posargs} {envpython} -m pytest --cov-append --cov-report= --ds=settings.sqlite_zstd {posargs} {envpython} -m coverage report {envpython} -m coverage xml deps = dj22: Django>=2.2,<2.3 dj31: Django>=3.1,<3.2 dj32: Django>=3.2,<3.3 dj40: Django>=4.0,<4.1 djmain: https://github.com/django/django/archive/main.tar.gz msgpack>=0.6.0 pytest pytest-cov pytest-django pytest-pythonpath pytest-mock redismaster: https://github.com/redis/redis-py/archive/master.tar.gz lz4>=0.15 pyzstd>=0.15 [testenv:{black,flake8,isort,mypy}] basepython = python3 envdir = {toxworkdir}/lint commands = black: black --target-version py36 {posargs:--check --diff} setup.py django_redis/ tests/ flake8: flake8 {posargs} setup.py django_redis/ tests/ isort: isort {posargs:--check-only --diff} django_redis/ tests/ mypy: mypy {posargs:--cobertura-xml-report .} django_redis tests deps = black django-stubs flake8 isort >= 5.0.2 lxml mypy pytest pytest-django pytest-mock types-redis skip_install = true [tool:pytest] DJANGO_SETTINGS_MODULE = 
settings.sqlite addopts = --doctest-modules --cov=django_redis --cov-config=setup.cfg --no-cov-on-fail filterwarnings = error::DeprecationWarning error::FutureWarning error::PendingDeprecationWarning ignore:.*distutils package is deprecated.*:DeprecationWarning python_paths = tests testpaths = tests xfail_strict = true [mypy] plugins = mypy_django_plugin.main pretty = true show_error_codes = true show_error_context = true warn_redundant_casts = true warn_unused_ignores = true warn_unreachable = true [mypy.plugins.django-stubs] django_settings_module = tests.settings.sqlite ignore_missing_settings = true [mypy-lz4.frame] ignore_missing_imports = true [mypy-pyzstd] ignore_missing_imports = true [mypy-msgpack] ignore_missing_imports = true [mypy-redis.sentinel] ignore_missing_imports = true [egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/setup.py0000644000175100001710000000004600000000000016376 0ustar00runnerdocker00000000000000from setuptools import setup setup() ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1640185721.1024053 django-redis-5.2.0/tests/0000755000175100001710000000000000000000000016026 5ustar00runnerdocker00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/tests/__init__.py0000644000175100001710000000000000000000000020125 0ustar00runnerdocker00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/tests/conftest.py0000644000175100001710000000036400000000000020230 0ustar00runnerdocker00000000000000from typing import Iterable import pytest from django.core.cache import cache as default_cache from django_redis.cache import RedisCache @pytest.fixture def cache() -> Iterable[RedisCache]: yield default_cache default_cache.clear() ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1640185721.1024053 django-redis-5.2.0/tests/settings/0000755000175100001710000000000000000000000017666 5ustar00runnerdocker00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/tests/settings/sqlite.py0000644000175100001710000000243300000000000021543 0ustar00runnerdocker00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": ["redis://127.0.0.1:6379?db=1", "redis://127.0.0.1:6379?db=1"], "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, }, "doesnotexist": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:56379?db=1", "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, }, "sample": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379:1,redis://127.0.0.1:6379:1", "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, "KEY_PREFIX": "test-prefix", }, } # Include `django.contrib.auth` and `django.contrib.contenttypes` for mypy / # django-stubs. 
# See: # - https://github.com/typeddjango/django-stubs/issues/318 # - https://github.com/typeddjango/django-stubs/issues/534 INSTALLED_APPS = [ "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sessions", ] USE_TZ = False ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/tests/settings/sqlite_herd.py0000644000175100001710000000172700000000000022552 0ustar00runnerdocker00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": ["redis://127.0.0.1:6379?db=5"], "OPTIONS": {"CLIENT_CLASS": "django_redis.client.HerdClient"}, }, "doesnotexist": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:56379?db=1", "OPTIONS": {"CLIENT_CLASS": "django_redis.client.HerdClient"}, }, "sample": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1,redis://127.0.0.1:6379?db=1", "OPTIONS": {"CLIENT_CLASS": "django_redis.client.HerdClient"}, }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": {"CLIENT_CLASS": "django_redis.client.HerdClient"}, "KEY_PREFIX": "test-prefix", }, } INSTALLED_APPS = ["django.contrib.sessions"] USE_TZ = False ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/tests/settings/sqlite_json.py0000644000175100001710000000260600000000000022576 0ustar00runnerdocker00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": ["redis://127.0.0.1:6379?db=1", "redis://127.0.0.1:6379?db=1"], "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.json.JSONSerializer", }, }, "doesnotexist": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:56379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.json.JSONSerializer", }, }, "sample": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1,redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.json.JSONSerializer", }, }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.json.JSONSerializer", }, "KEY_PREFIX": "test-prefix", }, } INSTALLED_APPS = ["django.contrib.sessions"] USE_TZ = False ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/tests/settings/sqlite_lz4.py0000644000175100001710000000255600000000000022342 0ustar00runnerdocker00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": ["redis://127.0.0.1:6379?db=1", "redis://127.0.0.1:6379?db=1"], "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor", }, }, "doesnotexist": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:56379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor", }, }, "sample": { "BACKEND": 
"django_redis.cache.RedisCache", "LOCATION": "127.0.0.1:6379?db=1,127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor", }, }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor", }, "KEY_PREFIX": "test-prefix", }, } INSTALLED_APPS = ["django.contrib.sessions"] USE_TZ = False ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/tests/settings/sqlite_msgpack.py0000644000175100001710000000263600000000000023255 0ustar00runnerdocker00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": ["redis://127.0.0.1:6379?db=1", "redis://127.0.0.1:6379?db=1"], "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer", }, }, "doesnotexist": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:56379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer", }, }, "sample": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1,redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer", }, }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer", }, "KEY_PREFIX": "test-prefix", }, } INSTALLED_APPS = ["django.contrib.sessions"] USE_TZ = False ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/tests/settings/sqlite_sentinel.py0000644000175100001710000000245400000000000023447 0ustar00runnerdocker00000000000000SECRET_KEY = "django_tests_secret_key" DJANGO_REDIS_CONNECTION_FACTORY = "django_redis.pool.SentinelConnectionFactory" SENTINELS = [("127.0.0.1", 26379)] CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": ["redis://default_service?db=5"], "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SENTINELS": SENTINELS, }, }, "doesnotexist": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://missing_service?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SENTINELS": SENTINELS, }, }, "sample": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://default_service?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.SentinelClient", "SENTINELS": SENTINELS, }, }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://default_service?db=1", "KEY_PREFIX": "test-prefix", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SENTINELS": SENTINELS, }, }, } INSTALLED_APPS = ["django.contrib.sessions"] USE_TZ = False ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/tests/settings/sqlite_sharding.py0000644000175100001710000000203400000000000023417 0ustar00runnerdocker00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { 
"default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": ["redis://127.0.0.1:6379?db=1", "redis://127.0.0.1:6379?db=2"], "OPTIONS": {"CLIENT_CLASS": "django_redis.client.ShardClient"}, }, "doesnotexist": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": ["redis://127.0.0.1:56379?db=1", "redis://127.0.0.1:56379?db=2"], "OPTIONS": {"CLIENT_CLASS": "django_redis.client.ShardClient"}, }, "sample": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1,redis://127.0.0.1:6379?db=1", "OPTIONS": {"CLIENT_CLASS": "django_redis.client.ShardClient"}, }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": {"CLIENT_CLASS": "django_redis.client.ShardClient"}, "KEY_PREFIX": "test-prefix", }, } INSTALLED_APPS = ["django.contrib.sessions"] USE_TZ = False ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/tests/settings/sqlite_usock.py0000644000175100001710000000200200000000000022737 0ustar00runnerdocker00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": ["unix:///tmp/redis.sock?db=1", "unix:///tmp/redis.sock?db=1"], "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, }, "doesnotexist": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:56379?db=1", "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, }, "sample": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1,redis://127.0.0.1:6379?db=1", "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, "KEY_PREFIX": "test-prefix", }, } INSTALLED_APPS = ["django.contrib.sessions"] USE_TZ = False ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/tests/settings/sqlite_zlib.py0000644000175100001710000000260600000000000022565 0ustar00runnerdocker00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": ["redis://127.0.0.1:6379?db=1", "redis://127.0.0.1:6379?db=1"], "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor", }, }, "doesnotexist": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:56379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor", }, }, "sample": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1,redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor", }, }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor", }, "KEY_PREFIX": "test-prefix", }, } INSTALLED_APPS = ["django.contrib.sessions"] USE_TZ = False ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 
django-redis-5.2.0/tests/settings/sqlite_zstd.py0000644000175100001710000000260600000000000022611 0ustar00runnerdocker00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": ["redis://127.0.0.1:6379?db=1", "redis://127.0.0.1:6379?db=1"], "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.zstd.ZStdCompressor", }, }, "doesnotexist": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:56379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.zstd.ZStdCompressor", }, }, "sample": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1,redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.zstd.ZStdCompressor", }, }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.zstd.ZStdCompressor", }, "KEY_PREFIX": "test-prefix", }, } INSTALLED_APPS = ["django.contrib.sessions"] USE_TZ = False ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1640185712.0 django-redis-5.2.0/tests/test_backend.py0000644000175100001710000006132700000000000021037 0ustar00runnerdocker00000000000000import datetime import threading import time from datetime import timedelta from typing import List, Union, cast from unittest.mock import patch import pytest from django.core.cache import caches from pytest_django.fixtures import SettingsWrapper from pytest_mock import MockerFixture import django_redis.cache from django_redis.cache import RedisCache from django_redis.client import ShardClient, herd from django_redis.serializers.json import JSONSerializer from django_redis.serializers.msgpack import MSGPackSerializer herd.CACHE_HERD_TIMEOUT = 2 class TestDjangoRedisCache: def test_setnx(self, cache: RedisCache): # we should ensure there is no test_key_nx in redis cache.delete("test_key_nx") res = cache.get("test_key_nx") assert res is None res = cache.set("test_key_nx", 1, nx=True) assert bool(res) is True # test that second set will have res = cache.set("test_key_nx", 2, nx=True) assert res is False res = cache.get("test_key_nx") assert res == 1 cache.delete("test_key_nx") res = cache.get("test_key_nx") assert res is None def test_setnx_timeout(self, cache: RedisCache): # test that timeout still works for nx=True res = cache.set("test_key_nx", 1, timeout=2, nx=True) assert res is True time.sleep(3) res = cache.get("test_key_nx") assert res is None # test that timeout will not affect key, if it was there cache.set("test_key_nx", 1) res = cache.set("test_key_nx", 2, timeout=2, nx=True) assert res is False time.sleep(3) res = cache.get("test_key_nx") assert res == 1 cache.delete("test_key_nx") res = cache.get("test_key_nx") assert res is None def test_unicode_keys(self, cache: RedisCache): cache.set("ключ", "value") res = cache.get("ключ") assert res == "value" def test_save_and_integer(self, cache: RedisCache): cache.set("test_key", 2) res = cache.get("test_key", "Foo") assert isinstance(res, int) assert res == 2 def test_save_string(self, cache: RedisCache): cache.set("test_key", "hello" * 1000) res = cache.get("test_key") assert isinstance(res, str) assert res == "hello" * 1000 cache.set("test_key", "2") 
res = cache.get("test_key") assert isinstance(res, str) assert res == "2" def test_save_unicode(self, cache: RedisCache): cache.set("test_key", "heló") res = cache.get("test_key") assert isinstance(res, str) assert res == "heló" def test_save_dict(self, cache: RedisCache): if isinstance(cache.client._serializer, (JSONSerializer, MSGPackSerializer)): # JSONSerializer and MSGPackSerializer use the isoformat for # datetimes. now_dt: Union[str, datetime.datetime] = datetime.datetime.now().isoformat() else: now_dt = datetime.datetime.now() test_dict = {"id": 1, "date": now_dt, "name": "Foo"} cache.set("test_key", test_dict) res = cache.get("test_key") assert isinstance(res, dict) assert res["id"] == 1 assert res["name"] == "Foo" assert res["date"] == now_dt def test_save_float(self, cache: RedisCache): float_val = 1.345620002 cache.set("test_key", float_val) res = cache.get("test_key") assert isinstance(res, float) assert res == float_val def test_timeout(self, cache: RedisCache): cache.set("test_key", 222, timeout=3) time.sleep(4) res = cache.get("test_key") assert res is None def test_timeout_0(self, cache: RedisCache): cache.set("test_key", 222, timeout=0) res = cache.get("test_key") assert res is None def test_timeout_parameter_as_positional_argument(self, cache: RedisCache): cache.set("test_key", 222, -1) res = cache.get("test_key") assert res is None cache.set("test_key", 222, 1) res1 = cache.get("test_key") time.sleep(2) res2 = cache.get("test_key") assert res1 == 222 assert res2 is None # nx=True should not overwrite expire of key already in db cache.set("test_key", 222, None) cache.set("test_key", 222, -1, nx=True) res = cache.get("test_key") assert res == 222 def test_timeout_negative(self, cache: RedisCache): cache.set("test_key", 222, timeout=-1) res = cache.get("test_key") assert res is None cache.set("test_key", 222, timeout=None) cache.set("test_key", 222, timeout=-1) res = cache.get("test_key") assert res is None # nx=True should not overwrite expire of key already in db cache.set("test_key", 222, timeout=None) cache.set("test_key", 222, timeout=-1, nx=True) res = cache.get("test_key") assert res == 222 def test_timeout_tiny(self, cache: RedisCache): cache.set("test_key", 222, timeout=0.00001) res = cache.get("test_key") assert res in (None, 222) def test_set_add(self, cache: RedisCache): cache.set("add_key", "Initial value") res = cache.add("add_key", "New value") assert res is False res = cache.get("add_key") assert res == "Initial value" res = cache.add("other_key", "New value") assert res is True def test_get_many(self, cache: RedisCache): cache.set("a", 1) cache.set("b", 2) cache.set("c", 3) res = cache.get_many(["a", "b", "c"]) assert res == {"a": 1, "b": 2, "c": 3} def test_get_many_unicode(self, cache: RedisCache): cache.set("a", "1") cache.set("b", "2") cache.set("c", "3") res = cache.get_many(["a", "b", "c"]) assert res == {"a": "1", "b": "2", "c": "3"} def test_set_many(self, cache: RedisCache): cache.set_many({"a": 1, "b": 2, "c": 3}) res = cache.get_many(["a", "b", "c"]) assert res == {"a": 1, "b": 2, "c": 3} def test_set_call_empty_pipeline(self, cache: RedisCache, mocker: MockerFixture): if isinstance(cache.client, ShardClient): pytest.skip("ShardClient doesn't support get_client") pipeline = cache.client.get_client(write=True).pipeline() key = "key" value = "value" mocked_set = mocker.patch.object(pipeline, "set") cache.set(key, value, client=pipeline) if isinstance(cache.client, herd.HerdClient): default_timeout = cache.client._backend.default_timeout 
herd_timeout = (default_timeout + herd.CACHE_HERD_TIMEOUT) * 1000 herd_pack_value = cache.client._pack(value, default_timeout) mocked_set.assert_called_once_with( cache.client.make_key(key, version=None), cache.client.encode(herd_pack_value), nx=False, px=herd_timeout, xx=False, ) else: mocked_set.assert_called_once_with( cache.client.make_key(key, version=None), cache.client.encode(value), nx=False, px=cache.client._backend.default_timeout * 1000, xx=False, ) def test_delete(self, cache: RedisCache): cache.set_many({"a": 1, "b": 2, "c": 3}) res = cache.delete("a") assert bool(res) is True res = cache.get_many(["a", "b", "c"]) assert res == {"b": 2, "c": 3} res = cache.delete("a") assert bool(res) is False @patch("django_redis.cache.DJANGO_VERSION", (3, 1, 0, "final", 0)) def test_delete_return_value_type_new31(self, cache: RedisCache): """delete() returns a boolean instead of int since django version 3.1""" cache.set("a", 1) res = cache.delete("a") assert isinstance(res, bool) assert res is True res = cache.delete("b") assert isinstance(res, bool) assert res is False @patch("django_redis.cache.DJANGO_VERSION", new=(3, 0, 1, "final", 0)) def test_delete_return_value_type_before31(self, cache: RedisCache): """delete() returns a int before django version 3.1""" cache.set("a", 1) res = cache.delete("a") assert isinstance(res, int) assert res == 1 res = cache.delete("b") assert isinstance(res, int) assert res == 0 def test_delete_many(self, cache: RedisCache): cache.set_many({"a": 1, "b": 2, "c": 3}) res = cache.delete_many(["a", "b"]) assert bool(res) is True res = cache.get_many(["a", "b", "c"]) assert res == {"c": 3} res = cache.delete_many(["a", "b"]) assert bool(res) is False def test_delete_many_generator(self, cache: RedisCache): cache.set_many({"a": 1, "b": 2, "c": 3}) res = cache.delete_many(key for key in ["a", "b"]) assert bool(res) is True res = cache.get_many(["a", "b", "c"]) assert res == {"c": 3} res = cache.delete_many(["a", "b"]) assert bool(res) is False def test_delete_many_empty_generator(self, cache: RedisCache): res = cache.delete_many(key for key in cast(List[str], [])) assert bool(res) is False def test_incr(self, cache: RedisCache): if isinstance(cache.client, herd.HerdClient): pytest.skip("HerdClient doesn't support incr") cache.set("num", 1) cache.incr("num") res = cache.get("num") assert res == 2 cache.incr("num", 10) res = cache.get("num") assert res == 12 # max 64 bit signed int cache.set("num", 9223372036854775807) cache.incr("num") res = cache.get("num") assert res == 9223372036854775808 cache.incr("num", 2) res = cache.get("num") assert res == 9223372036854775810 cache.set("num", 3) cache.incr("num", 2) res = cache.get("num") assert res == 5 def test_incr_no_timeout(self, cache: RedisCache): if isinstance(cache.client, herd.HerdClient): pytest.skip("HerdClient doesn't support incr") cache.set("num", 1, timeout=None) cache.incr("num") res = cache.get("num") assert res == 2 cache.incr("num", 10) res = cache.get("num") assert res == 12 # max 64 bit signed int cache.set("num", 9223372036854775807, timeout=None) cache.incr("num") res = cache.get("num") assert res == 9223372036854775808 cache.incr("num", 2) res = cache.get("num") assert res == 9223372036854775810 cache.set("num", 3, timeout=None) cache.incr("num", 2) res = cache.get("num") assert res == 5 def test_incr_error(self, cache: RedisCache): if isinstance(cache.client, herd.HerdClient): pytest.skip("HerdClient doesn't support incr") with pytest.raises(ValueError): # key does not exist cache.incr("numnum") 
def test_incr_ignore_check(self, cache: RedisCache): if isinstance(cache.client, ShardClient): pytest.skip("ShardClient doesn't support argument ignore_key_check to incr") if isinstance(cache.client, herd.HerdClient): pytest.skip("HerdClient doesn't support incr") # key exists check will be skipped and the value will be incremented by # '1' which is the default delta cache.incr("num", ignore_key_check=True) res = cache.get("num") assert res == 1 cache.delete("num") # since key doesnt exist it is set to the delta value, 10 in this case cache.incr("num", 10, ignore_key_check=True) res = cache.get("num") assert res == 10 cache.delete("num") # following are just regression checks to make sure it still works as # expected with incr max 64 bit signed int cache.set("num", 9223372036854775807) cache.incr("num", ignore_key_check=True) res = cache.get("num") assert res == 9223372036854775808 cache.incr("num", 2, ignore_key_check=True) res = cache.get("num") assert res == 9223372036854775810 cache.set("num", 3) cache.incr("num", 2, ignore_key_check=True) res = cache.get("num") assert res == 5 def test_get_set_bool(self, cache: RedisCache): cache.set("bool", True) res = cache.get("bool") assert isinstance(res, bool) assert res is True cache.set("bool", False) res = cache.get("bool") assert isinstance(res, bool) assert res is False def test_decr(self, cache: RedisCache): if isinstance(cache.client, herd.HerdClient): pytest.skip("HerdClient doesn't support decr") cache.set("num", 20) cache.decr("num") res = cache.get("num") assert res == 19 cache.decr("num", 20) res = cache.get("num") assert res == -1 cache.decr("num", 2) res = cache.get("num") assert res == -3 cache.set("num", 20) cache.decr("num") res = cache.get("num") assert res == 19 # max 64 bit signed int + 1 cache.set("num", 9223372036854775808) cache.decr("num") res = cache.get("num") assert res == 9223372036854775807 cache.decr("num", 2) res = cache.get("num") assert res == 9223372036854775805 def test_version(self, cache: RedisCache): cache.set("keytest", 2, version=2) res = cache.get("keytest") assert res is None res = cache.get("keytest", version=2) assert res == 2 def test_incr_version(self, cache: RedisCache): cache.set("keytest", 2) cache.incr_version("keytest") res = cache.get("keytest") assert res is None res = cache.get("keytest", version=2) assert res == 2 def test_ttl_incr_version_no_timeout(self, cache: RedisCache): cache.set("my_key", "hello world!", timeout=None) cache.incr_version("my_key") my_value = cache.get("my_key", version=2) assert my_value == "hello world!" 
    def test_delete_pattern(self, cache: RedisCache):
        for key in ["foo-aa", "foo-ab", "foo-bb", "foo-bc"]:
            cache.set(key, "foo")

        res = cache.delete_pattern("*foo-a*")
        assert bool(res) is True

        keys = cache.keys("foo*")
        assert set(keys) == {"foo-bb", "foo-bc"}

        res = cache.delete_pattern("*foo-a*")
        assert bool(res) is False

    @patch("django_redis.cache.RedisCache.client")
    def test_delete_pattern_with_custom_count(self, client_mock, cache: RedisCache):
        for key in ["foo-aa", "foo-ab", "foo-bb", "foo-bc"]:
            cache.set(key, "foo")

        cache.delete_pattern("*foo-a*", itersize=2)

        client_mock.delete_pattern.assert_called_once_with("*foo-a*", itersize=2)

    @patch("django_redis.cache.RedisCache.client")
    def test_delete_pattern_with_settings_default_scan_count(
        self, client_mock, cache: RedisCache
    ):
        for key in ["foo-aa", "foo-ab", "foo-bb", "foo-bc"]:
            cache.set(key, "foo")
        expected_count = django_redis.cache.DJANGO_REDIS_SCAN_ITERSIZE

        cache.delete_pattern("*foo-a*")

        client_mock.delete_pattern.assert_called_once_with(
            "*foo-a*", itersize=expected_count
        )

    def test_close(self, cache: RedisCache, settings: SettingsWrapper):
        settings.DJANGO_REDIS_CLOSE_CONNECTION = True
        cache.set("f", "1")
        cache.close()

    def test_close_client(self, cache: RedisCache, mocker: MockerFixture):
        mock = mocker.patch.object(cache.client, "close")
        cache.close()
        assert mock.called

    def test_ttl(self, cache: RedisCache):
        cache.set("foo", "bar", 10)
        ttl = cache.ttl("foo")

        if isinstance(cache.client, herd.HerdClient):
            assert pytest.approx(ttl) == 12
        else:
            assert pytest.approx(ttl) == 10

        # Test ttl None
        cache.set("foo", "foo", timeout=None)
        ttl = cache.ttl("foo")
        assert ttl is None

        # Test ttl with expired key
        cache.set("foo", "foo", timeout=-1)
        ttl = cache.ttl("foo")
        assert ttl == 0

        # Test ttl with not existent key
        ttl = cache.ttl("not-existent-key")
        assert ttl == 0

    def test_pttl(self, cache: RedisCache):
        # Test pttl
        cache.set("foo", "bar", 10)
        ttl = cache.pttl("foo")

        # Allow an absolute delta of 10 ms, as precision/latency errors cause
        # exact comparisons to fail.
        if isinstance(cache.client, herd.HerdClient):
            assert pytest.approx(ttl, abs=10) == 12000
        else:
            assert pytest.approx(ttl, abs=10) == 10000

        # Test pttl with float value
        cache.set("foo", "bar", 5.5)
        ttl = cache.pttl("foo")

        if isinstance(cache.client, herd.HerdClient):
            assert pytest.approx(ttl, abs=10) == 7500
        else:
            assert pytest.approx(ttl, abs=10) == 5500

        # Test pttl None
        cache.set("foo", "foo", timeout=None)
        ttl = cache.pttl("foo")
        assert ttl is None

        # Test pttl with expired key
        cache.set("foo", "foo", timeout=-1)
        ttl = cache.pttl("foo")
        assert ttl == 0

        # Test pttl with not existent key
        ttl = cache.pttl("not-existent-key")
        assert ttl == 0

    def test_persist(self, cache: RedisCache):
        cache.set("foo", "bar", timeout=20)

        assert cache.persist("foo") is True

        ttl = cache.ttl("foo")
        assert ttl is None
        assert cache.persist("not-existent-key") is False

    def test_expire(self, cache: RedisCache):
        cache.set("foo", "bar", timeout=None)

        assert cache.expire("foo", 20) is True
        ttl = cache.ttl("foo")
        assert pytest.approx(ttl) == 20
        assert cache.expire("not-existent-key", 20) is False

    def test_pexpire(self, cache: RedisCache):
        cache.set("foo", "bar", timeout=None)

        assert cache.pexpire("foo", 20500) is True
        ttl = cache.pttl("foo")
        # Allow an absolute delta of 10 ms to absorb precision/latency error.
        assert pytest.approx(ttl, abs=10) == 20500
        assert cache.pexpire("not-existent-key", 20500) is False
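    # Convention used by the TTL helpers above: ttl()/pttl() return None for
    # keys that exist without an expiration and 0 for missing or already
    # expired keys, so callers can tell "persistent" apart from "gone".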
    def test_pexpire_at(self, cache: RedisCache):
        # Test setting an expiration time 1 hour ahead, by datetime.
        cache.set("foo", "bar", timeout=None)
        expiration_time = datetime.datetime.now() + timedelta(hours=1)
        assert cache.pexpire_at("foo", expiration_time) is True
        ttl = cache.pttl("foo")
        # pttl() reports milliseconds, so scale the expected value.
        assert pytest.approx(ttl, abs=10) == timedelta(hours=1).total_seconds() * 1000

        # Test setting an expiration time 2 hours ahead, by Unix timestamp
        # in milliseconds.
        cache.set("foo", "bar", timeout=None)
        expiration_time = datetime.datetime.now() + timedelta(hours=2)
        assert cache.pexpire_at("foo", int(expiration_time.timestamp() * 1000)) is True
        ttl = cache.pttl("foo")
        assert pytest.approx(ttl, abs=10) == timedelta(hours=2).total_seconds() * 1000

        # Test setting an expiration time 2 hours in the past, which
        # effectively deletes the key.
        expiration_time = datetime.datetime.now() - timedelta(hours=2)
        assert cache.pexpire_at("foo", expiration_time) is True
        value = cache.get("foo")
        assert value is None

        expiration_time = datetime.datetime.now() + timedelta(hours=2)
        assert cache.pexpire_at("not-existent-key", expiration_time) is False

    def test_expire_at(self, cache: RedisCache):
        # Test setting an expiration time 1 hour ahead, by datetime.
        cache.set("foo", "bar", timeout=None)
        expiration_time = datetime.datetime.now() + timedelta(hours=1)
        assert cache.expire_at("foo", expiration_time) is True
        ttl = cache.ttl("foo")
        assert pytest.approx(ttl, abs=1) == timedelta(hours=1).total_seconds()

        # Test setting an expiration time 2 hours ahead, by Unix timestamp.
        cache.set("foo", "bar", timeout=None)
        expiration_time = datetime.datetime.now() + timedelta(hours=2)
        assert cache.expire_at("foo", int(expiration_time.timestamp())) is True
        ttl = cache.ttl("foo")
        assert pytest.approx(ttl, abs=1) == timedelta(hours=2).total_seconds()

        # Test setting an expiration time 2 hours in the past, which
        # effectively deletes the key.
        expiration_time = datetime.datetime.now() - timedelta(hours=2)
        assert cache.expire_at("foo", expiration_time) is True
        value = cache.get("foo")
        assert value is None

        expiration_time = datetime.datetime.now() + timedelta(hours=2)
        assert cache.expire_at("not-existent-key", expiration_time) is False

    def test_lock(self, cache: RedisCache):
        lock = cache.lock("foobar")
        lock.acquire(blocking=True)

        assert cache.has_key("foobar")
        lock.release()
        assert not cache.has_key("foobar")

    def test_lock_released_by_thread(self, cache: RedisCache):
        lock = cache.lock("foobar", thread_local=False)
        lock.acquire(blocking=True)

        def release_lock(lock_):
            lock_.release()

        t = threading.Thread(target=release_lock, args=[lock])
        t.start()
        t.join()

        assert not cache.has_key("foobar")

    def test_iter_keys(self, cache: RedisCache):
        if isinstance(cache.client, ShardClient):
            pytest.skip("ShardClient doesn't support iter_keys")

        cache.set("foo1", 1)
        cache.set("foo2", 1)
        cache.set("foo3", 1)

        # Test simple result
        result = set(cache.iter_keys("foo*"))
        assert result == {"foo1", "foo2", "foo3"}

    def test_iter_keys_itersize(self, cache: RedisCache):
        if isinstance(cache.client, ShardClient):
            pytest.skip("ShardClient doesn't support iter_keys")

        cache.set("foo1", 1)
        cache.set("foo2", 1)
        cache.set("foo3", 1)

        # Test limited result
        result = list(cache.iter_keys("foo*", itersize=2))
        assert len(result) == 3

    def test_iter_keys_generator(self, cache: RedisCache):
        if isinstance(cache.client, ShardClient):
            pytest.skip("ShardClient doesn't support iter_keys")

        cache.set("foo1", 1)
        cache.set("foo2", 1)
        cache.set("foo3", 1)

        # Test generator object
        result = cache.iter_keys("foo*")
        next_value = next(result)
        assert next_value is not None
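    # Note: iter_keys() is backed by Redis SCAN (scan_iter in redis-py), so it
    # streams keys lazily; itersize only tunes the per-round-trip batch size,
    # not the number of keys returned, which is why the itersize test above
    # still sees all three keys.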
    def test_primary_replica_switching(self, cache: RedisCache):
        if isinstance(cache.client, ShardClient):
            pytest.skip("ShardClient doesn't support get_client")

        cache = cast(RedisCache, caches["sample"])
        client = cache.client
        client._server = ["foo", "bar"]
        client._clients = ["Foo", "Bar"]

        assert client.get_client(write=True) == "Foo"
        assert client.get_client(write=False) == "Bar"

    def test_touch_zero_timeout(self, cache: RedisCache):
        cache.set("test_key", 222, timeout=10)

        assert cache.touch("test_key", 0) is True
        res = cache.get("test_key")
        assert res is None

    def test_touch_positive_timeout(self, cache: RedisCache):
        cache.set("test_key", 222, timeout=10)

        assert cache.touch("test_key", 2) is True
        assert cache.get("test_key") == 222
        time.sleep(3)
        assert cache.get("test_key") is None

    def test_touch_negative_timeout(self, cache: RedisCache):
        cache.set("test_key", 222, timeout=10)

        assert cache.touch("test_key", -1) is True
        res = cache.get("test_key")
        assert res is None

    def test_touch_missed_key(self, cache: RedisCache):
        assert cache.touch("test_key_does_not_exist", 1) is False

    def test_touch_forever(self, cache: RedisCache):
        cache.set("test_key", "foo", timeout=1)
        result = cache.touch("test_key", None)
        assert result is True
        assert cache.ttl("test_key") is None
        time.sleep(2)
        assert cache.get("test_key") == "foo"

    def test_touch_forever_nonexistent(self, cache: RedisCache):
        result = cache.touch("test_key_does_not_exist", None)
        assert result is False

    def test_touch_default_timeout(self, cache: RedisCache):
        cache.set("test_key", "foo", timeout=1)
        result = cache.touch("test_key")
        assert result is True
        time.sleep(2)
        assert cache.get("test_key") == "foo"

    def test_clear(self, cache: RedisCache):
        cache.set("foo", "bar")
        value_from_cache = cache.get("foo")
        assert value_from_cache == "bar"
        cache.clear()
        value_from_cache_after_clear = cache.get("foo")
        assert value_from_cache_after_clear is None


# ---------------------------------------------------------------------------
# django-redis-5.2.0/tests/test_cache_options.py
# ---------------------------------------------------------------------------
import copy
from typing import Iterable, cast

import pytest
from django.core.cache import caches
from pytest_django.fixtures import SettingsWrapper
from redis.exceptions import ConnectionError

from django_redis.cache import RedisCache
from django_redis.client import ShardClient


def make_key(key: str, prefix: str, version: str) -> str:
    return f"{prefix}#{version}#{key}"


def reverse_key(key: str) -> str:
    return key.split("#", 2)[2]


@pytest.fixture
def ignore_exceptions_cache(settings: SettingsWrapper) -> RedisCache:
    caches_setting = copy.deepcopy(settings.CACHES)
    caches_setting["doesnotexist"]["OPTIONS"]["IGNORE_EXCEPTIONS"] = True
    settings.CACHES = caches_setting
    settings.DJANGO_REDIS_IGNORE_EXCEPTIONS = True
    return cast(RedisCache, caches["doesnotexist"])


def test_get_django_omit_exceptions_many_returns_default_arg(
    ignore_exceptions_cache: RedisCache,
):
    assert ignore_exceptions_cache._ignore_exceptions is True
    assert ignore_exceptions_cache.get_many(["key1", "key2", "key3"]) == {}


def test_get_django_omit_exceptions(ignore_exceptions_cache: RedisCache):
    assert ignore_exceptions_cache._ignore_exceptions is True
    assert ignore_exceptions_cache.get("key") is None
    assert ignore_exceptions_cache.get("key", "default") == "default"
    assert ignore_exceptions_cache.get("key", default="default") == "default"
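# The two "priority" tests below pin down the precedence rule: a per-cache
# OPTIONS["IGNORE_EXCEPTIONS"] always wins over the global
# DJANGO_REDIS_IGNORE_EXCEPTIONS setting, in both directions.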
def test_get_django_omit_exceptions_priority_1(settings: SettingsWrapper):
    caches_setting = copy.deepcopy(settings.CACHES)
    caches_setting["doesnotexist"]["OPTIONS"]["IGNORE_EXCEPTIONS"] = True
    settings.CACHES = caches_setting
    settings.DJANGO_REDIS_IGNORE_EXCEPTIONS = False
    cache = cast(RedisCache, caches["doesnotexist"])
    assert cache._ignore_exceptions is True
    assert cache.get("key") is None


def test_get_django_omit_exceptions_priority_2(settings: SettingsWrapper):
    caches_setting = copy.deepcopy(settings.CACHES)
    caches_setting["doesnotexist"]["OPTIONS"]["IGNORE_EXCEPTIONS"] = False
    settings.CACHES = caches_setting
    settings.DJANGO_REDIS_IGNORE_EXCEPTIONS = True
    cache = cast(RedisCache, caches["doesnotexist"])
    assert cache._ignore_exceptions is False
    with pytest.raises(ConnectionError):
        cache.get("key")


@pytest.fixture
def key_prefix_cache(
    cache: RedisCache, settings: SettingsWrapper
) -> Iterable[RedisCache]:
    caches_setting = copy.deepcopy(settings.CACHES)
    caches_setting["default"]["KEY_PREFIX"] = "*"
    settings.CACHES = caches_setting
    yield cache


@pytest.fixture
def with_prefix_cache() -> Iterable[RedisCache]:
    with_prefix = cast(RedisCache, caches["with_prefix"])
    yield with_prefix
    with_prefix.clear()


class TestDjangoRedisCacheEscapePrefix:
    def test_delete_pattern(
        self, key_prefix_cache: RedisCache, with_prefix_cache: RedisCache
    ):
        key_prefix_cache.set("a", "1")
        with_prefix_cache.set("b", "2")
        key_prefix_cache.delete_pattern("*")
        assert key_prefix_cache.has_key("a") is False
        assert with_prefix_cache.get("b") == "2"

    def test_iter_keys(
        self, key_prefix_cache: RedisCache, with_prefix_cache: RedisCache
    ):
        if isinstance(key_prefix_cache.client, ShardClient):
            pytest.skip("ShardClient doesn't support iter_keys")

        key_prefix_cache.set("a", "1")
        with_prefix_cache.set("b", "2")
        assert list(key_prefix_cache.iter_keys("*")) == ["a"]

    def test_keys(self, key_prefix_cache: RedisCache, with_prefix_cache: RedisCache):
        key_prefix_cache.set("a", "1")
        with_prefix_cache.set("b", "2")
        keys = key_prefix_cache.keys("*")
        assert "a" in keys
        assert "b" not in keys


def test_custom_key_function(cache: RedisCache, settings: SettingsWrapper):
    caches_setting = copy.deepcopy(settings.CACHES)
    caches_setting["default"]["KEY_FUNCTION"] = "test_cache_options.make_key"
    caches_setting["default"]["REVERSE_KEY_FUNCTION"] = "test_cache_options.reverse_key"
    settings.CACHES = caches_setting

    if isinstance(cache.client, ShardClient):
        pytest.skip("ShardClient doesn't support get_client")

    for key in ["foo-aa", "foo-ab", "foo-bb", "foo-bc"]:
        cache.set(key, "foo")

    res = cache.delete_pattern("*foo-a*")
    assert bool(res) is True

    keys = cache.keys("foo*")
    assert set(keys) == {"foo-bb", "foo-bc"}

    # ensure our custom function was actually called
    assert {k.decode() for k in cache.client.get_client(write=False).keys("*")} == (
        {"#1#foo-bc", "#1#foo-bb"}
    )


# ---------------------------------------------------------------------------
# django-redis-5.2.0/tests/test_client.py
# ---------------------------------------------------------------------------
from typing import Iterable
from unittest.mock import Mock, patch

import pytest
from django.core.cache import DEFAULT_CACHE_ALIAS
from pytest_django.fixtures import SettingsWrapper
from pytest_mock import MockerFixture

from django_redis.cache import RedisCache
from django_redis.client import DefaultClient, ShardClient


@pytest.fixture
def cache_client(cache: RedisCache) -> Iterable[DefaultClient]:
    client = cache.client
    client.set("TestClientClose", 0)
    yield client
    client.delete("TestClientClose")
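# Illustrative sketch, not from the original suite: for raw access beyond the
# client wrapper tested below, django-redis documents get_redis_connection();
# the helper below is hypothetical and assumes a "default" cache alias.
def _example_raw_connection_access():
    from django_redis import get_redis_connection

    # Returns the underlying redis-py client for the given cache alias.
    conn = get_redis_connection("default")
    return conn.ping()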
class TestClientClose:
    def test_close_client_disconnect_default(
        self, cache_client: DefaultClient, mocker: MockerFixture
    ):
        mock = mocker.patch.object(cache_client.connection_factory, "disconnect")
        cache_client.close()
        assert not mock.called

    def test_close_disconnect_settings(
        self,
        cache_client: DefaultClient,
        settings: SettingsWrapper,
        mocker: MockerFixture,
    ):
        settings.DJANGO_REDIS_CLOSE_CONNECTION = True
        mock = mocker.patch.object(cache_client.connection_factory, "disconnect")
        cache_client.close()
        assert mock.called

    def test_close_disconnect_settings_cache(
        self,
        cache_client: DefaultClient,
        mocker: MockerFixture,
        settings: SettingsWrapper,
    ):
        settings.CACHES[DEFAULT_CACHE_ALIAS]["OPTIONS"]["CLOSE_CONNECTION"] = True
        cache_client.set("TestClientClose", 0)
        mock = mocker.patch.object(cache_client.connection_factory, "disconnect")
        cache_client.close()
        assert mock.called

    def test_close_disconnect_client_options(
        self, cache_client: DefaultClient, mocker: MockerFixture
    ):
        cache_client._options["CLOSE_CONNECTION"] = True
        mock = mocker.patch.object(cache_client.connection_factory, "disconnect")
        cache_client.close()
        assert mock.called


class TestDefaultClient:
    @patch("test_client.DefaultClient.get_client")
    @patch("test_client.DefaultClient.__init__", return_value=None)
    def test_delete_pattern_calls_get_client_given_no_client(
        self, init_mock, get_client_mock
    ):
        client = DefaultClient()
        client._backend = Mock()
        client._backend.key_prefix = ""

        client.delete_pattern(pattern="foo*")

        get_client_mock.assert_called_once_with(write=True)

    @patch("test_client.DefaultClient.make_pattern")
    @patch("test_client.DefaultClient.get_client", return_value=Mock())
    @patch("test_client.DefaultClient.__init__", return_value=None)
    def test_delete_pattern_calls_make_pattern(
        self, init_mock, get_client_mock, make_pattern_mock
    ):
        client = DefaultClient()
        client._backend = Mock()
        client._backend.key_prefix = ""
        get_client_mock.return_value.scan_iter.return_value = []

        client.delete_pattern(pattern="foo*")

        kwargs = {"version": None, "prefix": None}
        make_pattern_mock.assert_called_once_with("foo*", **kwargs)

    @patch("test_client.DefaultClient.make_pattern")
    @patch("test_client.DefaultClient.get_client", return_value=Mock())
    @patch("test_client.DefaultClient.__init__", return_value=None)
    def test_delete_pattern_calls_scan_iter_with_count_if_itersize_given(
        self, init_mock, get_client_mock, make_pattern_mock
    ):
        client = DefaultClient()
        client._backend = Mock()
        client._backend.key_prefix = ""
        get_client_mock.return_value.scan_iter.return_value = []

        client.delete_pattern(pattern="foo*", itersize=90210)

        get_client_mock.return_value.scan_iter.assert_called_once_with(
            count=90210, match=make_pattern_mock.return_value
        )


class TestShardClient:
    @patch("test_client.DefaultClient.make_pattern")
    @patch("test_client.ShardClient.__init__", return_value=None)
    def test_delete_pattern_calls_scan_iter_with_count_if_itersize_given(
        self, init_mock, make_pattern_mock
    ):
        client = ShardClient()
        client._backend = Mock()
        client._backend.key_prefix = ""
        connection = Mock()
        connection.scan_iter.return_value = []
        client._serverdict = {"test": connection}

        client.delete_pattern(pattern="foo*", itersize=10)

        connection.scan_iter.assert_called_once_with(
            count=10, match=make_pattern_mock.return_value
        )
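    # ShardClient keeps one redis connection per shard in _serverdict, so
    # delete_pattern() has to SCAN (and delete) on each shard; a single-entry
    # dict keeps these assertions simple.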
    @patch("test_client.DefaultClient.make_pattern")
    @patch("test_client.ShardClient.__init__", return_value=None)
    def test_delete_pattern_calls_scan_iter(self, init_mock, make_pattern_mock):
        client = ShardClient()
        client._backend = Mock()
        client._backend.key_prefix = ""
        connection = Mock()
        connection.scan_iter.return_value = []
        client._serverdict = {"test": connection}

        client.delete_pattern(pattern="foo*")

        connection.scan_iter.assert_called_once_with(
            match=make_pattern_mock.return_value
        )

    @patch("test_client.DefaultClient.make_pattern")
    @patch("test_client.ShardClient.__init__", return_value=None)
    def test_delete_pattern_calls_delete_for_given_keys(
        self, init_mock, make_pattern_mock
    ):
        client = ShardClient()
        client._backend = Mock()
        client._backend.key_prefix = ""
        connection = Mock()
        connection.scan_iter.return_value = [Mock(), Mock()]
        connection.delete.return_value = 0
        client._serverdict = {"test": connection}

        client.delete_pattern(pattern="foo*")

        connection.delete.assert_called_once_with(*connection.scan_iter.return_value)


# ---------------------------------------------------------------------------
# django-redis-5.2.0/tests/test_connection_string.py
# ---------------------------------------------------------------------------
import pytest

from django_redis import pool


@pytest.mark.parametrize(
    "connection_string",
    [
        "unix://tmp/foo.bar?db=1",
        "redis://localhost/2",
        "rediss://localhost:3333?db=2",
    ],
)
def test_connection_strings(connection_string: str):
    cf = pool.get_connection_factory(
        path="django_redis.pool.ConnectionFactory", options={}
    )
    res = cf.make_connection_params(connection_string)
    assert res["url"] == connection_string


# ---------------------------------------------------------------------------
# django-redis-5.2.0/tests/test_hashring.py
# ---------------------------------------------------------------------------
import pytest

from django_redis.hash_ring import HashRing


class Node:
    def __init__(self, id):
        self.id = id

    def __str__(self):
        return f"node:{self.id}"

    def __repr__(self):
        return f"<Node {self.id}>"


@pytest.fixture
def hash_ring():
    return HashRing([Node(i) for i in range(3)])


def test_hashring(hash_ring):
    ids = []
    for key in [f"test{x}" for x in range(10)]:
        node = hash_ring.get_node(key)
        ids.append(node.id)

    assert ids == [0, 2, 1, 2, 2, 2, 2, 0, 1, 1]


def test_hashring_brute_force(hash_ring):
    for key in (f"test{x}" for x in range(10000)):
        assert hash_ring.get_node(key)


# ---------------------------------------------------------------------------
# django-redis-5.2.0/tests/test_serializers.py
# ---------------------------------------------------------------------------
import pickle

import pytest
from django.core.exceptions import ImproperlyConfigured

from django_redis.serializers.pickle import PickleSerializer


class TestPickleSerializer:
    def test_invalid_pickle_version_provided(self):
        with pytest.raises(
            ImproperlyConfigured, match="PICKLE_VERSION value must be an integer"
        ):
            PickleSerializer({"PICKLE_VERSION": "not-an-integer"})

    def test_setup_pickle_version_not_explicitly_specified(self):
        serializer = PickleSerializer({})
        assert serializer._pickle_version == pickle.DEFAULT_PROTOCOL

    def test_setup_pickle_version_too_high(self):
        with pytest.raises(
            ImproperlyConfigured,
            match=f"PICKLE_VERSION can't be higher than pickle.HIGHEST_PROTOCOL:"
            f" {pickle.HIGHEST_PROTOCOL}",
        ):
            PickleSerializer({"PICKLE_VERSION": pickle.HIGHEST_PROTOCOL + 1})
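# Illustrative configuration sketch, not used by the tests: the pickle
# serializer exercised above is django-redis' default, and the documented
# OPTIONS["SERIALIZER"] key lets you plug in a different one, e.g. JSON.
EXAMPLE_SERIALIZER_CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": "redis://127.0.0.1:6379/1",
        "OPTIONS": {
            # JSON trades pickle's type fidelity for interoperability.
            "SERIALIZER": "django_redis.serializers.json.JSONSerializer",
        },
    }
}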
# ---------------------------------------------------------------------------
# django-redis-5.2.0/tests/test_session.py
# ---------------------------------------------------------------------------
import base64
import unittest
from datetime import timedelta
from typing import Optional, Type

from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase
from django.contrib.sessions.backends.cache import SessionStore as CacheSession
from django.core.cache import DEFAULT_CACHE_ALIAS, caches
from django.test import override_settings
from django.utils import timezone

from django_redis.serializers.msgpack import MSGPackSerializer

SessionType = Type[SessionBase]


# Copied from Django's sessions test suite. Keep in sync with upstream.
# https://github.com/django/django/blob/main/tests/sessions_tests/tests.py
class SessionTestsMixin:
    # This does not inherit from TestCase to avoid any tests being run with this
    # class, which wouldn't work, and to allow different TestCase subclasses to
    # be used.

    backend: Optional[SessionType] = None  # subclasses must specify

    def setUp(self):
        self.session = self.backend()

    def tearDown(self):
        # NB: be careful to delete any sessions created; stale sessions fill up
        # the /tmp (with some backends) and eventually overwhelm it after lots
        # of runs (think buildbots)
        self.session.delete()

    def test_new_session(self):
        self.assertIs(self.session.modified, False)
        self.assertIs(self.session.accessed, False)

    def test_get_empty(self):
        self.assertIsNone(self.session.get("cat"))

    def test_store(self):
        self.session["cat"] = "dog"
        self.assertIs(self.session.modified, True)
        self.assertEqual(self.session.pop("cat"), "dog")

    def test_pop(self):
        self.session["some key"] = "exists"
        # Need to reset these to pretend we haven't accessed it:
        self.accessed = False
        self.modified = False

        self.assertEqual(self.session.pop("some key"), "exists")
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, True)
        self.assertIsNone(self.session.get("some key"))

    def test_pop_default(self):
        self.assertEqual(
            self.session.pop("some key", "does not exist"), "does not exist"
        )
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, False)

    def test_pop_default_named_argument(self):
        self.assertEqual(
            self.session.pop("some key", default="does not exist"), "does not exist"
        )
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, False)

    def test_pop_no_default_keyerror_raised(self):
        with self.assertRaises(KeyError):
            self.session.pop("some key")

    def test_setdefault(self):
        self.assertEqual(self.session.setdefault("foo", "bar"), "bar")
        self.assertEqual(self.session.setdefault("foo", "baz"), "bar")
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, True)

    def test_update(self):
        self.session.update({"update key": 1})
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, True)
        self.assertEqual(self.session.get("update key"), 1)

    def test_has_key(self):
        self.session["some key"] = 1
        self.session.modified = False
        self.session.accessed = False
        self.assertIn("some key", self.session)
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, False)

    def test_values(self):
        self.assertEqual(list(self.session.values()), [])
        self.assertIs(self.session.accessed, True)
        self.session["some key"] = 1
        self.session.modified = False
        self.session.accessed = False
        self.assertEqual(list(self.session.values()), [1])
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, False)

    def test_keys(self):
        self.session["x"] = 1
        self.session.modified = False
        self.session.accessed = False
        self.assertEqual(list(self.session.keys()), ["x"])
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, False)
    def test_items(self):
        self.session["x"] = 1
        self.session.modified = False
        self.session.accessed = False
        self.assertEqual(list(self.session.items()), [("x", 1)])
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, False)

    def test_clear(self):
        self.session["x"] = 1
        self.session.modified = False
        self.session.accessed = False
        self.assertEqual(list(self.session.items()), [("x", 1)])
        self.session.clear()
        self.assertEqual(list(self.session.items()), [])
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, True)

    def test_save(self):
        self.session.save()
        self.assertIs(self.session.exists(self.session.session_key), True)

    def test_delete(self):
        self.session.save()
        self.session.delete(self.session.session_key)
        self.assertIs(self.session.exists(self.session.session_key), False)

    def test_flush(self):
        self.session["foo"] = "bar"
        self.session.save()
        prev_key = self.session.session_key
        self.session.flush()
        self.assertIs(self.session.exists(prev_key), False)
        self.assertNotEqual(self.session.session_key, prev_key)
        self.assertIsNone(self.session.session_key)
        self.assertIs(self.session.modified, True)
        self.assertIs(self.session.accessed, True)

    def test_cycle(self):
        self.session["a"], self.session["b"] = "c", "d"
        self.session.save()
        prev_key = self.session.session_key
        prev_data = list(self.session.items())
        self.session.cycle_key()
        self.assertIs(self.session.exists(prev_key), False)
        self.assertNotEqual(self.session.session_key, prev_key)
        self.assertEqual(list(self.session.items()), prev_data)

    def test_cycle_with_no_session_cache(self):
        self.session["a"], self.session["b"] = "c", "d"
        self.session.save()
        prev_data = self.session.items()
        self.session = self.backend(self.session.session_key)
        self.assertIs(hasattr(self.session, "_session_cache"), False)
        self.session.cycle_key()
        self.assertCountEqual(self.session.items(), prev_data)

    def test_save_doesnt_clear_data(self):
        self.session["a"] = "b"
        self.session.save()
        self.assertEqual(self.session["a"], "b")
    def test_invalid_key(self):
        # Submitting an invalid session key (either by guessing, or if the db has
        # removed the key) results in a new key being generated.
        try:
            session = self.backend("1")
            session.save()
            self.assertNotEqual(session.session_key, "1")
            self.assertIsNone(session.get("cat"))
            session.delete()
        finally:
            # Some backends leave a stale cache entry for the invalid
            # session key; make sure that entry is manually deleted
            session.delete("1")

    def test_session_key_empty_string_invalid(self):
        """Falsey values (such as an empty string) are rejected."""
        self.session._session_key = ""
        self.assertIsNone(self.session.session_key)

    def test_session_key_too_short_invalid(self):
        """Strings shorter than 8 characters are rejected."""
        self.session._session_key = "1234567"
        self.assertIsNone(self.session.session_key)

    def test_session_key_valid_string_saved(self):
        """Strings of length 8 and up are accepted and stored."""
        self.session._session_key = "12345678"
        self.assertEqual(self.session.session_key, "12345678")

    def test_session_key_is_read_only(self):
        def set_session_key(session):
            session.session_key = session._get_new_session_key()

        with self.assertRaises(AttributeError):
            set_session_key(self.session)

    # Custom session expiry
    def test_default_expiry(self):
        # A normal session has a max age equal to settings
        self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)

        # So does a custom session with an idle expiration time of 0 (but it'll
        # expire at browser close)
        self.session.set_expiry(0)
        self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)

    def test_custom_expiry_seconds(self):
        modification = timezone.now()

        self.session.set_expiry(10)

        date = self.session.get_expiry_date(modification=modification)
        self.assertEqual(date, modification + timedelta(seconds=10))

        age = self.session.get_expiry_age(modification=modification)
        self.assertEqual(age, 10)
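    # Note: set_expiry() accepts several types, exercised in the tests around
    # here: an int number of seconds, a timedelta, an absolute datetime, 0 for
    # expire-at-browser-close, and None to fall back to the global
    # SESSION_COOKIE_AGE.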
    def test_custom_expiry_timedelta(self):
        modification = timezone.now()

        # Mock timezone.now, because set_expiry calls it on this code path.
        original_now = timezone.now
        try:
            timezone.now = lambda: modification
            self.session.set_expiry(timedelta(seconds=10))
        finally:
            timezone.now = original_now

        date = self.session.get_expiry_date(modification=modification)
        self.assertEqual(date, modification + timedelta(seconds=10))

        age = self.session.get_expiry_age(modification=modification)
        self.assertEqual(age, 10)

    def test_custom_expiry_datetime(self):
        modification = timezone.now()

        self.session.set_expiry(modification + timedelta(seconds=10))

        date = self.session.get_expiry_date(modification=modification)
        self.assertEqual(date, modification + timedelta(seconds=10))

        age = self.session.get_expiry_age(modification=modification)
        self.assertEqual(age, 10)

    def test_custom_expiry_reset(self):
        self.session.set_expiry(None)
        self.session.set_expiry(10)
        self.session.set_expiry(None)
        self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)

    def test_get_expire_at_browser_close(self):
        # Tests get_expire_at_browser_close with different settings and different
        # set_expiry calls
        with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=False):
            self.session.set_expiry(10)
            self.assertIs(self.session.get_expire_at_browser_close(), False)

            self.session.set_expiry(0)
            self.assertIs(self.session.get_expire_at_browser_close(), True)

            self.session.set_expiry(None)
            self.assertIs(self.session.get_expire_at_browser_close(), False)

        with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=True):
            self.session.set_expiry(10)
            self.assertIs(self.session.get_expire_at_browser_close(), False)

            self.session.set_expiry(0)
            self.assertIs(self.session.get_expire_at_browser_close(), True)

            self.session.set_expiry(None)
            self.assertIs(self.session.get_expire_at_browser_close(), True)

    def test_decode(self):
        # Ensure we can decode what we encode
        data = {"a test key": "a test value"}
        encoded = self.session.encode(data)
        self.assertEqual(self.session.decode(encoded), data)

    def test_decode_failure_logged_to_security(self):
        bad_encode = base64.b64encode(b"flaskdj:alkdjf").decode("ascii")
        with self.assertLogs("django.security.SuspiciousSession", "WARNING") as cm:
            self.assertEqual({}, self.session.decode(bad_encode))
        # The failed decode is logged.
        self.assertIn("corrupted", cm.output[0])

    def test_actual_expiry(self):
        # this doesn't work with JSONSerializer (serializing timedelta)
        with override_settings(
            SESSION_SERIALIZER="django.contrib.sessions.serializers.PickleSerializer"
        ):
            self.session = self.backend()  # reinitialize after overriding settings

            # Regression test for #19200
            old_session_key = None
            new_session_key = None
            try:
                self.session["foo"] = "bar"
                self.session.set_expiry(-timedelta(seconds=10))
                self.session.save()
                old_session_key = self.session.session_key
                # With an expiry date in the past, the session expires instantly.
                new_session = self.backend(self.session.session_key)
                new_session_key = new_session.session_key
                self.assertNotIn("foo", new_session)
            finally:
                self.session.delete(old_session_key)
                self.session.delete(new_session_key)
""" session = self.backend("someunknownkey") session.load() self.assertIsNone(session.session_key) self.assertIs(session.exists(session.session_key), False) # provided unknown key was cycled, not reused self.assertNotEqual(session.session_key, "someunknownkey") def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self): """ Sessions shouldn't be resurrected by a concurrent request. """ from django.contrib.sessions.backends.base import UpdateError # Create new session. s1 = self.backend() s1["test_data"] = "value1" s1.save(must_create=True) # Logout in another context. s2 = self.backend(s1.session_key) s2.delete() # Modify session in first context. s1["test_data"] = "value2" with self.assertRaises(UpdateError): # This should throw an exception as the session is deleted, not # resurrect the session. s1.save() self.assertEqual(s1.load(), {}) class SessionTests(SessionTestsMixin, unittest.TestCase): backend = CacheSession def test_actual_expiry(self): if isinstance( caches[DEFAULT_CACHE_ALIAS].client._serializer, MSGPackSerializer ): self.skipTest("msgpack serializer doesn't support datetime serialization") super().test_actual_expiry()