django-redis-4.9.0/0000755000175000001440000000000013245753134014226 5ustar niwiusers00000000000000django-redis-4.9.0/django_redis/0000755000175000001440000000000013245753134016656 5ustar niwiusers00000000000000django-redis-4.9.0/django_redis/client/0000755000175000001440000000000013245753134020134 5ustar niwiusers00000000000000django-redis-4.9.0/django_redis/client/__init__.py0000644000175000001440000000026113174554724022251 0ustar niwiusers00000000000000from .default import DefaultClient from .herd import HerdClient from .sharded import ShardClient __all__ = ["DefaultClient", "ShardClient", "HerdClient"] django-redis-4.9.0/django_redis/client/default.py0000644000175000001440000004421413245747656022153 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals import random import re import socket from collections import OrderedDict from django.conf import settings from django.core.cache.backends.base import DEFAULT_TIMEOUT, get_key_func from django.core.exceptions import ImproperlyConfigured from django.utils import six from django.utils.encoding import smart_text from redis.exceptions import ConnectionError, ResponseError, TimeoutError from .. 
import pool from ..exceptions import CompressorError, ConnectionInterrupted from ..util import CacheKey, load_class _main_exceptions = (TimeoutError, ResponseError, ConnectionError, socket.timeout) special_re = re.compile('([*?[])') def glob_escape(s): return special_re.sub(r'[\1]', s) class DefaultClient(object): def __init__(self, server, params, backend): self._backend = backend self._server = server self._params = params self.reverse_key = get_key_func(params.get("REVERSE_KEY_FUNCTION") or "django_redis.util.default_reverse_key") if not self._server: raise ImproperlyConfigured("Missing connections string") if not isinstance(self._server, (list, tuple, set)): self._server = self._server.split(",") self._clients = [None] * len(self._server) self._options = params.get("OPTIONS", {}) self._slave_read_only = self._options.get('SLAVE_READ_ONLY', True) serializer_path = self._options.get("SERIALIZER", "django_redis.serializers.pickle.PickleSerializer") serializer_cls = load_class(serializer_path) compressor_path = self._options.get("COMPRESSOR", "django_redis.compressors.identity.IdentityCompressor") compressor_cls = load_class(compressor_path) self._serializer = serializer_cls(options=self._options) self._compressor = compressor_cls(options=self._options) self.connection_factory = pool.get_connection_factory(options=self._options) def __contains__(self, key): return self.has_key(key) def get_next_client_index(self, write=True, tried=()): """ Return a next index for read client. This function implements a default behavior for get a next read client for master-slave setup. Overwrite this function if you want a specific behavior. 
""" if tried and len(tried) < len(self._server): not_tried = [i for i in range(0, len(self._server)) if i not in tried] return random.choice(not_tried) if write or len(self._server) == 1: return 0 return random.randint(1, len(self._server) - 1) def get_client(self, write=True, tried=(), show_index=False): """ Method used for obtain a raw redis client. This function is used by almost all cache backend operations for obtain a native redis client/connection instance. """ index = self.get_next_client_index(write=write, tried=tried or []) if self._clients[index] is None: self._clients[index] = self.connect(index) if show_index: return self._clients[index], index else: return self._clients[index] def connect(self, index=0): """ Given a connection index, returns a new raw redis client/connection instance. Index is used for master/slave setups and indicates that connection string should be used. In normal setups, index is 0. """ return self.connection_factory.connect(self._server[index]) def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, client=None, nx=False, xx=False): """ Persist a value to the cache, and set an optional expiration time. Also supports optional nx parameter. If set to True - will use redis setnx instead of set. """ nkey = self.make_key(key, version=version) nvalue = self.encode(value) if timeout == DEFAULT_TIMEOUT: timeout = self._backend.default_timeout original_client = client tried = [] while True: try: if not client: client, index = self.get_client(write=True, tried=tried, show_index=True) if timeout is not None: # Convert to milliseconds timeout = int(timeout * 1000) if timeout <= 0: if nx: # Using negative timeouts when nx is True should # not expire (in our case delete) the value if it exists. # Obviously expire not existent value is noop. 
return not self.has_key(key, version=version, client=client) else: # redis doesn't support negative timeouts in ex flags # so it seems that it's better to just delete the key # than to set it and than expire in a pipeline return self.delete(key, client=client, version=version) return client.set(nkey, nvalue, nx=nx, px=timeout, xx=xx) except _main_exceptions as e: if not original_client and not self._slave_read_only and len(tried) < len(self._server): tried.append(index) client = None continue raise ConnectionInterrupted(connection=client, parent=e) def incr_version(self, key, delta=1, version=None, client=None): """ Adds delta to the cache version for the supplied key. Returns the new version. """ if client is None: client = self.get_client(write=True) if version is None: version = self._backend.version old_key = self.make_key(key, version) value = self.get(old_key, version=version, client=client) try: ttl = client.ttl(old_key) except _main_exceptions as e: raise ConnectionInterrupted(connection=client, parent=e) if value is None: raise ValueError("Key '%s' not found" % key) if isinstance(key, CacheKey): new_key = self.make_key(key.original_key(), version=version + delta) else: new_key = self.make_key(key, version=version + delta) self.set(new_key, value, timeout=ttl, client=client) self.delete(old_key, client=client) return version + delta def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, client=None): """ Add a value to the cache, failing if the key already exists. Returns ``True`` if the object was added, ``False`` if not. """ return self.set(key, value, timeout, version=version, client=client, nx=True) def get(self, key, default=None, version=None, client=None): """ Retrieve a value from the cache. Returns decoded value if key is found, the default if not. 
""" if client is None: client = self.get_client(write=False) key = self.make_key(key, version=version) try: value = client.get(key) except _main_exceptions as e: raise ConnectionInterrupted(connection=client, parent=e) if value is None: return default return self.decode(value) def persist(self, key, version=None, client=None): if client is None: client = self.get_client(write=True) key = self.make_key(key, version=version) if client.exists(key): client.persist(key) def expire(self, key, timeout, version=None, client=None): if client is None: client = self.get_client(write=True) key = self.make_key(key, version=version) if client.exists(key): client.expire(key, timeout) def lock(self, key, version=None, timeout=None, sleep=0.1, blocking_timeout=None, client=None): if client is None: client = self.get_client(write=True) key = self.make_key(key, version=version) return client.lock(key, timeout=timeout, sleep=sleep, blocking_timeout=blocking_timeout) def delete(self, key, version=None, prefix=None, client=None): """ Remove a key from the cache. """ if client is None: client = self.get_client(write=True) try: return client.delete(self.make_key(key, version=version, prefix=prefix)) except _main_exceptions as e: raise ConnectionInterrupted(connection=client, parent=e) def delete_pattern(self, pattern, version=None, prefix=None, client=None, itersize=None): """ Remove all keys matching pattern. """ if client is None: client = self.get_client(write=True) pattern = self.make_pattern(pattern, version=version, prefix=prefix) kwargs = {'match': pattern, } if itersize: kwargs['count'] = itersize try: count = 0 for key in client.scan_iter(**kwargs): client.delete(key) count += 1 return count except _main_exceptions as e: raise ConnectionInterrupted(connection=client, parent=e) def delete_many(self, keys, version=None, client=None): """ Remove multiple keys at once. 
""" if client is None: client = self.get_client(write=True) keys = [self.make_key(k, version=version) for k in keys] if not keys: return try: return client.delete(*keys) except _main_exceptions as e: raise ConnectionInterrupted(connection=client, parent=e) def clear(self, client=None): """ Flush all cache keys. """ if client is None: client = self.get_client(write=True) try: client.flushdb() except _main_exceptions as e: raise ConnectionInterrupted(connection=client, parent=e) def decode(self, value): """ Decode the given value. """ try: value = int(value) except (ValueError, TypeError): try: value = self._compressor.decompress(value) except CompressorError: # Handle little values, chosen to be not compressed pass value = self._serializer.loads(value) return value def encode(self, value): """ Encode the given value. """ if isinstance(value, bool) or not isinstance(value, six.integer_types): value = self._serializer.dumps(value) value = self._compressor.compress(value) return value return value def get_many(self, keys, version=None, client=None): """ Retrieve many keys. """ if client is None: client = self.get_client(write=False) if not keys: return {} recovered_data = OrderedDict() map_keys = OrderedDict( (self.make_key(k, version=version), k) for k in keys ) try: results = client.mget(*map_keys) except _main_exceptions as e: raise ConnectionInterrupted(connection=client, parent=e) for key, value in zip(map_keys, results): if value is None: continue recovered_data[map_keys[key]] = self.decode(value) return recovered_data def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None, client=None): """ Set a bunch of values in the cache at once from a dict of key/value pairs. This is much more efficient than calling set() multiple times. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. 
""" if client is None: client = self.get_client(write=True) try: pipeline = client.pipeline() for key, value in data.items(): self.set(key, value, timeout, version=version, client=pipeline) pipeline.execute() except _main_exceptions as e: raise ConnectionInterrupted(connection=client, parent=e) def _incr(self, key, delta=1, version=None, client=None, ignore_key_check=False): if client is None: client = self.get_client(write=True) key = self.make_key(key, version=version) try: try: # if key expired after exists check, then we get # key with wrong value and ttl -1. # use lua script for atomicity if not ignore_key_check: lua = """ local exists = redis.call('EXISTS', KEYS[1]) if (exists == 1) then return redis.call('INCRBY', KEYS[1], ARGV[1]) else return false end """ else: lua = """ return redis.call('INCRBY', KEYS[1], ARGV[1]) """ value = client.eval(lua, 1, key, delta) if value is None: raise ValueError("Key '%s' not found" % key) except ResponseError: # if cached value or total value is greater than 64 bit signed # integer. # elif int is encoded. so redis sees the data as string. # In this situations redis will throw ResponseError # try to keep TTL of key timeout = client.ttl(key) # returns -2 if the key does not exist # means, that key have expired if timeout == -2: raise ValueError("Key '%s' not found" % key) value = self.get(key, version=version, client=client) + delta self.set(key, value, version=version, timeout=timeout, client=client) except _main_exceptions as e: raise ConnectionInterrupted(connection=client, parent=e) return value def incr(self, key, delta=1, version=None, client=None, ignore_key_check=False): """ Add delta to value in the cache. If the key does not exist, raise a ValueError exception. if ignore_key_check=True then the key will be created and set to the delta value by default. 
""" return self._incr(key=key, delta=delta, version=version, client=client, ignore_key_check=ignore_key_check) def decr(self, key, delta=1, version=None, client=None): """ Decreace delta to value in the cache. If the key does not exist, raise a ValueError exception. """ return self._incr(key=key, delta=-delta, version=version, client=client) def ttl(self, key, version=None, client=None): """ Executes TTL redis command and return the "time-to-live" of specified key. If key is a non volatile key, it returns None. """ if client is None: client = self.get_client(write=False) key = self.make_key(key, version=version) if not client.exists(key): return 0 t = client.ttl(key) if t >= 0: return t elif t == -1: return None elif t == -2: return 0 else: # Should never reach here return None def has_key(self, key, version=None, client=None): """ Test if key exists. """ if client is None: client = self.get_client(write=False) key = self.make_key(key, version=version) try: return client.exists(key) except _main_exceptions as e: raise ConnectionInterrupted(connection=client, parent=e) def iter_keys(self, search, itersize=None, client=None, version=None): """ Same as keys, but uses redis >= 2.8 cursors for make memory efficient keys iteration. """ if client is None: client = self.get_client(write=False) pattern = self.make_pattern(search, version=version) for item in client.scan_iter(match=pattern, count=itersize): item = smart_text(item) yield self.reverse_key(item) def keys(self, search, version=None, client=None): """ Execute KEYS command and return matched results. Warning: this can return huge number of results, in this case, it strongly recommended use iter_keys for it. 
""" if client is None: client = self.get_client(write=False) pattern = self.make_pattern(search, version=version) try: encoding_map = [smart_text(k) for k in client.keys(pattern)] return [self.reverse_key(k) for k in encoding_map] except _main_exceptions as e: raise ConnectionInterrupted(connection=client, parent=e) def make_key(self, key, version=None, prefix=None): if isinstance(key, CacheKey): return key if prefix is None: prefix = self._backend.key_prefix if version is None: version = self._backend.version return CacheKey(self._backend.key_func(key, prefix, version)) def make_pattern(self, pattern, version=None, prefix=None): if isinstance(pattern, CacheKey): return pattern if prefix is None: prefix = self._backend.key_prefix prefix = glob_escape(prefix) if version is None: version = self._backend.version version = glob_escape(str(version)) return CacheKey(self._backend.key_func(pattern, prefix, version)) def close(self, **kwargs): if getattr(settings, "DJANGO_REDIS_CLOSE_CONNECTION", False): for i in range(len(self._clients)): for c in self._clients[i].connection_pool._available_connections: c.disconnect() self._clients[i] = None django-redis-4.9.0/django_redis/client/herd.py0000644000175000001440000001050213174554724021433 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- import random import socket import time from collections import OrderedDict from django.conf import settings from redis.exceptions import ConnectionError, ResponseError, TimeoutError from ..exceptions import ConnectionInterrupted from .default import DEFAULT_TIMEOUT, DefaultClient _main_exceptions = (ConnectionError, ResponseError, TimeoutError, socket.timeout) class Marker(object): """ Dummy class for use as marker for herded keys. 
""" pass CACHE_HERD_TIMEOUT = getattr(settings, 'CACHE_HERD_TIMEOUT', 60) def _is_expired(x): if x >= CACHE_HERD_TIMEOUT: return True val = x + random.randint(1, CACHE_HERD_TIMEOUT) if val >= CACHE_HERD_TIMEOUT: return True return False class HerdClient(DefaultClient): def __init__(self, *args, **kwargs): self._marker = Marker() super(HerdClient, self).__init__(*args, **kwargs) def _pack(self, value, timeout): herd_timeout = (timeout or self._backend.default_timeout) + int(time.time()) return (self._marker, value, herd_timeout) def _unpack(self, value): try: marker, unpacked, herd_timeout = value except (ValueError, TypeError): return value, False if not isinstance(marker, Marker): return value, False now = int(time.time()) if herd_timeout < now: x = now - herd_timeout return unpacked, _is_expired(x) return unpacked, False def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, client=None, nx=False, xx=False): if timeout == DEFAULT_TIMEOUT: timeout = self._backend.default_timeout if timeout is None or timeout <= 0: return super(HerdClient, self).set(key, value, timeout=timeout, version=version, client=client, nx=nx, xx=xx) packed = self._pack(value, timeout) real_timeout = (timeout + CACHE_HERD_TIMEOUT) return super(HerdClient, self).set(key, packed, timeout=real_timeout, version=version, client=client, nx=nx) def get(self, key, default=None, version=None, client=None): packed = super(HerdClient, self).get(key, default=default, version=version, client=client) val, refresh = self._unpack(packed) if refresh: return default return val def get_many(self, keys, version=None, client=None): if client is None: client = self.get_client(write=False) if not keys: return {} recovered_data = OrderedDict() new_keys = [self.make_key(key, version=version) for key in keys] map_keys = dict(zip(new_keys, keys)) try: results = client.mget(*new_keys) except _main_exceptions as e: raise ConnectionInterrupted(connection=client, parent=e) for key, value in zip(new_keys, 
results): if value is None: continue val, refresh = self._unpack(self.decode(value)) recovered_data[map_keys[key]] = None if refresh else val return recovered_data def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None, client=None, herd=True): """ Set a bunch of values in the cache at once from a dict of key/value pairs. This is much more efficient than calling set() multiple times. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. """ if client is None: client = self.get_client(write=True) set_function = self.set if herd else super(HerdClient, self).set try: pipeline = client.pipeline() for key, value in data.items(): set_function(key, value, timeout, version=version, client=pipeline) pipeline.execute() except _main_exceptions as e: raise ConnectionInterrupted(connection=client, parent=e) def incr(self, *args, **kwargs): raise NotImplementedError() def decr(self, *args, **kwargs): raise NotImplementedError() django-redis-4.9.0/django_redis/client/sharded.py0000644000175000001440000002211113245747656022131 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals import re from collections import OrderedDict from django.conf import settings from django.utils.encoding import smart_text from django.utils.six import text_type from redis.exceptions import ConnectionError from ..exceptions import ConnectionInterrupted from ..hash_ring import HashRing from ..util import CacheKey from .default import DEFAULT_TIMEOUT, DefaultClient class ShardClient(DefaultClient): _findhash = re.compile(r'.*\{(.*)\}.*', re.I) def __init__(self, *args, **kwargs): super(ShardClient, self).__init__(*args, **kwargs) if not isinstance(self._server, (list, tuple)): self._server = [self._server] self._ring = HashRing(self._server) self._serverdict = self.connect() def get_client(self, write=True): raise NotImplementedError def connect(self): connection_dict = {} for name in 
self._server: connection_dict[name] = self.connection_factory.connect(name) return connection_dict def get_server_name(self, _key): key = text_type(_key) g = self._findhash.match(key) if g is not None and len(g.groups()) > 0: key = g.groups()[0] name = self._ring.get_node(key) return name def get_server(self, key): name = self.get_server_name(key) return self._serverdict[name] def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super(ShardClient, self)\ .add(key=key, value=value, version=version, client=client, timeout=timeout) def get(self, key, default=None, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super(ShardClient, self)\ .get(key=key, default=default, version=version, client=client) def get_many(self, keys, version=None): if not keys: return {} recovered_data = OrderedDict() new_keys = [self.make_key(key, version=version) for key in keys] map_keys = dict(zip(new_keys, keys)) for key in new_keys: client = self.get_server(key) value = self.get(key=key, version=version, client=client) if value is None: continue recovered_data[map_keys[key]] = value return recovered_data def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, client=None, nx=False): """ Persist a value to the cache, and set an optional expiration time. """ if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super(ShardClient, self).set(key=key, value=value, timeout=timeout, version=version, client=client, nx=nx) def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None): """ Set a bunch of values in the cache at once from a dict of key/value pairs. This is much more efficient than calling set() multiple times. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. 
""" for key, value in data.items(): self.set(key, value, timeout, version=version) def has_key(self, key, version=None, client=None): """ Test if key exists. """ if client is None: key = self.make_key(key, version=version) client = self.get_server(key) key = self.make_key(key, version=version) try: return client.exists(key) except ConnectionError: raise ConnectionInterrupted(connection=client) def delete(self, key, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super(ShardClient, self).delete(key=key, version=version, client=client) def ttl(self, key, version=None, client=None): """ Executes TTL redis command and return the "time-to-live" of specified key. If key is a non volatile key, it returns None. """ if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super(ShardClient, self).ttl(key=key, version=version, client=client) def persist(self, key, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super(ShardClient, self).persist(key=key, version=version, client=client) def expire(self, key, timeout, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super(ShardClient, self).expire(key=key, timeout=timeout, version=version, client=client) def lock(self, key, version=None, timeout=None, sleep=0.1, blocking_timeout=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) key = self.make_key(key, version=version) return super(ShardClient, self).lock(key, timeout=timeout, sleep=sleep, client=client, blocking_timeout=blocking_timeout) def delete_many(self, keys, version=None): """ Remove multiple keys at once. 
""" res = 0 for key in [self.make_key(k, version=version) for k in keys]: client = self.get_server(key) res += self.delete(key, client=client) return res def incr_version(self, key, delta=1, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) if version is None: version = self._backend.version old_key = self.make_key(key, version) value = self.get(old_key, version=version, client=client) try: ttl = client.ttl(old_key) except ConnectionError: raise ConnectionInterrupted(connection=client) if value is None: raise ValueError("Key '%s' not found" % key) if isinstance(key, CacheKey): new_key = self.make_key(key.original_key(), version=version + delta) else: new_key = self.make_key(key, version=version + delta) self.set(new_key, value, timeout=ttl, client=self.get_server(new_key)) self.delete(old_key, client=client) return version + delta def incr(self, key, delta=1, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super(ShardClient, self)\ .incr(key=key, delta=delta, version=version, client=client) def decr(self, key, delta=1, version=None, client=None): if client is None: key = self.make_key(key, version=version) client = self.get_server(key) return super(ShardClient, self)\ .decr(key=key, delta=delta, version=version, client=client) def iter_keys(self, key, version=None): raise NotImplementedError("iter_keys not supported on sharded client") def keys(self, search, version=None): pattern = self.make_key(search, version=version) keys = [] try: for server, connection in self._serverdict.items(): keys.extend(connection.keys(pattern)) except ConnectionError: # FIXME: technically all clients should be passed as `connection`. 
client = self.get_server(pattern) raise ConnectionInterrupted(connection=client) decoded_keys = (smart_text(k) for k in keys) return [self.reverse_key(k) for k in decoded_keys] def delete_pattern(self, pattern, version=None, client=None, itersize=None, prefix=None): """ Remove all keys matching pattern. """ pattern = self.make_pattern(pattern, version=version, prefix=prefix) kwargs = {'match': pattern} if itersize: kwargs['count'] = itersize keys = [] for server, connection in self._serverdict.items(): keys.extend(key for key in connection.scan_iter(**kwargs)) res = 0 if keys: for server, connection in self._serverdict.items(): res += connection.delete(*keys) return res def close(self, **kwargs): if getattr(settings, "DJANGO_REDIS_CLOSE_CONNECTION", False): for client in self._serverdict.values(): for c in client.connection_pool._available_connections: c.disconnect() django-redis-4.9.0/django_redis/compressors/0000755000175000001440000000000013245753134021235 5ustar niwiusers00000000000000django-redis-4.9.0/django_redis/compressors/__init__.py0000644000175000001440000000000012645762707023345 0ustar niwiusers00000000000000django-redis-4.9.0/django_redis/compressors/base.py0000644000175000001440000000037713077640133022525 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- class BaseCompressor(object): def __init__(self, options): self._options = options def compress(self, value): raise NotImplementedError def decompress(self, value): raise NotImplementedError django-redis-4.9.0/django_redis/compressors/identity.py0000644000175000001440000000032113077640133023431 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- from .base import BaseCompressor class IdentityCompressor(BaseCompressor): def compress(self, value): return value def decompress(self, value): return value django-redis-4.9.0/django_redis/compressors/lz4.py0000644000175000001440000000105113245747656022331 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import from 
lz4.frame import compress as _compress, decompress as _decompress from ..exceptions import CompressorError from .base import BaseCompressor class Lz4Compressor(BaseCompressor): min_length = 15 def compress(self, value): if len(value) > self.min_length: return _compress(value) return value def decompress(self, value): try: return _decompress(value) except Exception as e: raise CompressorError(e) django-redis-4.9.0/django_redis/compressors/lzma.py0000644000175000001440000000104013174554724022552 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import import lzma from ..exceptions import CompressorError from .base import BaseCompressor class LzmaCompressor(BaseCompressor): min_length = 100 preset = 4 def compress(self, value): if len(value) > self.min_length: return lzma.compress(value, preset=self.preset) return value def decompress(self, value): try: return lzma.decompress(value) except lzma.LZMAError as e: raise CompressorError(e) django-redis-4.9.0/django_redis/compressors/zlib.py0000644000175000001440000000102413174554724022551 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import import zlib from ..exceptions import CompressorError from .base import BaseCompressor class ZlibCompressor(BaseCompressor): min_length = 15 preset = 6 def compress(self, value): if len(value) > self.min_length: return zlib.compress(value, self.preset) return value def decompress(self, value): try: return zlib.decompress(value) except zlib.error as e: raise CompressorError(e) django-redis-4.9.0/django_redis/serializers/0000755000175000001440000000000013245753134021212 5ustar niwiusers00000000000000django-redis-4.9.0/django_redis/serializers/__init__.py0000644000175000001440000000000013077640133023306 0ustar niwiusers00000000000000django-redis-4.9.0/django_redis/serializers/base.py0000644000175000001440000000043613077640133022476 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- from __future__ import 
absolute_import, unicode_literals class BaseSerializer(object): def __init__(self, options): pass def dumps(self, value): raise NotImplementedError def loads(self, value): raise NotImplementedError django-redis-4.9.0/django_redis/serializers/json.py0000644000175000001440000000061113245747656022547 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals import json from django.core.serializers.json import DjangoJSONEncoder from .base import BaseSerializer class JSONSerializer(BaseSerializer): def dumps(self, value): return json.dumps(value, cls=DjangoJSONEncoder).encode() def loads(self, value): return json.loads(value.decode()) django-redis-4.9.0/django_redis/serializers/msgpack.py0000644000175000001440000000050212533047545023207 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals import msgpack from .base import BaseSerializer class MSGPackSerializer(BaseSerializer): def dumps(self, value): return msgpack.dumps(value) def loads(self, value): return msgpack.loads(value, encoding="utf-8") django-redis-4.9.0/django_redis/serializers/pickle.py0000644000175000001440000000173513245747656023055 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.core.exceptions import ImproperlyConfigured from .base import BaseSerializer # Import the fastest implementation of # pickle package. 
This should be removed # when python3 come the unique supported # python version try: import cPickle as pickle except ImportError: import pickle class PickleSerializer(BaseSerializer): def __init__(self, options): self._pickle_version = -1 self.setup_pickle_version(options) def setup_pickle_version(self, options): if "PICKLE_VERSION" in options: try: self._pickle_version = int(options["PICKLE_VERSION"]) except (ValueError, TypeError): raise ImproperlyConfigured("PICKLE_VERSION value must be an integer") def dumps(self, value): return pickle.dumps(value, self._pickle_version) def loads(self, value): return pickle.loads(value) django-redis-4.9.0/django_redis/__init__.py0000644000175000001440000000107113245753053020766 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- VERSION = (4, 9, 0) __version__ = '.'.join(map(str, VERSION)) def get_redis_connection(alias='default', write=True): """ Helper used for obtaining a raw redis client. """ from django.core.cache import caches cache = caches[alias] if not hasattr(cache, "client"): raise NotImplementedError("This backend does not support this feature") if not hasattr(cache.client, "get_client"): raise NotImplementedError("This backend does not support this feature") return cache.client.get_client(write) django-redis-4.9.0/django_redis/cache.py0000644000175000001440000001116313174554724020302 0ustar niwiusers00000000000000import functools import logging from django.conf import settings from django.core.cache.backends.base import BaseCache from .exceptions import ConnectionInterrupted from .util import load_class DJANGO_REDIS_IGNORE_EXCEPTIONS = getattr(settings, "DJANGO_REDIS_IGNORE_EXCEPTIONS", False) DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS = getattr(settings, "DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS", False) DJANGO_REDIS_LOGGER = getattr(settings, "DJANGO_REDIS_LOGGER", False) DJANGO_REDIS_SCAN_ITERSIZE = getattr(settings, "DJANGO_REDIS_SCAN_ITERSIZE", 10) if DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS: logger = 
logging.getLogger((DJANGO_REDIS_LOGGER or __name__))


def omit_exception(method=None, return_value=None):
    """
    Simple decorator that intercepts connection
    errors and ignores these if settings specify this.
    """
    # Support usage both as ``@omit_exception`` and
    # ``@omit_exception(return_value=...)``.
    if method is None:
        return functools.partial(omit_exception, return_value=return_value)

    @functools.wraps(method)
    def _decorator(self, *args, **kwargs):
        try:
            return method(self, *args, **kwargs)
        except ConnectionInterrupted as e:
            # Swallow the error only when this cache is configured to
            # emulate memcached behavior (IGNORE_EXCEPTIONS).
            if self._ignore_exceptions:
                if DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS:
                    logger.error(str(e))
                return return_value
            # Re-raise the original redis/socket exception for callers.
            raise e.parent
    return _decorator


class RedisCache(BaseCache):
    # Django cache backend that delegates all operations to a pluggable
    # client class (DefaultClient by default).

    def __init__(self, server, params):
        super(RedisCache, self).__init__(params)
        self._server = server
        self._params = params

        options = params.get("OPTIONS", {})
        self._client_cls = options.get("CLIENT_CLASS", "django_redis.client.DefaultClient")
        self._client_cls = load_class(self._client_cls)
        # Created lazily on first access via the ``client`` property.
        self._client = None

        # Per-cache override of the global ignore-exceptions flag.
        self._ignore_exceptions = options.get("IGNORE_EXCEPTIONS", DJANGO_REDIS_IGNORE_EXCEPTIONS)

    @property
    def client(self):
        """
        Lazy client connection property.
        """
        if self._client is None:
            self._client = self._client_cls(self._server, self._params, self)
        return self._client

    # The methods below simply delegate to the underlying client, with
    # connection errors intercepted by @omit_exception.

    @omit_exception
    def set(self, *args, **kwargs):
        return self.client.set(*args, **kwargs)

    @omit_exception
    def incr_version(self, *args, **kwargs):
        return self.client.incr_version(*args, **kwargs)

    @omit_exception
    def add(self, *args, **kwargs):
        return self.client.add(*args, **kwargs)

    @omit_exception
    def get(self, key, default=None, version=None, client=None):
        # ``get`` handles the exception inline (instead of relying solely on
        # the decorator) so it can return ``default`` on connection errors.
        try:
            return self.client.get(key, default=default, version=version, client=client)
        except ConnectionInterrupted as e:
            if DJANGO_REDIS_IGNORE_EXCEPTIONS or self._ignore_exceptions:
                if DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS:
                    logger.error(str(e))
                return default
            raise

    @omit_exception
    def delete(self, *args, **kwargs):
        return self.client.delete(*args, **kwargs)

    @omit_exception
    def delete_pattern(self, *args, **kwargs):
        # Default the SCAN batch size from the global setting.
        kwargs['itersize'] = kwargs.get('itersize', DJANGO_REDIS_SCAN_ITERSIZE)
        return self.client.delete_pattern(*args, **kwargs)

    @omit_exception
    def delete_many(self, *args, **kwargs):
        return self.client.delete_many(*args, **kwargs)

    @omit_exception
    def clear(self):
        return self.client.clear()

    # On connection failure, behave as if nothing was cached.
    @omit_exception(return_value={})
    def get_many(self, *args, **kwargs):
        return self.client.get_many(*args, **kwargs)

    @omit_exception
    def set_many(self, *args, **kwargs):
        return self.client.set_many(*args, **kwargs)

    @omit_exception
    def incr(self, *args, **kwargs):
        return self.client.incr(*args, **kwargs)

    @omit_exception
    def decr(self, *args, **kwargs):
        return self.client.decr(*args, **kwargs)

    @omit_exception
    def has_key(self, *args, **kwargs):
        return self.client.has_key(*args, **kwargs)

    @omit_exception
    def keys(self, *args, **kwargs):
        return self.client.keys(*args, **kwargs)

    @omit_exception
    def iter_keys(self, *args, **kwargs):
        return self.client.iter_keys(*args, **kwargs)

    @omit_exception
    def ttl(self, *args, **kwargs):
        return self.client.ttl(*args, **kwargs)

    @omit_exception
    def persist(self, *args, **kwargs):
return self.client.persist(*args, **kwargs) @omit_exception def expire(self, *args, **kwargs): return self.client.expire(*args, **kwargs) @omit_exception def lock(self, *args, **kwargs): return self.client.lock(*args, **kwargs) @omit_exception def close(self, **kwargs): self.client.close(**kwargs) django-redis-4.9.0/django_redis/exceptions.py0000644000175000001440000000103413077640133021404 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- class ConnectionInterrupted(Exception): def __init__(self, connection, parent=None): self.connection = connection self.parent = parent def __str__(self): error_type = "ConnectionInterrupted" error_msg = "An error occurred while connecting to redis" if self.parent: error_type = self.parent.__class__.__name__ error_msg = str(self.parent) return "Redis %s: %s" % (error_type, error_msg) class CompressorError(Exception): pass django-redis-4.9.0/django_redis/hash_ring.py0000644000175000001440000000313413077640133021170 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals import bisect import hashlib class HashRing(object): nodes = [] def __init__(self, nodes=(), replicas=128): self.replicas = replicas self.ring = {} self.sorted_keys = [] for node in nodes: self.add_node(node) def add_node(self, node): self.nodes.append(node) for x in range(self.replicas): _key = "{0}:{1}".format(node, x) _hash = hashlib.sha256(_key.encode('utf-8')).hexdigest() self.ring[_hash] = node self.sorted_keys.append(_hash) self.sorted_keys.sort() def remove_node(self, node): self.nodes.remove(node) for x in range(self.replicas): _hash = hashlib.sha256("%s:%d" % (node, x)).hexdigest() self.ring.remove(_hash) self.sorted_keys.remove(_hash) def get_node(self, key): n, i = self.get_node_pos(key) return n def get_node_pos(self, key): if len(self.ring) == 0: return (None, None) _hash = hashlib.sha256(key.encode('utf-8')).hexdigest() idx = bisect.bisect(self.sorted_keys, _hash) idx = min(idx - 1, 
(self.replicas * len(self.nodes)) - 1)
        return (self.ring[self.sorted_keys[idx]], idx)

    def iter_nodes(self, key):
        # Yield (hash, node) pairs starting at the ring position of ``key``.
        if len(self.ring) == 0:
            # Empty ring: yield the sentinel pair; the loop below is then a
            # no-op because sorted_keys is empty.
            yield None, None

        node, pos = self.get_node_pos(key)
        for k in self.sorted_keys[pos:]:
            yield k, self.ring[k]

    def __call__(self, key):
        return self.get_node(key)
django-redis-4.9.0/django_redis/pool.py0000644000175000001440000001003613174554724020206 0ustar niwiusers00000000000000from django.conf import settings
from redis.connection import DefaultParser

from . import util


class ConnectionFactory(object):

    # Store connection pool by cache backend options.
    #
    # _pools is a process-global, as otherwise _pools is cleared every time
    # ConnectionFactory is instiated, as Django creates new cache client
    # (DefaultClient) instance for every request.
    _pools = {}

    def __init__(self, options):
        # Resolve the pluggable connection-pool class (redis-py's by default).
        pool_cls_path = options.get("CONNECTION_POOL_CLASS",
                                    "redis.connection.ConnectionPool")
        self.pool_cls = util.load_class(pool_cls_path)
        self.pool_cls_kwargs = options.get("CONNECTION_POOL_KWARGS", {})

        # Resolve the pluggable redis client class (StrictRedis by default).
        redis_client_cls_path = options.get("REDIS_CLIENT_CLASS",
                                            "redis.client.StrictRedis")
        self.redis_client_cls = util.load_class(redis_client_cls_path)
        self.redis_client_cls_kwargs = options.get("REDIS_CLIENT_KWARGS", {})

        self.options = options

    def make_connection_params(self, url):
        """
        Given a main connection parameters, build a
        complete dict of connection parameters.
        """
        kwargs = {
            "url": url,
            "parser_class": self.get_parser_cls(),
        }

        # PASSWORD in OPTIONS supplements the URL (for non-URL-safe passwords).
        password = self.options.get("PASSWORD", None)
        if password:
            kwargs["password"] = password

        # NOTE: a falsy (0) timeout is treated as "not set" here.
        socket_timeout = self.options.get("SOCKET_TIMEOUT", None)
        if socket_timeout:
            assert isinstance(socket_timeout, (int, float)), \
                "Socket timeout should be float or integer"
            kwargs["socket_timeout"] = socket_timeout

        socket_connect_timeout = self.options.get("SOCKET_CONNECT_TIMEOUT", None)
        if socket_connect_timeout:
            assert isinstance(socket_connect_timeout, (int, float)), \
                "Socket connect timeout should be float or integer"
            kwargs["socket_connect_timeout"] = socket_connect_timeout

        return kwargs

    def connect(self, url):
        """
        Given a basic connection parameters,
        return a new connection.
        """
        params = self.make_connection_params(url)
        connection = self.get_connection(params)
        return connection

    def get_connection(self, params):
        """
        Given a now preformated params, return a
        new connection.

        The default implementation uses a cached pools
        for create new connection.
        """
        pool = self.get_or_create_connection_pool(params)
        return self.redis_client_cls(connection_pool=pool, **self.redis_client_cls_kwargs)

    def get_parser_cls(self):
        cls = self.options.get("PARSER_CLASS", None)
        if cls is None:
            return DefaultParser
        return util.load_class(cls)

    def get_or_create_connection_pool(self, params):
        """
        Given a connection parameters and return a new
        or cached connection pool for them.

        Reimplement this method if you want distinct
        connection pool instance caching behavior.
        """
        # Pools are cached per connection URL.
        key = params["url"]
        if key not in self._pools:
            self._pools[key] = self.get_connection_pool(params)
        return self._pools[key]

    def get_connection_pool(self, params):
        """
        Given a connection parameters, return a new
        connection pool for them.

        Overwrite this method if you want a custom
        behavior on creating connection pool.
""" cp_params = dict(params) cp_params.update(self.pool_cls_kwargs) pool = self.pool_cls.from_url(**cp_params) if pool.connection_kwargs.get("password", None) is None: pool.connection_kwargs["password"] = params.get("password", None) pool.reset() return pool def get_connection_factory(path=None, options=None): if path is None: path = getattr(settings, "DJANGO_REDIS_CONNECTION_FACTORY", "django_redis.pool.ConnectionFactory") cls = util.load_class(path) return cls(options or {}) django-redis-4.9.0/django_redis/util.py0000644000175000001440000000220213174554724020206 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from importlib import import_module from django.core.exceptions import ImproperlyConfigured from django.utils.encoding import python_2_unicode_compatible, smart_text @python_2_unicode_compatible class CacheKey(object): """ A stub string class that we can use to check if a key was created already. """ def __init__(self, key): self._key = key def __str__(self): return smart_text(self._key) def original_key(self): key = self._key.rsplit(":", 1)[1] return key def load_class(path): """ Loads class from path. """ mod_name, klass_name = path.rsplit('.', 1) try: mod = import_module(mod_name) except AttributeError as e: raise ImproperlyConfigured('Error importing {0}: "{1}"'.format(mod_name, e)) try: klass = getattr(mod, klass_name) except AttributeError: raise ImproperlyConfigured('Module "{0}" does not define a "{1}" class'.format(mod_name, klass_name)) return klass def default_reverse_key(key): return key.split(':', 2)[2] django-redis-4.9.0/django_redis.egg-info/0000755000175000001440000000000013245753134020350 5ustar niwiusers00000000000000django-redis-4.9.0/django_redis.egg-info/PKG-INFO0000644000175000001440000000216113245753134021445 0ustar niwiusers00000000000000Metadata-Version: 1.2 Name: django-redis Version: 4.9.0 Summary: Full featured redis cache backend for Django. 
Home-page: https://github.com/niwibe/django-redis Author: Andrei Antoukh Author-email: niwi@niwi.nz License: UNKNOWN Description-Content-Type: UNKNOWN Description: UNKNOWN Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Environment :: Web Environment Classifier: Framework :: Django Classifier: Framework :: Django :: 1.11 Classifier: Framework :: Django :: 2.0 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: BSD License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Topic :: Software Development :: Libraries Classifier: Topic :: Utilities Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* django-redis-4.9.0/django_redis.egg-info/SOURCES.txt0000644000175000001440000000273013245753134022236 0ustar niwiusers00000000000000AUTHORS.rst CHANGES.txt LICENSE MANIFEST.in README.rst setup.cfg setup.py django_redis/__init__.py django_redis/cache.py django_redis/exceptions.py django_redis/hash_ring.py django_redis/pool.py django_redis/util.py django_redis.egg-info/PKG-INFO django_redis.egg-info/SOURCES.txt django_redis.egg-info/dependency_links.txt django_redis.egg-info/not-zip-safe django_redis.egg-info/requires.txt django_redis.egg-info/top_level.txt django_redis/client/__init__.py django_redis/client/default.py django_redis/client/herd.py django_redis/client/sharded.py django_redis/compressors/__init__.py django_redis/compressors/base.py django_redis/compressors/identity.py django_redis/compressors/lz4.py django_redis/compressors/lzma.py django_redis/compressors/zlib.py django_redis/serializers/__init__.py django_redis/serializers/base.py 
django_redis/serializers/json.py django_redis/serializers/msgpack.py django_redis/serializers/pickle.py doc/Makefile doc/content-docinfo.html doc/content.adoc doc/index.html tests/README.txt tests/__init__.py tests/runtests-herd.py tests/runtests-json.py tests/runtests-lz4.py tests/runtests-msgpack.py tests/runtests-sharded.py tests/runtests-unixsockets.py tests/runtests-zlib.py tests/runtests.py tests/shell.py tests/test_backend.py tests/test_hashring.py tests/test_sqlite.py tests/test_sqlite_herd.py tests/test_sqlite_json.py tests/test_sqlite_lz4.py tests/test_sqlite_msgpack.py tests/test_sqlite_sharding.py tests/test_sqlite_usock.py tests/test_sqlite_zlib.pydjango-redis-4.9.0/django_redis.egg-info/dependency_links.txt0000644000175000001440000000000113245753134024416 0ustar niwiusers00000000000000 django-redis-4.9.0/django_redis.egg-info/not-zip-safe0000644000175000001440000000000112306430637022573 0ustar niwiusers00000000000000 django-redis-4.9.0/django_redis.egg-info/requires.txt0000644000175000001440000000003313245753134022744 0ustar niwiusers00000000000000Django>=1.11 redis>=2.10.0 django-redis-4.9.0/django_redis.egg-info/top_level.txt0000644000175000001440000000001513245753134023076 0ustar niwiusers00000000000000django_redis django-redis-4.9.0/doc/0000755000175000001440000000000013245753134014773 5ustar niwiusers00000000000000django-redis-4.9.0/doc/Makefile0000644000175000001440000000032512537540302016425 0ustar niwiusers00000000000000all: doc doc: mkdir -p dist/latest/ asciidoctor -a docinfo -a stylesheet! 
-o dist/latest/index.html content.adoc github: doc ghp-import -m "Generate documentation" -b gh-pages dist/ git push origin gh-pages django-redis-4.9.0/doc/content-docinfo.html0000644000175000001440000000042412537540345020753 0ustar niwiusers00000000000000 django-redis-4.9.0/doc/content.adoc0000644000175000001440000005275713245747656017331 0ustar niwiusers00000000000000django-redis documentation ========================== Andrey Antukh, 4.8.0 :toc: left :numbered: :source-highlighter: pygments :pygments-style: friendly Introduction ------------ _django-redis_ is a xref:license[BSD Licensed], full featured Redis cache/session backend for Django. Why use django-redis? ~~~~~~~~~~~~~~~~~~~~~ Because: - In active development. - Uses native redis-py url notation connection strings. - Pluggable clients. - Pluggable parsers. - Pluggable serializers. - Master-Slave support in the default client. - Complete battery of tests. - Used in production in several projects as cache and session storage. - Supports infinite timeouts. - Facilities for raw access to Redis client/connection pool. - Highly configurable (can emulate memcached exception behavior, for example). - Unix sockets supported by default. - With support for python 2.7, 3.4, 3.5 and 3.6 Supported django-redis versions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - Supported stable version: *4.8.0* - Supported stable version: *3.8.4* How version number is handled ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Versions like _3.6_, _3.7_, ... are considered major releases and can contain some backward incompatibilities. For more information is very recommended see the changelog before update. Versions like _3.7.0_, _3.7.1_, ... are considered minor or bug fix releases and are should contain only bug fixes. No new features. Requirements ~~~~~~~~~~~~ Django version support ^^^^^^^^^^^^^^^^^^^^^^ - *django-redis* supports Django 1.11+. 
Redis Server Support ^^^^^^^^^^^^^^^^^^^^ - *django-redis 3.x.y* will maintain support for redis-server 2.6.x and upper. - *django-redis 4.x.y* will maintain support for redis-server 2.8.x and upper. Other requirements ^^^^^^^^^^^^^^^^^^ All supported versions of *django-redis* depends on `redis-py >= 2.10.0`. User guide ---------- Installation ~~~~~~~~~~~~ The simplest way to use *django-redis* in your project is to install it with *pip*: [source,text] ---- pip install django-redis ---- Configure as cache backend ~~~~~~~~~~~~~~~~~~~~~~~~~~ To start using *django-redis*, you should change your Django cache settings to something like this: [source, python] ---- CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379/1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", } } } ---- django-redis, since 3.8.0, it starts using redis-py native url notation for connection strings, that allows better interoperability and have a connection string in more "standard" way. .This is a examples of url format ---- redis://[:password]@localhost:6379/0 rediss://[:password]@localhost:6379/0 unix://[:password]@/path/to/socket.sock?db=0 ---- Three URL schemes are supported: - `redis://`: creates a normal TCP socket connection - `rediss://`: creates a SSL wrapped TCP socket connection - `unix://` creates a Unix Domain Socket connection There are several ways to specify a database number: - A `db` querystring option, e.g. redis://localhost?db=0 - If using the redis:// scheme, the path argument of the url, e.g. 
`redis://localhost/0` In some circumstances the password you should use to connect redis is not URL-safe, in this case you can escape it or just use the convenience option in `OPTIONS` dict: [source, python] ---- CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379/1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "PASSWORD": "mysecret" } } } ---- Take care, that this option does not overwrites the password in the uri, so if you have set the password in the uri, this settings will be ignored. Configure as session backend ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Django can by default use any cache backend as session backend and you benefit from that by using *django-redis* as backend for session storage without installing any additional backends: [source, python] ---- SESSION_ENGINE = "django.contrib.sessions.backends.cache" SESSION_CACHE_ALIAS = "default" ---- Testing with django-redis ~~~~~~~~~~~~~~~~~~~~~~~~~ `django-redis` supports customizing the underlying Redis client (see <<_pluggable_redis_client>>). This can be used for testing purposes, e.g., by replacing the default client with `mockredis` (https://github.com/locationlabs/mockredis). Doing so allows you to run your integration tests without depending on a real Redis server. In case you want to flush all data from the cache after a test, add the following lines to your `TestCase`: [source, python] ---- def tearDown(self): from django_redis import get_redis_connection get_redis_connection("default").flushall() ---- Advanced usage -------------- Pickle version ~~~~~~~~~~~~~~ For almost all values, *django-redis* uses pickle to serialize objects. The latest available version of pickle is used by default. If you want set a concrete version, you can do it, using `PICKLE_VERSION` option: [source, python] ---- CACHES = { "default": { # ... 
"OPTIONS": { "PICKLE_VERSION": -1 # Use the latest protocol version } } } ---- Socket timeout ~~~~~~~~~~~~~~ Socket timeout can be set using `SOCKET_TIMEOUT` and `SOCKET_CONNECT_TIMEOUT` options: [source, python] ---- CACHES = { "default": { # ... "OPTIONS": { "SOCKET_CONNECT_TIMEOUT": 5, # in seconds "SOCKET_TIMEOUT": 5, # in seconds } } } ---- `SOCKET_CONNECT_TIMEOUT` is the timeout for the connection to be established and `SOCKET_TIMEOUT` is the timeout for read and write operations after the connection is established. Compression support ~~~~~~~~~~~~~~~~~~~ _django_redis_ comes with compression support out of the box, but is deactivated by default. You can activate it setting up a concrete backend: [source, python] ---- CACHES = { "default": { # ... "OPTIONS": { "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor", } } } ---- Let see an example, of how make it work with *lzma* compression format: [source, python] ---- import lzma CACHES = { "default": { # ... "OPTIONS": { "COMPRESSOR": "django_redis.compressors.lzma.LzmaCompressor", } } } ---- *Lz4* compression support (requires the lz4 library): [source, python] ---- import lz4 CACHES = { "default": { # ... "OPTIONS": { "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor", } } } ---- Memcached exceptions behavior ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In some situations, when Redis is only used for cache, you do not want exceptions when Redis is down. This is default behavior in the memcached backend and it can be emulated in *django-redis*. For setup memcached like behaviour (ignore connection exceptions), you should set `IGNORE_EXCEPTIONS` settings on your cache configuration: [source, python] ---- CACHES = { "default": { # ... 
"OPTIONS": { "IGNORE_EXCEPTIONS": True, } } } ---- Also, you can apply the same settings to all configured caches, you can set the global flag in your settings: [source, python] ---- DJANGO_REDIS_IGNORE_EXCEPTIONS = True ---- Log Ignored Exceptions ~~~~~~~~~~~~~~~~~~~~~~ When ignoring exceptions with `IGNORE_EXCEPTIONS` or `DJANGO_REDIS_IGNORE_EXCEPTIONS`, you may optionally log exceptions using the global variable `DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS` in your settings file. [source, python] ---- DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS = True ---- If you wish to specify the logger in which the exceptions are output, simply set the global variable `DJANGO_REDIS_LOGGER` to the string name and/or path of the desired logger. This will default to `__name__` if no logger is specified and `DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS` is `True` [source, python] ---- DJANGO_REDIS_LOGGER = 'some.specified.logger' ---- Infinite timeout ~~~~~~~~~~~~~~~~ *django-redis* comes with infinite timeouts support out of the box. And it behaves in same way as django backend contract specifies: - `timeout=0` expires the value immediately. - `timeout=None` infinite timeout [source, python] ---- cache.set("key", "value", timeout=None) ---- Get ttl (time-to-live) from key ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ With redis, you can access to ttl of any stored key, for it, django-redis exposes `ttl` function. It returns: - 0 if key does not exists (or already expired). - None for keys that exists but does not have any expiration. - ttl value for any volatile key (any key that has expiration). 
.Simple search keys by pattern [source,pycon] ---- >>> from django.core.cache import cache >>> cache.set("foo", "value", timeout=25) >>> cache.ttl("foo") 25 >>> cache.ttl("not-existent") 0 ---- Expire & Persist ~~~~~~~~~~~~~~~~ Additionally to the simple ttl query, you can send persist a concrete key or specify a new expiration timeout using the `persist` and `expire` methods: .Example using `persist` method [source, pycon] ----- >>> cache.set("foo", "bar", timeout=22) >>> cache.ttl("foo") 22 >>> cache.persist("foo") >>> cache.ttl("foo") None ----- .Example using expire method [source,pycon] ---- >>> cache.set("foo", "bar", timeout=22) >>> cache.expire("foo", timeout=5) >>> cache.ttl("foo") 5 ---- Locks ~~~~~ It also supports the redis ability to create redis distributed named locks. The Lock interface is identical to the `threading.Lock` so you can use it as replacement. .Example allocating a lock using python context managers facilities. [source, python] ---- with cache.lock("somekey"): do_some_thing() ---- Scan & Delete keys in bulk ~~~~~~~~~~~~~~~~~~~~~~~~~~ *django-redis* comes with some additional methods that help with searching or deleting keys using glob patterns. .Simple search keys by pattern [source,pycon] ---- >>> from django.core.cache import cache >>> cache.keys("foo_*") ["foo_1", "foo_2"] ---- A simple search like this will return all matched values. In databases with a large number of keys this isn't suitable method. Instead, you can use the `iter_keys` function that works like the `keys` function but uses redis>=2.8 server side cursors. Calling `iter_keys` will return a generator that you can then iterate over efficiently. .Search using server side cursors [source,pycon] ---- >>> from django.core.cache import cache >>> cache.iter_keys("foo_*") >>> next(cache.iter_keys("foo_*")) "foo_1" ---- For deleting keys, you should use `delete_pattern` which has the same glob pattern syntax as the `keys` function and returns the number of deleted keys. 
.Example use of delete_pattern [source, pycon] ---- >>> from django.core.cache import cache >>> cache.delete_pattern("foo_*") ---- Redis native commands ~~~~~~~~~~~~~~~~~~~~~ *django-redis* has limited support for some Redis atomic operations, such as the commands `SETNX` and `INCR`. You can use the `SETNX` command through the backend `set()` method with the `nx` parameter: .Example: [source, pycon] ---- >>> from django.core.cache import cache >>> cache.set("key", "value1", nx=True) True >>> cache.set("key", "value2", nx=True) False >>> cache.get("key") "value1" ---- Also, `incr` and `decr` methods uses redis atomic operations when value that contains a key is suitable for it. Raw client access ~~~~~~~~~~~~~~~~~ In some situations your application requires access to a raw Redis client to use some advanced features that aren't exposed by the Django cache interface. To avoid storing another setting for creating a raw connection, *django-redis* exposes functions with which you can obtain a raw client reusing the cache connection string: `get_redis_connection(alias)`. [source, pycon] ---- >>> from django_redis import get_redis_connection >>> con = get_redis_connection("default") >>> con ---- WARNING: Not all pluggable clients support this feature. Connection pools ~~~~~~~~~~~~~~~~ Behind the scenes, *django-redis* uses the underlying *redis-py* connection pool implementation, and exposes a simple way to configure it. Alternatively, you can directly customize a connection/connection pool creation for a backend. The default *redis-py* behavior is to not close connections, recycling them when possible. Configure default connection pool ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The default connection pool is simple. You can only customize the maximum number of connections in the pool, by setting `CONNECTION_POOL_KWARGS` in the `CACHES` setting: [source, python] ---- CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", ... 
"OPTIONS": { "CONNECTION_POOL_KWARGS": {"max_connections": 100} } } } ---- You can verify how many connections the pool has opened with the following snippet: [source, python] ---- from django.core.cache import get_cache from django_redis import get_redis_connection r = get_redis_connection("default") # Use the name you have defined for Redis in settings.CACHES connection_pool = r.connection_pool print("Created connections so far: %d" % connection_pool._created_connections) ---- Use your own connection pool subclass ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Sometimes you want to use your own subclass of the connection pool. This is possible with *django-redis* using the `CONNECTION_POOL_CLASS` parameter in the backend options. ._myproj/mypool.py_ [source, python] ---- from redis.connection import ConnectionPool class MyOwnPool(ConnectionPool): # Just doing nothing, only for example purpose pass ---- ._settings.py_ [source, python] ---- # Omitting all backend declaration boilerplate code. "OPTIONS": { "CONNECTION_POOL_CLASS": "myproj.mypool.MyOwnPool", } ---- Customize connection factory ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ If none of the previous methods satisfies you, you can get in the middle of the *django-redis* connection factory process and customize or completely rewrite it. By default, *django-redis* creates connections through the `django_redis.pool.ConnectionFactory` class that is specified in the global Django setting `DJANGO_REDIS_CONNECTION_FACTORY`. .Partial interface of `ConnectionFactory` class [source, python] ---- # Note: Using Python 3 notation for code documentation ;) class ConnectionFactory(object): def get_connection_pool(self, params:dict): # Given connection parameters in the `params` argument, # return new connection pool. # It should be overwritten if you want do something # before/after creating the connection pool, or return your # own connection pool. 
pass def get_connection(self, params:dict): # Given connection parameters in the `params` argument, # return a new connection. # It should be overwritten if you want to do something # before/after creating a new connection. # The default implementation uses `get_connection_pool` # to obtain a pool and create a new connection in the # newly obtained pool. pass def get_or_create_connection_pool(self, params:dict): # This is a high layer on top of `get_connection_pool` for # implementing a cache of created connection pools. # It should be overwritten if you want change the default # behavior. pass def make_connection_params(self, url:str) -> dict: # The responsibility of this method is to convert basic connection # parameters and other settings to fully connection pool ready # connection parameters. pass def connect(self, url:str): # This is really a public API and entry point for this # factory class. This encapsulates the main logic of creating # the previously mentioned `params` using `make_connection_params` # and creating a new connection using the `get_connection` method. pass ---- Pluggable parsers ~~~~~~~~~~~~~~~~~ *redis-py* (the Python Redis client used by *django-redis*) comes with a pure Python Redis parser that works very well for most common task, but if you want some performance boost, you can use *hiredis*. *hiredis* is a Redis client written in C and it has its own parser that can be used with *django-redis*. [source, python] ---- "OPTIONS": { "PARSER_CLASS": "redis.connection.HiredisParser", } ---- Pluggable clients ~~~~~~~~~~~~~~~~~ _django_redis_ is designed for to be very flexible and very configurable. For it, it exposes a pluggable backends that make easy extend the default behavior, and it comes with few ones out the box. Default client ^^^^^^^^^^^^^^ Almost all about the default client is explained, with one exception: the default client comes with master-slave support. 
To connect to master-slave redis setup, you should change the `LOCATION` to something like this: [source, python] ---- "LOCATION": [ "redis://127.0.0.1:6379/1", "redis://127.0.0.1:6378/1", ] ---- The first connection string represents a master server and the rest to slave servers. WARNING: Master-Slave setup is not heavily tested in production environments. Shard client ^^^^^^^^^^^^ This pluggable client implements client-side sharding. It inherits almost all functionality from the default client. To use it, change your cache settings to something like this: [source, python] ---- CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": [ "redis://127.0.0.1:6379/1", "redis://127.0.0.1:6379/2", ], "OPTIONS": { "CLIENT_CLASS": "django_redis.client.ShardClient", } } } ---- WARNING: Shard client is still experimental, so be careful when using it in production environments. Herd client ^^^^^^^^^^^ This pluggable client helps dealing with the thundering herd problem. You can read more about it on link:http://en.wikipedia.org/wiki/Thundering_herd_problem[Wikipedia]. Like previous pluggable clients, it inherits all functionality from the default client, adding some additional methods for getting/setting keys. .Example setup [source, python] ---- CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379/1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.HerdClient", } } } ---- This client exposes additional settings: - `CACHE_HERD_TIMEOUT`: Set default herd timeout. (Default value: 60s) Pluggable serializer ~~~~~~~~~~~~~~~~~~~~ The pluggable clients serialize data before sending it to the server. By default, _django_redis_ serialize the data using Python `pickle`. This is very flexible and can handle a large range of object types. To serialize using JSON instead, the serializer `JSONSerializer` is also available. 
.Example setup [source, python] ---- CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379/1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.json.JSONSerializer", } } } ---- There's also support for serialization using 'MsgPack' 'http://msgpack.org/' (that requires the msgpack-python library): .Example setup [source, python] ---- CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379/1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer", } } } ---- Pluggable redis client ~~~~~~~~~~~~~~~~~~~~~~ _django_redis_ uses the Redis client `redis.client.StrictClient` by default. It is possible to use an alternative client. You can customize the client used by setting `REDIS_CLIENT_CLASS` in the `CACHES` setting. Optionally, you can provide arguments to this class by setting `REDIS_CLIENT_KWARGS`. .Example setup [source, python] ---- CACHES = { "default": { "OPTIONS": { "REDIS_CLIENT_CLASS": "my.module.ClientClass", "REDIS_CLIENT_KWARGS": {"some_setting": True}, } } } ---- [[license]] License ------- [source,text] ---- Copyright (c) 2011-2015 Andrey Antukh Copyright (c) 2011 Sean Bleier All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. 
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ---- django-redis-4.9.0/doc/index.html0000644000175000001440000024105412533056516016775 0ustar niwiusers00000000000000 django-redis documentation

1. Introduction

django-redis is a BSD Licensed, full featured Redis cache/session backend for Django.

1.1. Why use django-redis?

Because:

  • In active development.

  • Uses native redis-py url notation connection strings.

  • Modular client system (pluggable clients).

  • Master-Slave support in the default client.

  • Complete battery of tests.

  • Used in production in several projects as cache and session storage.

  • Supports infinite timeouts.

  • Facilities for raw access to Redis client/connection pool.

  • Highly configurable (can emulate memcached exception behavior, for example).

  • Unix sockets supported by default.

  • Pluggable parsers.

  • With support for python 2.7, 3.3 and 3.4

1.2. Supported django-redis versions

  • Development version: 4.0.0

  • Supported stable version: 3.8.4

1.3. How version number is handled

Versions like 3.6, 3.7, …​ are considered major releases and can contain some backward incompatibilities. It is strongly recommended to review the changelog before updating.

Versions like 3.7.0, 3.7.1, …​ are considered minor or bug fix releases and should contain only bug fixes, no new features.

1.4. Requirements

1.4.1. Django version support

  • django-redis 3.8.x will maintain support for django 1.4, 1.5, 1.6, 1.7 (and maybe 1.8)

  • django-redis 4.0.0 will maintain support for django 1.6, 1.7 and 1.8

1.4.2. Redis Server Support

  • django-redis 3.8.x will maintain support for redis-server 2.6.x and upper.

  • django-redis 4.0.0 will maintain support for redis-server 2.8.x and upper.

1.4.3. Other requirements

All supported versions of django-redis depend on redis-py >= 2.10.0.

2. User guide

2.1. Installation

The simplest way to use django-redis in your project is to install it with pip:

pip install django-redis

2.2. Configure as cache backend

To start using django-redis, you should change your Django cache settings to something like this:

CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": "redis://127.0.0.1:6379/1",
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis.client.DefaultClient",
        }
    }
}

Since 3.8.0, django-redis uses the redis-py native URL notation for connection strings, which allows better interoperability and provides connection strings in a more "standard" way.

These are examples of the URL format:
redis://[:password]@localhost:6379/0
rediss://[:password]@localhost:6379/0
unix://[:password]@/path/to/socket.sock?db=0

Three URL schemes are supported:

  • redis://: creates a normal TCP socket connection

  • rediss://: creates a SSL wrapped TCP socket connection

  • unix:// creates a Unix Domain Socket connection

There are several ways to specify a database number:

  • A db querystring option, e.g. redis://localhost?db=0

  • If using the redis:// scheme, the path argument of the url, e.g. redis://localhost/0

Note
if you are coming from django-redis < 3.8.x, you are probably using redis_cache. Since django-redis 3.8.x, the redis_cache module is deprecated in favor of django_redis. The redis_cache module will be removed in the 3.9.x versions.

2.3. Configure as session backend

By default, Django can use any configured cache backend as a session backend. You can benefit from this by using django-redis as the backend for session storage without installing any additional packages:

SESSION_ENGINE = "django.contrib.sessions.backends.cache"
SESSION_CACHE_ALIAS = "default"

3. Advanced usage

3.1. Pickle version

For almost all values, django-redis uses pickle to serialize objects.

The latest available version of pickle is used by default. If you want to set a specific version, you can do so using the PICKLE_VERSION option:

CACHES = {
    "default": {
        # ...
        "OPTIONS": {
            "PICKLE_VERSION": -1  # Use the latest protocol version
        }
    }
}

3.2. Socket timeout

Socket timeout can be set using SOCKET_TIMEOUT and SOCKET_CONNECT_TIMEOUT options:

CACHES = {
    "default": {
        # ...
        "OPTIONS": {
            "SOCKET_CONNECT_TIMEOUT": 5,  # in seconds
            "SOCKET_TIMEOUT": 5,  # in seconds
        }
    }
}

SOCKET_CONNECT_TIMEOUT is the timeout for the connection to be established and SOCKET_TIMEOUT is the timeout for read and write operations after the connection is established.

3.3. Compression support

django_redis comes with compression support out of the box, but it is deactivated by default. You can activate it by setting the COMPRESS_MIN_LEN option to any value greater than 0.

CACHES = {
    "default": {
        # ...
        "OPTIONS": {
            "COMPRESS_MIN_LEN": 10,
        }
    }
}

zlib is used as the default compression format. You can change it by providing two callables, one for compression and another for decompression.

Let's see an example of how to make it work with the lzma compression format:

import lzma

CACHES = {
    "default": {
        # ...
        "OPTIONS": {
            "COMPRESS_MIN_LEN": 10,
            "COMPRESS_COMPRESSOR": lzma.compress,
            "COMPRESS_DECOMPRESSOR": lzma.decompress,
            "COMPRESS_DECOMPRESSOR_ERROR": lzma.LZMAError
        }
    }
}

3.4. Memcached exceptions behavior

In some situations, when Redis is only used for cache, you do not want exceptions when Redis is down. This is default behavior in the memcached backend and it can be emulated in django-redis.

To set up memcached-like behavior (ignoring connection exceptions), you should set the IGNORE_EXCEPTIONS setting in your cache configuration:

CACHES = {
    "default": {
        # ...
        "OPTIONS": {
            "IGNORE_EXCEPTIONS": True,
        }
    }
}

Alternatively, to apply the same setting to all configured caches, you can set the global flag in your settings:

DJANGO_REDIS_IGNORE_EXCEPTIONS = True

3.5. Infinite timeout

django-redis comes with infinite timeout support out of the box, and it behaves in the same way as the Django backend contract specifies:

  • timeout=0 expires the value immediately.

  • timeout=None infinite timeout

cache.set("key", "value", timeout=None)

3.6. Get ttl (time-to-live) from key

With Redis, you can access the TTL of any stored key; for this, django-redis exposes the ttl function.

It returns:

  • ttl value for any volatile key (any key that has expiration)

  • 0 for expired and non-existent keys

  • None for keys that do not have an expiration

Simple search keys by pattern
>>> from django.core.cache import cache
>>> cache.set("foo", "value", timeout=25)
>>> cache.ttl("foo")
25
>>> cache.ttl("not-existent")
0

3.7. Scan & Delete keys in bulk

django-redis comes with some additional methods that help with searching or deleting keys using glob patterns.

Simple search keys by pattern
>>> from django.core.cache import cache
>>> cache.keys("foo_*")
["foo_1", "foo_2"]

A simple search like this will return all matched values. In databases with a large number of keys this isn't a suitable method. Instead, you can use the iter_keys function that works like the keys function but uses redis>=2.8 server side cursors. Calling iter_keys will return a generator that you can then iterate over efficiently.

Search using server side cursors
>>> from django.core.cache import cache
>>> cache.iter_keys("foo_*")
<generator object algo at 0x7ffa9c2713a8>
>>> next(cache.iter_keys("foo_*"))
"foo_1"

For deleting keys, you should use delete_pattern which has the same glob pattern syntax as the keys function and returns the number of deleted keys.

Example use of delete_pattern
>>> from django.core.cache import cache
>>> cache.delete_pattern("foo_*")

3.8. Redis native commands

django-redis has limited support for some Redis atomic operations, such as the commands SETNX and INCR.

You can use the SETNX command through the backend set() method with the nx parameter:

Example:
>>> from django.core.cache import cache
>>> cache.set("key", "value1", nx=True)
True
>>> cache.set("key", "value2", nx=True)
False
>>> cache.get("key")
"value1"

Also, the incr and decr methods use Redis atomic operations when the value contained by a key is suitable for them.

3.9. Raw client access

In some situations your application requires access to a raw Redis client to use some advanced features that aren’t exposed by the Django cache interface. To avoid storing another setting for creating a raw connection, django-redis exposes functions with which you can obtain a raw client reusing the cache connection string: get_redis_connection(alias).

>>> from django_redis import get_redis_connection
>>> con = get_redis_connection("default")
>>> con
<redis.client.StrictRedis object at 0x2dc4510>
Warning
Not all pluggable clients support this feature.

3.10. Connection pools

Behind the scenes, django-redis uses the underlying redis-py connection pool implementation, and exposes a simple way to configure it. Alternatively, you can directly customize a connection/connection pool creation for a backend.

The default redis-py behavior is to not close connections, recycling them when possible.

3.10.1. Configure default connection pool

The default connection pool is simple. You can only customize the maximum number of connections in the pool, by setting CONNECTION_POOL_KWARGS in the CACHES setting:

CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        ...
        "OPTIONS": {
            "CONNECTION_POOL_KWARGS": {"max_connections": 100}
        }
    }
}

You can verify how many connections the pool has opened with the following snippet:

from django.core.cache import get_cache
from django_redis import get_redis_connection

r = get_redis_connection("default")  # Use the name you have defined for Redis in settings.CACHES
connection_pool = r.connection_pool
print("Created connections so far: %d" % connection_pool._created_connections)

3.10.2. Use your own connection pool subclass

Sometimes you want to use your own subclass of the connection pool. This is possible with django-redis using the CONNECTION_POOL_CLASS parameter in the backend options.

myproj/mypool.py
from redis.connection import ConnectionPool

class MyOwnPool(ConnectionPool):
    # Just doing nothing, only for example purpose
    pass
settings.py
# Omitting all backend declaration boilerplate code.

"OPTIONS": {
    "CONNECTION_POOL_CLASS": "myproj.mypool.MyOwnPool",
}

3.10.3. Customize connection factory

If none of the previous methods satisfies you, you can get in the middle of the django-redis connection factory process and customize or completely rewrite it.

By default, django-redis creates connections through the django_redis.pool.ConnectionFactory class that is specified in the global Django setting DJANGO_REDIS_CONNECTION_FACTORY.

Partial interface of ConnectionFactory class
# Note: Using Python 3 notation for code documentation ;)

class ConnectionFactory(object):
    def get_connection_pool(self, params:dict):
        # Given connection parameters in the `params` argument,
        # return new connection pool.
        # It should be overwritten if you want do something
        # before/after creating the connection pool, or return your
        # own connection pool.
        pass

    def get_connection(self, params:dict):
        # Given connection parameters in the `params` argument,
        # return a new connection.
        # It should be overwritten if you want to do something
        # before/after creating a new connection.
        # The default implementation uses `get_connection_pool`
        # to obtain a pool and create a new connection in the
        # newly obtained pool.
        pass

    def get_or_create_connection_pool(self, params:dict):
        # This is a high layer on top of `get_connection_pool` for
        # implementing a cache of created connection pools.
        # It should be overwritten if you want change the default
        # behavior.
        pass

    def make_connection_params(self, url:str) -> dict:
        # The responsibility of this method is to convert basic connection
        # parameters and other settings to fully connection pool ready
        # connection parameters.
        pass

    def connect(self, url:str):
        # This is really a public API and entry point for this
        # factory class. This encapsulates the main logic of creating
        # the previously mentioned `params` using `make_connection_params`
        # and creating a new connection using the `get_connection` method.
        pass

3.11. Pluggable parsers

redis-py (the Python Redis client used by django-redis) comes with a pure Python Redis parser that works very well for most common task, but if you want some performance boost, you can use hiredis.

hiredis is a Redis client written in C and it has its own parser that can be used with django-redis.

"OPTIONS": {
    "PARSER_CLASS": "redis.connection.HiredisParser",
}

3.12. Pluggable clients

django_redis is designed to be very flexible and highly configurable. To that end, it exposes a pluggable backend system that makes it easy to extend the default behavior, and it ships with a few backends out of the box.

3.12.1. Default client

Almost all about the default client is explained, with one exception: the default client comes with master-slave support.

To connect to master-slave redis setup, you should change the LOCATION to something like this:

"LOCATION": [
    "redis://127.0.0.1:6379/1",
    "redis://127.0.0.1:6378/1",
]

The first connection string represents a master server and the rest to slave servers.

Warning
Master-Slave setup is not heavily tested in production environments.

3.12.2. Shard client

This pluggable client implements client-side sharding. It inherits almost all functionality from the default client. To use it, change your cache settings to something like this:

CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": [
            "redis://127.0.0.1:6379/1",
            "redis://127.0.0.1:6379/2",
        ],
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis.client.ShardClient",
        }
    }
}
Warning
Shard client is still experimental, so be careful when using it in production environments.

3.12.3. Herd client

This pluggable client helps dealing with the thundering herd problem. You can read more about it on Wikipedia.

Like previous pluggable clients, it inherits all functionality from the default client, adding some additional methods for getting/setting keys.

Example setup
 CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": "redis://127.0.0.1:6379/1",
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis.client.HerdClient",
        }
    }
}

This client exposes additional settings:

  • CACHE_HERD_TIMEOUT: Set default herd timeout. (Default value: 60s)

3.13. Pluggable serializer

The pluggable clients serialize data before sending it to the server. By default, django_redis serializes the data using Python pickle. This is very flexible and can handle a large range of object types.

To serialize using JSON instead, the serializer JSONSerializer is also available.

Example setup
 CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": "redis://127.0.0.1:6379/1",
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis.client.DefaultClient",
            "SERIALIZER": "django_redis.serializers.json.JSONSerializer",
        }
    }
}

There’s also support for serialization using MsgPack http://msgpack.org/ (that requires the msgpack-python library):

Example setup
 CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": "redis://127.0.0.1:6379/1",
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis.client.DefaultClient",
            "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer",
        }
    }
}

4. License

Copyright (c) 2011-2015 Andrey Antukh <niwi@niwi.be>
Copyright (c) 2011 Sean Bleier

All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
   notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
   notice, this list of conditions and the following disclaimer in the
   documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
   derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
django-redis-4.9.0/tests/0000755000175000001440000000000013245753134015370 5ustar niwiusers00000000000000django-redis-4.9.0/tests/README.txt0000644000175000001440000000054513245747656017106 0ustar niwiusers00000000000000Test requirements ----------------- Python packages ~~~~~~~~~~~~~~~ Install the development requirements using the requirements.txt file: pip install -r requirements.txt redis ~~~~~ * redis listening on default socket 127.0.0.1:6379 After this, run this command: python runtests.py python runtests.py .. django-redis-4.9.0/tests/__init__.py0000644000175000001440000000000013174553301017462 0ustar niwiusers00000000000000django-redis-4.9.0/tests/runtests-herd.py0000755000175000001440000000056313245747656020574 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_sqlite_herd") if __name__ == "__main__": from django.core.management import execute_from_command_line args = sys.argv args.insert(1, "test") execute_from_command_line(args) django-redis-4.9.0/tests/runtests-json.py0000755000175000001440000000056313245747656020623 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_sqlite_json") if __name__ == "__main__": from django.core.management import execute_from_command_line args = sys.argv args.insert(1, "test") execute_from_command_line(args) django-redis-4.9.0/tests/runtests-lz4.py0000644000175000001440000000056213245747656020357 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_sqlite_lz4") if __name__ == "__main__": from django.core.management import execute_from_command_line args = 
sys.argv args.insert(1, "test") execute_from_command_line(args) django-redis-4.9.0/tests/runtests-msgpack.py0000755000175000001440000000056613245747656021302 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_sqlite_msgpack") if __name__ == "__main__": from django.core.management import execute_from_command_line args = sys.argv args.insert(1, "test") execute_from_command_line(args) django-redis-4.9.0/tests/runtests-sharded.py0000755000175000001440000000056713245747656021270 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_sqlite_sharding") if __name__ == "__main__": from django.core.management import execute_from_command_line args = sys.argv args.insert(1, "test") execute_from_command_line(args) django-redis-4.9.0/tests/runtests-unixsockets.py0000755000175000001440000000056513174553301022212 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_sqlite_usock") if __name__ == "__main__": from django.core.management import execute_from_command_line args = sys.argv args.insert(1, "test") execute_from_command_line(args) django-redis-4.9.0/tests/runtests-zlib.py0000755000175000001440000000056313245747656020612 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_sqlite_zlib") if __name__ == "__main__": from django.core.management import execute_from_command_line args = sys.argv args.insert(1, "test") execute_from_command_line(args) 
django-redis-4.9.0/tests/runtests.py0000755000175000001440000000055613245747656017656 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_sqlite") if __name__ == "__main__": from django.core.management import execute_from_command_line args = sys.argv args.insert(1, "test") execute_from_command_line(args) django-redis-4.9.0/tests/shell.py0000755000175000001440000000047113077640133017053 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- import os import sys os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_sqlite") sys.path.insert(0, '..') if __name__ == "__main__": from django.core.management import execute_from_command_line args = sys.argv args.insert(1, "shell") execute_from_command_line(args) django-redis-4.9.0/tests/test_backend.py0000644000175000001440000011457513245747656020421 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import, print_function, unicode_literals import base64 import copy import datetime import time import unittest from datetime import timedelta from django import VERSION from django.conf import settings from django.contrib.sessions.backends.cache import SessionStore as CacheSession from django.core.cache import DEFAULT_CACHE_ALIAS, cache, caches from django.test import override_settings from django.test.utils import patch_logger from django.utils import six, timezone import django_redis.cache from django_redis import pool from django_redis.client import DefaultClient, ShardClient, herd from django_redis.serializers.json import JSONSerializer from django_redis.serializers.msgpack import MSGPackSerializer try: from unittest.mock import patch, Mock except ImportError: from mock import patch, Mock herd.CACHE_HERD_TIMEOUT = 2 if six.PY3: long = int def make_key(key, prefix, version): return "{}#{}#{}".format(prefix, version, key) def 
reverse_key(key): return key.split("#", 2)[2] class DjangoRedisConnectionStrings(unittest.TestCase): def setUp(self): self.cf = pool.get_connection_factory(options={}) self.constring4 = "unix://tmp/foo.bar?db=1" self.constring5 = "redis://localhost/2" self.constring6 = "rediss://localhost:3333?db=2" def test_new_connection_strings(self): res1 = self.cf.make_connection_params(self.constring4) res2 = self.cf.make_connection_params(self.constring5) res3 = self.cf.make_connection_params(self.constring6) self.assertEqual(res1["url"], self.constring4) self.assertEqual(res2["url"], self.constring5) self.assertEqual(res3["url"], self.constring6) class DjangoRedisCacheTestEscapePrefix(unittest.TestCase): def setUp(self): caches_setting = copy.deepcopy(settings.CACHES) caches_setting['default']['KEY_PREFIX'] = '*' cm = override_settings(CACHES=caches_setting) cm.enable() self.addCleanup(cm.disable) self.cache = caches['default'] try: self.cache.clear() except Exception: pass self.other = caches['with_prefix'] try: self.other.clear() except Exception: pass def test_delete_pattern(self): self.cache.set('a', '1') self.other.set('b', '2') self.cache.delete_pattern('*') self.assertIs(self.cache.has_key('a'), False) self.assertEqual(self.other.get('b'), '2') def test_iter_keys(self): if isinstance(self.cache.client, ShardClient): raise unittest.SkipTest("ShardClient doesn't support iter_keys") self.cache.set('a', '1') self.other.set('b', '2') self.assertEqual(list(self.cache.iter_keys('*')), ['a']) def test_keys(self): self.cache.set('a', '1') self.other.set('b', '2') keys = self.cache.keys('*') self.assertIn('a', keys) self.assertNotIn('b', keys) class DjangoRedisCacheTestCustomKeyFunction(unittest.TestCase): def setUp(self): caches_setting = copy.deepcopy(settings.CACHES) caches_setting['default']['KEY_FUNCTION'] = 'test_backend.make_key' caches_setting['default']['REVERSE_KEY_FUNCTION'] = 'test_backend.reverse_key' cm = override_settings(CACHES=caches_setting) cm.enable() 
self.addCleanup(cm.disable) self.cache = caches['default'] try: self.cache.clear() except Exception: pass def test_custom_key_function(self): for key in ["foo-aa", "foo-ab", "foo-bb", "foo-bc"]: self.cache.set(key, "foo") res = self.cache.delete_pattern("*foo-a*") self.assertTrue(bool(res)) keys = self.cache.keys("foo*") self.assertEqual(set(keys), {"foo-bb", "foo-bc"}) # ensure our custom function was actually called try: self.assertEqual( {k.decode('utf-8') for k in self.cache.raw_client.keys('*')}, {'#1#foo-bc', '#1#foo-bb'}) except (NotImplementedError, AttributeError): # not all clients support .keys() pass class DjangoRedisCacheTests(unittest.TestCase): def setUp(self): self.cache = cache try: self.cache.clear() except Exception: pass def test_setnx(self): # we should ensure there is no test_key_nx in redis self.cache.delete("test_key_nx") res = self.cache.get("test_key_nx", None) self.assertEqual(res, None) res = self.cache.set("test_key_nx", 1, nx=True) self.assertTrue(res) # test that second set will have res = self.cache.set("test_key_nx", 2, nx=True) self.assertFalse(res) res = self.cache.get("test_key_nx") self.assertEqual(res, 1) self.cache.delete("test_key_nx") res = self.cache.get("test_key_nx", None) self.assertEqual(res, None) def test_setnx_timeout(self): # test that timeout still works for nx=True res = self.cache.set("test_key_nx", 1, timeout=2, nx=True) self.assertTrue(res) time.sleep(3) res = self.cache.get("test_key_nx", None) self.assertEqual(res, None) # test that timeout will not affect key, if it was there self.cache.set("test_key_nx", 1) res = self.cache.set("test_key_nx", 2, timeout=2, nx=True) self.assertFalse(res) time.sleep(3) res = self.cache.get("test_key_nx", None) self.assertEqual(res, 1) self.cache.delete("test_key_nx") res = self.cache.get("test_key_nx", None) self.assertEqual(res, None) def test_unicode_keys(self): self.cache.set('ключ', 'value') res = self.cache.get('ключ') self.assertEqual(res, 'value') def 
test_save_and_integer(self): self.cache.set("test_key", 2) res = self.cache.get("test_key", "Foo") self.assertIsInstance(res, int) self.assertEqual(res, 2) def test_save_string(self): self.cache.set("test_key", "hello" * 1000) res = self.cache.get("test_key") type(res) self.assertIsInstance(res, six.text_type) self.assertEqual(res, "hello" * 1000) self.cache.set("test_key", "2") res = self.cache.get("test_key") self.assertIsInstance(res, six.text_type) self.assertEqual(res, "2") def test_save_unicode(self): self.cache.set("test_key", "heló") res = self.cache.get("test_key") self.assertIsInstance(res, six.text_type) self.assertEqual(res, "heló") def test_save_dict(self): if isinstance(self.cache.client._serializer, (JSONSerializer, MSGPackSerializer)): # JSONSerializer and MSGPackSerializer use the isoformat for # datetimes. now_dt = datetime.datetime.now().isoformat() else: now_dt = datetime.datetime.now() test_dict = {"id": 1, "date": now_dt, "name": "Foo"} self.cache.set("test_key", test_dict) res = self.cache.get("test_key") self.assertIsInstance(res, dict) self.assertEqual(res["id"], 1) self.assertEqual(res["name"], "Foo") self.assertEqual(res["date"], now_dt) def test_save_float(self): float_val = 1.345620002 self.cache.set("test_key", float_val) res = self.cache.get("test_key") self.assertIsInstance(res, float) self.assertEqual(res, float_val) def test_timeout(self): self.cache.set("test_key", 222, timeout=3) time.sleep(4) res = self.cache.get("test_key", None) self.assertEqual(res, None) def test_timeout_0(self): self.cache.set("test_key", 222, timeout=0) res = self.cache.get("test_key", None) self.assertEqual(res, None) def test_timeout_parameter_as_positional_argument(self): self.cache.set("test_key", 222, -1) res = self.cache.get("test_key", None) self.assertIsNone(res) self.cache.set("test_key", 222, 1) res1 = self.cache.get("test_key", None) time.sleep(2) res2 = self.cache.get("test_key", None) self.assertEqual(res1, 222) self.assertEqual(res2, None) # 
nx=True should not overwrite expire of key already in db self.cache.set("test_key", 222, None) self.cache.set("test_key", 222, -1, nx=True) res = self.cache.get("test_key", None) self.assertEqual(res, 222) def test_timeout_negative(self): self.cache.set("test_key", 222, timeout=-1) res = self.cache.get("test_key", None) self.assertIsNone(res) self.cache.set("test_key", 222, timeout=None) self.cache.set("test_key", 222, timeout=-1) res = self.cache.get("test_key", None) self.assertIsNone(res) # nx=True should not overwrite expire of key already in db self.cache.set("test_key", 222, timeout=None) self.cache.set("test_key", 222, timeout=-1, nx=True) res = self.cache.get("test_key", None) self.assertEqual(res, 222) def test_timeout_tiny(self): self.cache.set("test_key", 222, timeout=0.00001) res = self.cache.get("test_key", None) self.assertIn(res, (None, 222)) def test_set_add(self): self.cache.set("add_key", "Initial value") self.cache.add("add_key", "New value") res = cache.get("add_key") self.assertEqual(res, "Initial value") def test_get_many(self): self.cache.set("a", 1) self.cache.set("b", 2) self.cache.set("c", 3) res = self.cache.get_many(["a", "b", "c"]) self.assertEqual(res, {"a": 1, "b": 2, "c": 3}) def test_get_many_unicode(self): self.cache.set("a", "1") self.cache.set("b", "2") self.cache.set("c", "3") res = self.cache.get_many(["a", "b", "c"]) self.assertEqual(res, {"a": "1", "b": "2", "c": "3"}) def test_set_many(self): self.cache.set_many({"a": 1, "b": 2, "c": 3}) res = self.cache.get_many(["a", "b", "c"]) self.assertEqual(res, {"a": 1, "b": 2, "c": 3}) def test_delete(self): self.cache.set_many({"a": 1, "b": 2, "c": 3}) res = self.cache.delete("a") self.assertTrue(bool(res)) res = self.cache.get_many(["a", "b", "c"]) self.assertEqual(res, {"b": 2, "c": 3}) res = self.cache.delete("a") self.assertFalse(bool(res)) def test_delete_many(self): self.cache.set_many({"a": 1, "b": 2, "c": 3}) res = self.cache.delete_many(["a", "b"]) 
self.assertTrue(bool(res)) res = self.cache.get_many(["a", "b", "c"]) self.assertEqual(res, {"c": 3}) res = self.cache.delete_many(["a", "b"]) self.assertFalse(bool(res)) def test_delete_many_generator(self): self.cache.set_many({"a": 1, "b": 2, "c": 3}) res = self.cache.delete_many(key for key in ["a", "b"]) self.assertTrue(bool(res)) res = self.cache.get_many(["a", "b", "c"]) self.assertEqual(res, {"c": 3}) res = self.cache.delete_many(["a", "b"]) self.assertFalse(bool(res)) def test_delete_many_empty_generator(self): res = self.cache.delete_many(key for key in []) self.assertFalse(bool(res)) def test_incr(self): try: self.cache.set("num", 1) self.cache.incr("num") res = self.cache.get("num") self.assertEqual(res, 2) self.cache.incr("num", 10) res = self.cache.get("num") self.assertEqual(res, 12) # max 64 bit signed int self.cache.set("num", 9223372036854775807) self.cache.incr("num") res = self.cache.get("num") self.assertEqual(res, 9223372036854775808) self.cache.incr("num", 2) res = self.cache.get("num") self.assertEqual(res, 9223372036854775810) self.cache.set("num", long(3)) self.cache.incr("num", 2) res = self.cache.get("num") self.assertEqual(res, 5) except NotImplementedError as e: print(e) def test_incr_error(self): try: with self.assertRaises(ValueError): # key does not exist self.cache.incr('numnum') except NotImplementedError: raise unittest.SkipTest("`incr` not supported in herd client") def test_incr_ignore_check(self): try: # incr with 'ignore_key_check' is supported only on the DefaultClient if isinstance(self.cache, DefaultClient): # key exists check will be skipped and the value will be incremented by '1' which is the default delta self.cache.incr("num", ignore_key_check=True) res = self.cache.get("num") self.assertEqual(res, 1) self.cache.delete("num") # since key doesnt exist it is set to the delta value, 10 in this case self.cache.incr("num", 10, ignore_key_check=True) res = self.cache.get("num") self.assertEqual(res, 10) 
self.cache.delete("num") # following are just regression checks to make sure it still works as expected with incr # max 64 bit signed int self.cache.set("num", 9223372036854775807) self.cache.incr("num", ignore_key_check=True) res = self.cache.get("num") self.assertEqual(res, 9223372036854775808) self.cache.incr("num", 2, ignore_key_check=True) res = self.cache.get("num") self.assertEqual(res, 9223372036854775810) self.cache.set("num", long(3)) self.cache.incr("num", 2, ignore_key_check=True) res = self.cache.get("num") self.assertEqual(res, 5) except NotImplementedError: raise unittest.SkipTest("`incr` not supported in herd client") def test_get_set_bool(self): self.cache.set("bool", True) res = self.cache.get("bool") self.assertIsInstance(res, bool) self.assertEqual(res, True) self.cache.set("bool", False) res = self.cache.get("bool") self.assertIsInstance(res, bool) self.assertEqual(res, False) def test_decr(self): try: self.cache.set("num", 20) self.cache.decr("num") res = self.cache.get("num") self.assertEqual(res, 19) self.cache.decr("num", 20) res = self.cache.get("num") self.assertEqual(res, -1) self.cache.decr("num", long(2)) res = self.cache.get("num") self.assertEqual(res, -3) self.cache.set("num", long(20)) self.cache.decr("num") res = self.cache.get("num") self.assertEqual(res, 19) # max 64 bit signed int + 1 self.cache.set("num", 9223372036854775808) self.cache.decr("num") res = self.cache.get("num") self.assertEqual(res, 9223372036854775807) self.cache.decr("num", 2) res = self.cache.get("num") self.assertEqual(res, 9223372036854775805) except NotImplementedError as e: print(e) def test_version(self): self.cache.set("keytest", 2, version=2) res = self.cache.get("keytest") self.assertEqual(res, None) res = self.cache.get("keytest", version=2) self.assertEqual(res, 2) def test_incr_version(self): try: self.cache.set("keytest", 2) self.cache.incr_version("keytest") res = self.cache.get("keytest") self.assertEqual(res, None) res = 
self.cache.get("keytest", version=2) self.assertEqual(res, 2) except NotImplementedError as e: print(e) def test_delete_pattern(self): for key in ["foo-aa", "foo-ab", "foo-bb", "foo-bc"]: self.cache.set(key, "foo") res = self.cache.delete_pattern("*foo-a*") self.assertTrue(bool(res)) keys = self.cache.keys("foo*") self.assertEqual(set(keys), {"foo-bb", "foo-bc"}) res = self.cache.delete_pattern("*foo-a*") self.assertFalse(bool(res)) @patch('django_redis.cache.RedisCache.client') def test_delete_pattern_with_custom_count(self, client_mock): for key in ["foo-aa", "foo-ab", "foo-bb", "foo-bc"]: self.cache.set(key, "foo") self.cache.delete_pattern("*foo-a*", itersize=2) client_mock.delete_pattern.assert_called_once_with("*foo-a*", itersize=2) @patch('django_redis.cache.RedisCache.client') def test_delete_pattern_with_settings_default_scan_count(self, client_mock): for key in ["foo-aa", "foo-ab", "foo-bb", "foo-bc"]: self.cache.set(key, "foo") expected_count = django_redis.cache.DJANGO_REDIS_SCAN_ITERSIZE self.cache.delete_pattern("*foo-a*") client_mock.delete_pattern.assert_called_once_with("*foo-a*", itersize=expected_count) def test_close(self): cache = caches["default"] cache.set("f", "1") cache.close() def test_ttl(self): cache = caches["default"] _params = cache._params _is_herd = (_params["OPTIONS"]["CLIENT_CLASS"] == "django_redis.client.HerdClient") _is_shard = (_params["OPTIONS"]["CLIENT_CLASS"] == "django_redis.client.ShardClient") # Not supported for shard client. 
if _is_shard: return # Test ttl cache.set("foo", "bar", 10) ttl = cache.ttl("foo") if _is_herd: self.assertAlmostEqual(ttl, 12) else: self.assertAlmostEqual(ttl, 10) # Test ttl None cache.set("foo", "foo", timeout=None) ttl = cache.ttl("foo") self.assertEqual(ttl, None) # Test ttl with expired key cache.set("foo", "foo", timeout=-1) ttl = cache.ttl("foo") self.assertEqual(ttl, 0) # Test ttl with not existent key ttl = cache.ttl("not-existent-key") self.assertEqual(ttl, 0) def test_persist(self): self.cache.set("foo", "bar", timeout=20) self.cache.persist("foo") ttl = self.cache.ttl("foo") self.assertIsNone(ttl) def test_expire(self): self.cache.set("foo", "bar", timeout=None) self.cache.expire("foo", 20) ttl = self.cache.ttl("foo") self.assertAlmostEqual(ttl, 20) def test_lock(self): lock = self.cache.lock("foobar") lock.acquire(blocking=True) self.assertTrue(self.cache.has_key("foobar")) lock.release() self.assertFalse(self.cache.has_key("foobar")) def test_iter_keys(self): cache = caches["default"] _params = cache._params _is_shard = (_params["OPTIONS"]["CLIENT_CLASS"] == "django_redis.client.ShardClient") if _is_shard: return cache.set("foo1", 1) cache.set("foo2", 1) cache.set("foo3", 1) # Test simple result result = set(cache.iter_keys("foo*")) self.assertEqual(result, {"foo1", "foo2", "foo3"}) # Test limited result result = list(cache.iter_keys("foo*", itersize=2)) self.assertEqual(len(result), 3) # Test generator object result = cache.iter_keys("foo*") self.assertNotEqual(next(result), None) def test_master_slave_switching(self): try: cache = caches["sample"] client = cache.client client._server = ["foo", "bar"] client._clients = ["Foo", "Bar"] self.assertEqual(client.get_client(write=True), "Foo") self.assertEqual(client.get_client(write=False), "Bar") except NotImplementedError: pass class DjangoOmitExceptionsTests(unittest.TestCase): def setUp(self): self._orig_setting = django_redis.cache.DJANGO_REDIS_IGNORE_EXCEPTIONS 
django_redis.cache.DJANGO_REDIS_IGNORE_EXCEPTIONS = True caches_setting = copy.deepcopy(settings.CACHES) caches_setting["doesnotexist"]["IGNORE_EXCEPTIONS"] = True cm = override_settings(CACHES=caches_setting) cm.enable() self.addCleanup(cm.disable) self.cache = caches["doesnotexist"] def tearDown(self): django_redis.cache.DJANGO_REDIS_IGNORE_EXCEPTIONS = self._orig_setting def test_get_many_returns_default_arg(self): self.assertEqual(self.cache.get_many(["key1", "key2", "key3"]), {}) def test_get(self): self.assertIsNone(self.cache.get("key")) self.assertEqual(self.cache.get("key", "default"), "default") self.assertEqual(self.cache.get("key", default="default"), "default") # Copied from Django's sessions test suite. Keep in sync with upstream. # https://github.com/django/django/blob/master/tests/sessions_tests/tests.py class SessionTestsMixin: # This does not inherit from TestCase to avoid any tests being run with this # class, which wouldn't work, and to allow different TestCase subclasses to # be used. 
backend = None # subclasses must specify def setUp(self): self.session = self.backend() def tearDown(self): # NB: be careful to delete any sessions created; stale sessions fill up # the /tmp (with some backends) and eventually overwhelm it after lots # of runs (think buildbots) self.session.delete() def test_new_session(self): self.assertIs(self.session.modified, False) self.assertIs(self.session.accessed, False) def test_get_empty(self): self.assertIsNone(self.session.get('cat')) def test_store(self): self.session['cat'] = "dog" self.assertIs(self.session.modified, True) self.assertEqual(self.session.pop('cat'), 'dog') def test_pop(self): self.session['some key'] = 'exists' # Need to reset these to pretend we haven't accessed it: self.accessed = False self.modified = False self.assertEqual(self.session.pop('some key'), 'exists') self.assertIs(self.session.accessed, True) self.assertIs(self.session.modified, True) self.assertIsNone(self.session.get('some key')) def test_pop_default(self): self.assertEqual(self.session.pop('some key', 'does not exist'), 'does not exist') self.assertIs(self.session.accessed, True) self.assertIs(self.session.modified, False) def test_pop_default_named_argument(self): self.assertEqual(self.session.pop('some key', default='does not exist'), 'does not exist') self.assertIs(self.session.accessed, True) self.assertIs(self.session.modified, False) def test_pop_no_default_keyerror_raised(self): with self.assertRaises(KeyError): self.session.pop('some key') def test_setdefault(self): self.assertEqual(self.session.setdefault('foo', 'bar'), 'bar') self.assertEqual(self.session.setdefault('foo', 'baz'), 'bar') self.assertIs(self.session.accessed, True) self.assertIs(self.session.modified, True) def test_update(self): self.session.update({'update key': 1}) self.assertIs(self.session.accessed, True) self.assertIs(self.session.modified, True) self.assertEqual(self.session.get('update key', None), 1) def test_has_key(self): self.session['some key'] 
= 1 self.session.modified = False self.session.accessed = False self.assertIn('some key', self.session) self.assertIs(self.session.accessed, True) self.assertIs(self.session.modified, False) def test_values(self): self.assertEqual(list(self.session.values()), []) self.assertIs(self.session.accessed, True) self.session['some key'] = 1 self.session.modified = False self.session.accessed = False self.assertEqual(list(self.session.values()), [1]) self.assertIs(self.session.accessed, True) self.assertIs(self.session.modified, False) def test_keys(self): self.session['x'] = 1 self.session.modified = False self.session.accessed = False self.assertEqual(list(self.session.keys()), ['x']) self.assertIs(self.session.accessed, True) self.assertIs(self.session.modified, False) def test_items(self): self.session['x'] = 1 self.session.modified = False self.session.accessed = False self.assertEqual(list(self.session.items()), [('x', 1)]) self.assertIs(self.session.accessed, True) self.assertIs(self.session.modified, False) def test_clear(self): self.session['x'] = 1 self.session.modified = False self.session.accessed = False self.assertEqual(list(self.session.items()), [('x', 1)]) self.session.clear() self.assertEqual(list(self.session.items()), []) self.assertIs(self.session.accessed, True) self.assertIs(self.session.modified, True) def test_save(self): self.session.save() self.assertIs(self.session.exists(self.session.session_key), True) def test_delete(self): self.session.save() self.session.delete(self.session.session_key) self.assertIs(self.session.exists(self.session.session_key), False) def test_flush(self): self.session['foo'] = 'bar' self.session.save() prev_key = self.session.session_key self.session.flush() self.assertIs(self.session.exists(prev_key), False) self.assertNotEqual(self.session.session_key, prev_key) self.assertIsNone(self.session.session_key) self.assertIs(self.session.modified, True) self.assertIs(self.session.accessed, True) def test_cycle(self): 
self.session['a'], self.session['b'] = 'c', 'd' self.session.save() prev_key = self.session.session_key prev_data = list(self.session.items()) self.session.cycle_key() self.assertIs(self.session.exists(prev_key), False) self.assertNotEqual(self.session.session_key, prev_key) self.assertEqual(list(self.session.items()), prev_data) def test_cycle_with_no_session_cache(self): self.session['a'], self.session['b'] = 'c', 'd' self.session.save() prev_data = self.session.items() self.session = self.backend(self.session.session_key) self.assertIs(hasattr(self.session, '_session_cache'), False) self.session.cycle_key() self.assertCountEqual(self.session.items(), prev_data) def test_save_doesnt_clear_data(self): self.session['a'] = 'b' self.session.save() self.assertEqual(self.session['a'], 'b') def test_invalid_key(self): # Submitting an invalid session key (either by guessing, or if the db has # removed the key) results in a new key being generated. try: session = self.backend('1') session.save() self.assertNotEqual(session.session_key, '1') self.assertIsNone(session.get('cat')) session.delete() finally: # Some backends leave a stale cache entry for the invalid # session key; make sure that entry is manually deleted session.delete('1') def test_session_key_empty_string_invalid(self): """Falsey values (Such as an empty string) are rejected.""" self.session._session_key = '' self.assertIsNone(self.session.session_key) def test_session_key_too_short_invalid(self): """Strings shorter than 8 characters are rejected.""" self.session._session_key = '1234567' self.assertIsNone(self.session.session_key) def test_session_key_valid_string_saved(self): """Strings of length 8 and up are accepted and stored.""" self.session._session_key = '12345678' self.assertEqual(self.session.session_key, '12345678') def test_session_key_is_read_only(self): def set_session_key(session): session.session_key = session._get_new_session_key() with self.assertRaises(AttributeError): 
set_session_key(self.session) # Custom session expiry def test_default_expiry(self): # A normal session has a max age equal to settings self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE) # So does a custom session with an idle expiration time of 0 (but it'll # expire at browser close) self.session.set_expiry(0) self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE) def test_custom_expiry_seconds(self): modification = timezone.now() self.session.set_expiry(10) date = self.session.get_expiry_date(modification=modification) self.assertEqual(date, modification + timedelta(seconds=10)) age = self.session.get_expiry_age(modification=modification) self.assertEqual(age, 10) def test_custom_expiry_timedelta(self): modification = timezone.now() # Mock timezone.now, because set_expiry calls it on this code path. original_now = timezone.now try: timezone.now = lambda: modification self.session.set_expiry(timedelta(seconds=10)) finally: timezone.now = original_now date = self.session.get_expiry_date(modification=modification) self.assertEqual(date, modification + timedelta(seconds=10)) age = self.session.get_expiry_age(modification=modification) self.assertEqual(age, 10) def test_custom_expiry_datetime(self): modification = timezone.now() self.session.set_expiry(modification + timedelta(seconds=10)) date = self.session.get_expiry_date(modification=modification) self.assertEqual(date, modification + timedelta(seconds=10)) age = self.session.get_expiry_age(modification=modification) self.assertEqual(age, 10) def test_custom_expiry_reset(self): self.session.set_expiry(None) self.session.set_expiry(10) self.session.set_expiry(None) self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE) def test_get_expire_at_browser_close(self): # Tests get_expire_at_browser_close with different settings and different # set_expiry calls with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=False): self.session.set_expiry(10) 
self.assertIs(self.session.get_expire_at_browser_close(), False) self.session.set_expiry(0) self.assertIs(self.session.get_expire_at_browser_close(), True) self.session.set_expiry(None) self.assertIs(self.session.get_expire_at_browser_close(), False) with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=True): self.session.set_expiry(10) self.assertIs(self.session.get_expire_at_browser_close(), False) self.session.set_expiry(0) self.assertIs(self.session.get_expire_at_browser_close(), True) self.session.set_expiry(None) self.assertIs(self.session.get_expire_at_browser_close(), True) def test_decode(self): # Ensure we can decode what we encode data = {'a test key': 'a test value'} encoded = self.session.encode(data) self.assertEqual(self.session.decode(encoded), data) def test_decode_failure_logged_to_security(self): bad_encode = base64.b64encode(b'flaskdj:alkdjf') with patch_logger('django.security.SuspiciousSession', 'warning') as calls: self.assertEqual({}, self.session.decode(bad_encode)) # check that the failed decode is logged self.assertEqual(len(calls), 1) self.assertIn('corrupted', calls[0]) def test_actual_expiry(self): # this doesn't work with JSONSerializer (serializing timedelta) with override_settings(SESSION_SERIALIZER='django.contrib.sessions.serializers.PickleSerializer'): self.session = self.backend() # reinitialize after overriding settings # Regression test for #19200 old_session_key = None new_session_key = None try: self.session['foo'] = 'bar' self.session.set_expiry(-timedelta(seconds=10)) self.session.save() old_session_key = self.session.session_key # With an expiry date in the past, the session expires instantly. 
new_session = self.backend(self.session.session_key) new_session_key = new_session.session_key self.assertNotIn('foo', new_session) finally: self.session.delete(old_session_key) self.session.delete(new_session_key) @unittest.skipIf(VERSION < (2, 0), 'Requires Django 2.0+') def test_session_load_does_not_create_record(self): """ Loading an unknown session key does not create a session record. Creating session records on load is a DOS vulnerability. """ session = self.backend('someunknownkey') session.load() self.assertIsNone(session.session_key) self.assertIs(session.exists(session.session_key), False) # provided unknown key was cycled, not reused self.assertNotEqual(session.session_key, 'someunknownkey') def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self): """ Sessions shouldn't be resurrected by a concurrent request. """ from django.contrib.sessions.backends.base import UpdateError # Create new session. s1 = self.backend() s1['test_data'] = 'value1' s1.save(must_create=True) # Logout in another context. s2 = self.backend(s1.session_key) s2.delete() # Modify session in first context. s1['test_data'] = 'value2' with self.assertRaises(UpdateError): # This should throw an exception as the session is deleted, not # resurrect the session. 
s1.save() self.assertEqual(s1.load(), {}) if six.PY2: assertCountEqual = unittest.TestCase.assertItemsEqual class SessionTests(SessionTestsMixin, unittest.TestCase): backend = CacheSession def test_actual_expiry(self): if isinstance(caches[DEFAULT_CACHE_ALIAS].client._serializer, MSGPackSerializer): raise unittest.SkipTest("msgpack serializer doesn't support datetime serialization") super(SessionTests, self).test_actual_expiry() class TestDefaultClient(unittest.TestCase): @patch('test_backend.DefaultClient.get_client') @patch('test_backend.DefaultClient.__init__', return_value=None) def test_delete_pattern_calls_get_client_given_no_client(self, init_mock, get_client_mock): client = DefaultClient() client._backend = Mock() client._backend.key_prefix = '' client.delete_pattern(pattern='foo*') get_client_mock.assert_called_once_with(write=True) @patch('test_backend.DefaultClient.make_pattern') @patch('test_backend.DefaultClient.get_client', return_value=Mock()) @patch('test_backend.DefaultClient.__init__', return_value=None) def test_delete_pattern_calls_make_pattern( self, init_mock, get_client_mock, make_pattern_mock): client = DefaultClient() client._backend = Mock() client._backend.key_prefix = '' get_client_mock.return_value.scan_iter.return_value = [] client.delete_pattern(pattern='foo*') kwargs = {'version': None, 'prefix': None} # if not isinstance(caches['default'].client, ShardClient): # kwargs['prefix'] = None make_pattern_mock.assert_called_once_with('foo*', **kwargs) @patch('test_backend.DefaultClient.make_pattern') @patch('test_backend.DefaultClient.get_client', return_value=Mock()) @patch('test_backend.DefaultClient.__init__', return_value=None) def test_delete_pattern_calls_scan_iter_with_count_if_itersize_given( self, init_mock, get_client_mock, make_pattern_mock): client = DefaultClient() client._backend = Mock() client._backend.key_prefix = '' get_client_mock.return_value.scan_iter.return_value = [] client.delete_pattern(pattern='foo*', 
itersize=90210) get_client_mock.return_value.scan_iter.assert_called_once_with( count=90210, match=make_pattern_mock.return_value) class TestShardClient(unittest.TestCase): @patch('test_backend.DefaultClient.make_pattern') @patch('test_backend.ShardClient.__init__', return_value=None) def test_delete_pattern_calls_scan_iter_with_count_if_itersize_given( self, init_mock, make_pattern_mock): client = ShardClient() client._backend = Mock() client._backend.key_prefix = '' connection = Mock() connection.scan_iter.return_value = [] client._serverdict = {'test': connection} client.delete_pattern(pattern='foo*', itersize=10) connection.scan_iter.assert_called_once_with(count=10, match=make_pattern_mock.return_value) @patch('test_backend.DefaultClient.make_pattern') @patch('test_backend.ShardClient.__init__', return_value=None) def test_delete_pattern_calls_scan_iter(self, init_mock, make_pattern_mock): client = ShardClient() client._backend = Mock() client._backend.key_prefix = '' connection = Mock() connection.scan_iter.return_value = [] client._serverdict = {'test': connection} client.delete_pattern(pattern='foo*') connection.scan_iter.assert_called_once_with(match=make_pattern_mock.return_value) @patch('test_backend.DefaultClient.make_pattern') @patch('test_backend.ShardClient.__init__', return_value=None) def test_delete_pattern_calls_delete_for_given_keys(self, init_mock, make_pattern_mock): client = ShardClient() client._backend = Mock() client._backend.key_prefix = '' connection = Mock() connection.scan_iter.return_value = [Mock(), Mock()] connection.delete.return_value = 0 client._serverdict = {'test': connection} client.delete_pattern(pattern='foo*') connection.delete.assert_called_once_with(*connection.scan_iter.return_value) django-redis-4.9.0/tests/test_hashring.py0000644000175000001440000000164113245747656020622 0ustar niwiusers00000000000000# -*- coding: utf-8 -*- import unittest from django_redis.hash_ring import HashRing class Node(object): def 
__init__(self, id): self.id = id def __str__(self): return "node:{0}".format(self.id) def __repr__(self): return "".format(self.id) class HashRingTest(unittest.TestCase): def setUp(self): self.node0 = Node(0) self.node1 = Node(1) self.node2 = Node(2) self.nodes = [self.node0, self.node1, self.node2] self.ring = HashRing(self.nodes) def test_hashring(self): ids = [] for key in ["test{0}".format(x) for x in range(10)]: node = self.ring.get_node(key) ids.append(node.id) self.assertEqual(ids, [0, 2, 1, 2, 2, 2, 2, 0, 1, 1]) def test_hashring_brute_force(self): for key in ("test{0}".format(x) for x in range(10000)): self.ring.get_node(key) django-redis-4.9.0/tests/test_sqlite.py0000644000175000001440000000212413245747656020315 0ustar niwiusers00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": [ "redis://127.0.0.1:6379?db=1", "redis://127.0.0.1:6379?db=1", ], "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", } }, "doesnotexist": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "127.0.0.1:56379:1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", } }, "sample": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "127.0.0.1:6379:1,127.0.0.1:6379:1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", } }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", }, "KEY_PREFIX": "test-prefix", }, } INSTALLED_APPS = ( "django.contrib.sessions", ) django-redis-4.9.0/tests/test_sqlite_herd.py0000644000175000001440000000211613245747656021320 0ustar niwiusers00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { 'default': { 'BACKEND': 'django_redis.cache.RedisCache', 'LOCATION': [ '127.0.0.1:6379:5', ], 'OPTIONS': { 'CLIENT_CLASS': 'django_redis.client.HerdClient', } }, 'doesnotexist': { 'BACKEND': 
'django_redis.cache.RedisCache', 'LOCATION': '127.0.0.1:56379:1', 'OPTIONS': { 'CLIENT_CLASS': 'django_redis.client.HerdClient', } }, 'sample': { 'BACKEND': 'django_redis.cache.RedisCache', 'LOCATION': '127.0.0.1:6379:1,127.0.0.1:6379:1', 'OPTIONS': { 'CLIENT_CLASS': 'django_redis.client.HerdClient', } }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.HerdClient", }, "KEY_PREFIX": "test-prefix", }, } # TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner' INSTALLED_APPS = ( "django.contrib.sessions", ) django-redis-4.9.0/tests/test_sqlite_json.py0000644000175000001440000000257413245747656021357 0ustar niwiusers00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": [ "redis://127.0.0.1:6379?db=1", "redis://127.0.0.1:6379?db=1", ], "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.json.JSONSerializer", } }, "doesnotexist": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "127.0.0.1:56379:1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.json.JSONSerializer", } }, "sample": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "127.0.0.1:6379:1,127.0.0.1:6379:1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.json.JSONSerializer", } }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.json.JSONSerializer", }, "KEY_PREFIX": "test-prefix", }, } INSTALLED_APPS = ( "django.contrib.sessions", ) django-redis-4.9.0/tests/test_sqlite_lz4.py0000644000175000001440000000256313245747656021115 0ustar niwiusers00000000000000SECRET_KEY = 
"django_tests_secret_key" CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": [ "redis://127.0.0.1:6379?db=1", "redis://127.0.0.1:6379?db=1", ], "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor" } }, "doesnotexist": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "127.0.0.1:56379:1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor", } }, "sample": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "127.0.0.1:6379:1,127.0.0.1:6379:1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor", } }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor", }, "KEY_PREFIX": "test-prefix", }, } INSTALLED_APPS = ( "django.contrib.sessions", ) django-redis-4.9.0/tests/test_sqlite_msgpack.py0000644000175000001440000000262413245747656022027 0ustar niwiusers00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": [ "redis://127.0.0.1:6379?db=1", "redis://127.0.0.1:6379?db=1", ], "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer", } }, "doesnotexist": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "127.0.0.1:56379:1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer", } }, "sample": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "127.0.0.1:6379:1,127.0.0.1:6379:1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": 
"django_redis.serializers.msgpack.MSGPackSerializer", } }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer", }, "KEY_PREFIX": "test-prefix", }, } INSTALLED_APPS = ( "django.contrib.sessions", ) django-redis-4.9.0/tests/test_sqlite_sharding.py0000644000175000001440000000224313245747656022176 0ustar niwiusers00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { 'default': { 'BACKEND': 'django_redis.cache.RedisCache', 'LOCATION': [ '127.0.0.1:6379:1', '127.0.0.1:6379:2', ], 'OPTIONS': { 'CLIENT_CLASS': 'django_redis.client.ShardClient', } }, 'doesnotexist': { 'BACKEND': 'django_redis.cache.RedisCache', 'LOCATION': [ '127.0.0.1:56379:1', '127.0.0.1:56379:2', ], 'OPTIONS': { 'CLIENT_CLASS': 'django_redis.client.ShardClient', } }, 'sample': { 'BACKEND': 'django_redis.cache.RedisCache', 'LOCATION': '127.0.0.1:6379:1,127.0.0.1:6379:1', 'OPTIONS': { 'CLIENT_CLASS': 'django_redis.client.ShardClient', } }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.ShardClient", }, "KEY_PREFIX": "test-prefix", }, } TEST_RUNNER = 'django.test.runner.DiscoverRunner' INSTALLED_APPS = ( "django.contrib.sessions", ) django-redis-4.9.0/tests/test_sqlite_usock.py0000644000175000001440000000217613245747656021530 0ustar niwiusers00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { 'default': { 'BACKEND': 'redis_cache.cache.RedisCache', 'LOCATION': [ 'unix:/tmp/redis.sock:1', 'unix:/tmp/redis.sock:1', ], 'OPTIONS': { 'CLIENT_CLASS': 'redis_cache.client.DefaultClient', } }, 'doesnotexist': { 'BACKEND': 'redis_cache.cache.RedisCache', 'LOCATION': '127.0.0.1:56379:1', 'OPTIONS': { 'CLIENT_CLASS': 'redis_cache.client.DefaultClient', } }, 'sample': { 'BACKEND': 'redis_cache.cache.RedisCache', 
'LOCATION': '127.0.0.1:6379:1,127.0.0.1:6379:1', 'OPTIONS': { 'CLIENT_CLASS': 'redis_cache.client.DefaultClient', } }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", }, "KEY_PREFIX": "test-prefix", }, } TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner' INSTALLED_APPS = ( "django.contrib.sessions", ) django-redis-4.9.0/tests/test_sqlite_zlib.py0000644000175000001440000000257313245747656021345 0ustar niwiusers00000000000000SECRET_KEY = "django_tests_secret_key" CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": [ "redis://127.0.0.1:6379?db=1", "redis://127.0.0.1:6379?db=1", ], "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor" } }, "doesnotexist": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "127.0.0.1:56379:1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor", } }, "sample": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "127.0.0.1:6379:1,127.0.0.1:6379:1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor", } }, "with_prefix": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": "redis://127.0.0.1:6379?db=1", "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor", }, "KEY_PREFIX": "test-prefix", }, } INSTALLED_APPS = ( "django.contrib.sessions", ) django-redis-4.9.0/AUTHORS.rst0000644000175000001440000000130512471627113016101 0ustar niwiusers00000000000000Andrei Antoukh / niwibe Sean Bleier Matt Dennewitz Jannis Leidel S. 
Angel / Twidi Noah Kantrowitz / coderanger Martin Mahner / bartTC Timothée Peignier / cyberdelia Lior Sion / liorsion Ales Zoulek / aleszoulek James Aylett / jaylett Todd Boland / boland David Zderic / dzderic Kirill Zaitsev / teferi django-redis-4.9.0/CHANGES.txt0000644000175000001440000002200513245753031016032 0ustar niwiusers00000000000000Changelog ========= Version 4.9.0 ------------- Date: 2018-03-01 - Add testing and support for Django 2.0. No actual code changes were required. - Escape `KEY_PREFIX` and `VERSION` when used in glob expressions. - Improve handling timeouts less than 1ms. - Remove fakeredis support. - Add datetime, date, time, and timedelta serialization support to the JSON serializer. - The deprecated feature of passing `True` as a timeout value is no longer supported. - Fix `add()` with a negative timeout to not store key (it is immediately invalid). - Remove support for Django < 1.11. - Add support for atomic incr if key is not set. Version 4.8.0 ------------- Date: 2017-04-25 - Drop deprecated exception with typo ConnectionInterrumped. Use ConnectionInterrupted instead. - Remove many workarounds related to old and not supported versions of django and redis-py. - Code cleaning and flake8 compliance fixes. - Add better impl for `close` method. - Fix compatibility warnings with python 3.6 Version 4.7.0 ------------- Date: 2017-01-02 - Add the ability to enable write to slave when master is not available. - Add `itersize` parameter to `delete_pattern`. Version 4.6.0 ------------- Date: 2016-11-02 - Fix incorrect behavior of `clear()` method. Version 4.5.0 ------------- Date: 2016-09-21 - Now only support Django 1.8 and above. Support for older versions has been dropped. - Remove undocumented and deprecated support for old connection string format. - Add support for `PASSWORD` option (useful when the password contains url unsafe characters). - Make the package compatible with fake redis. 
- Fix compatibility issues with latest django version (1.10). Version 4.4.4 ------------- Date: 2016-07-25 - Fix possible race condition on incr implementation using lua script (thanks to @prokaktus). Version 4.4.3 ------------- Date: 2016-05-17 - Fix minor ttl inconsistencies. Version 4.4.2 ------------- Date: 2016-04-21 - Fix timeout bug (thanks to @skorokithakis) Version 4.4.1 ------------- Date: 2016-04-13 - Add additional check for avoid wrong exception on `get_redis_connection`. Version 4.4.0 ------------- Date: 2016-04-12 - Make redis client pluggable (thanks to @arnuschky) - Add version number inside python module (thanks to @BertrandBordage) - Fix clear method (thanks to @ostcar) - Add the ability to specify key prefix on delete and delete_pattern. - BREAKING CHANGE: improved compression support (make it more plugable). Version 4.3.0 ------------- Date: 2015-10-31 - Improved exception handling in herd client (thanks to @brandoshmando) - Fix bug that not allows use generators on delete_many (thanks to @ostcar). - Remove obsolete code that makes hard dependency to mspack. Version 4.2.0 ------------- Date: 2015-07-03 - Add `persist` and `expire` methods. - Remove old and broken dummy client. - Expose a redis lock method. Version 4.1.0 ------------- Date: 2015-06-15 - Add plugable serializers architecture (thanks to @jdufresne) - Add json serializer (thanks to @jdufresne) - Add msgpack serializer (thanks to @uditagarwal) - Implement delete_pattern using iter_scan for better performance (thanks to @lenzenmi) Version 4.0.0 ------------- - Remove usage of deprecated `get_cache` method. - Added connection option SOCKET_CONNECT_TIMEOUT. [Jorge C. Leitão]. - Replace setex and friends with set, because it now supports all need for atomic. updates (thanks to @23doors) (re revert changes from 3.8.x branch). - Fix django 1.8 compatibilities. - Fix django 1.9 compatibilities. 
- BREAKING CHANGE: Now timeout=0 works as django specified (expires immediately) - Now requires redis server >= 2.8 - BREAKING CHANGE: `redis_cache` is no longer a valid package name Version 3.8.4 ------------- - Backport django 1.8 fixes from master. Version 3.8.3 ------------- - Minor fix on regular expression for old url notation. Version 3.8.2 ------------- - Revert some changes from 3.8.1 that are incompatible with redis server < 2.6.12 Version 3.8.1 ------------- - Fix documentation related to new url format. - Fix documentation parts that uses now removed functions. - Fix invalid url transformation from old format (password was not set properly) - Replace setex and friends with set, because it now supports all need for atomic updates (thanks to @23doors). Version 3.8.0 ------------- - Add compression support. (Thanks to @alanjds) - Change package name from redis_cache to django_redis. - Add backward compatibility layer for redis_cache package name. - BACKWARD INCOMPATIBLE CHANGE: use StrictRedis instead of Redis class of redis-py - Add redis dummy backend for development purposes. (Thanks to @papaloizouc) - Now use redis native url notation for connection string (the own connection string notation is also supported but is marked as deprecated). - Now requires redis-py >= 2.10.0 - Remove deprecated `raw_cache` property from backend. Version 3.7.2 ------------- - Add missing forward of version parameter from `add()` to `set()` function. (by @fellowshipofone) Version 3.7.1 ------------- - Improve docs (by @dkingman). - Fix missing imports on sentinel client (by @opapy). - Connection closing improvements on sentinel client (by @opapy). Version 3.7.0 ------------- - Add support for django's `KEY_FUNCTION` and `REVERSE_KEY_FUNCTION` (by @teferi) - Accept float value for socket timeout. - Fix wrong behavior of `DJANGO_REDIS_IGNORE_EXCEPTIONS` with socket timeouts. - Backward incompatible change: now raises original exceptions instead of self defined. 
Version 3.6.2 ------------- - Add ttl method purposed to be included in django core. - Add iter_keys method that uses redis scan methods for memory efficient keys retrieval. - Add version keyword parameter to keys. - Deprecate django 1.3.x support. Version 3.6.1 ------------- - Fix wrong import on sentinel client. Version 3.6.0 ------------- - Add pluggable connection factory. - Negative timeouts now works as expected. - Delete operation now returns a number of deleted items instead of None. Version 3.5.1 ------------- - Fixed redis-py < 2.9.0 incompatibilities - Fixed runtests error with django 1.7 Version 3.5.0 ------------- - Removed: stats module (should be replaced with an other in future) - New: experimental client for add support to redis-sentinel. - Now uses a django DEFAULT_TIMEOUT constant instead of ``True``. Deprecation warning added for code that now uses True (unlikely). - Fix wrong forward of timeout on shard client. - Fix incr_version wrong behavior when using shard client (wrong client used for set new key). Version 3.4.0 ------------- - Fix exception name from ConnectionInterrumped to ConnectionInterrupted maintaining an old exception class for backward compatibility (thanks Łukasz Langa (@ambv)) - Fix wrong behavior for "default" parameter on get method when DJANGO_REDIS_IGNORE_EXCEPTIONS is True (also thanks to Łukasz Langa (@ambv)). - Now added support for master-slave connection to default client (it still experimental because is not tested in production environments). - Merged SimpleFailoverClient experimental client (only for experiment with it, not ready for use in production) - Django 1.6 cache changes compatibility. Explicitly passing in timeout=None no longer results in using the default timeout. - Major code cleaning. (Thanks to Bertrand Bordage @BertrandBordage) - Bugfixes related to some index error on hashring module. 
Version 3.3.0 ------------- - Add SOCKET_TIMEOUT attribute to OPTIONS (thanks to @eclipticplane) Version 3.2.0 ------------- - Changed default behavior of connection error exceptions: now by default raises exception on connection error is occurred. Thanks to Mümin Öztürk: - cache.add now uses setnx redis command (atomic operation) - cache.incr and cache.decr now uses redis incrby command (atomic operation) Version 3.1.7 ------------- - Fix python3 compatibility on utils module. Version 3.1.6 ------------- - Add nx argument on set method for both clients (thanks to Kirill Zaitsev) Version 3.1.5 ------------- - Bug fixes on sharded client. Version 3.1.4 ------------- - Now reuse connection pool on massive use of `get_cache` method. Version 3.1.3 ------------- - Fixed python 2.6 compatibility. Version 3.1.2 ------------- - Now on call close() not disconnect all connection pool. Version 3.1.1 ------------- - Fixed incorrect exception message on LOCATION has wrong format. (Thanks to Yoav Weiss) Version 3.1 ----------- - Helpers for access to raw redis connection. Version 3.0 ----------- - Python 3.2+ support. - Code cleaning and refactor. - Ignore exceptions (same behavior as memcached backend) - Pluggable clients. - Unified connection string. Version 2.2.2 ------------- - Bug fixes on ``keys`` and ``delete_pattern`` methods. Version 2.2.1 ------------- - Remove duplicate check if key exists on ``incr`` method. - Fix incorrect behavior of ``delete_pattern`` with sharded client. Version 2.2 ----------- - New ``delete_pattern`` method. Useful for delete keys using wildcard syntax. Version 2.1 ----------- - Many bug fixes. - Client side sharding. django-redis-4.9.0/LICENSE0000644000175000001440000000264412770416315015240 0ustar niwiusers00000000000000Copyright (c) 2011-2016 Andrey Antukh Copyright (c) 2011 Sean Bleier All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. django-redis-4.9.0/MANIFEST.in0000644000175000001440000000024013174553301015753 0ustar niwiusers00000000000000include LICENSE include AUTHORS.rst include README.rst include CHANGES.txt recursive-include tests README.txt *.py recursive-include doc Makefile *.adoc *.html django-redis-4.9.0/README.rst0000644000175000001440000000133013041623551015703 0ustar niwiusers00000000000000============================== Redis cache backend for Django ============================== Full featured redis cache backend for Django. .. image:: https://img.shields.io/travis/niwinz/django-redis.svg?style=flat :target: https://travis-ci.org/niwinz/django-redis .. 
image:: https://img.shields.io/pypi/v/django-redis.svg?style=flat :target: https://pypi.python.org/pypi/django-redis Documentation ------------- http://niwinz.github.io/django-redis/latest/ 简体中文版: http://django-redis-chs.readthedocs.io/zh_CN/latest/ How to install -------------- Run ``python setup.py install`` to install, or place ``django_redis`` on your Python path. You can also install it with: ``pip install django-redis`` django-redis-4.9.0/setup.cfg0000644000175000001440000000050013245753134016042 0ustar niwiusers00000000000000[bdist_wheel] universal = 1 [flake8] ignore = W601 max-line-length = 119 [isort] combine_as_imports = true default_section = THIRDPARTY include_trailing_comma = true known_first_party = django_redis multi_line_output = 5 not_skip = __init__.py [metadata] license_file = LICENSE [egg_info] tag_build = tag_date = 0 django-redis-4.9.0/setup.py0000644000175000001440000000276313245747656015764 0ustar niwiusers00000000000000from setuptools import setup from django_redis import __version__ description = """ Full featured redis cache backend for Django. 
""" setup( name="django-redis", url="https://github.com/niwibe/django-redis", author="Andrei Antoukh", author_email="niwi@niwi.nz", version=__version__, packages=[ "django_redis", "django_redis.client", "django_redis.serializers", "django_redis.compressors" ], description=description.strip(), python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", install_requires=[ "Django>=1.11", "redis>=2.10.0", ], zip_safe=False, include_package_data=True, package_data={ "": ["*.html"], }, classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Framework :: Django", "Framework :: Django :: 1.11", "Framework :: Django :: 2.0", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Software Development :: Libraries", "Topic :: Utilities", ], ) django-redis-4.9.0/PKG-INFO0000644000175000001440000000216113245753134015323 0ustar niwiusers00000000000000Metadata-Version: 1.2 Name: django-redis Version: 4.9.0 Summary: Full featured redis cache backend for Django. 
Home-page: https://github.com/niwibe/django-redis Author: Andrei Antoukh Author-email: niwi@niwi.nz License: UNKNOWN Description-Content-Type: UNKNOWN Description: UNKNOWN Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Environment :: Web Environment Classifier: Framework :: Django Classifier: Framework :: Django :: 1.11 Classifier: Framework :: Django :: 2.0 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: BSD License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Topic :: Software Development :: Libraries Classifier: Topic :: Utilities Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*