././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9874682 cachy-0.3.0/LICENSE0000644000000000000000000000204600000000000011737 0ustar0000000000000000Copyright (c) 2015 Sébastien Eustace Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1565119137.987576 cachy-0.3.0/README.rst0000644000000000000000000000063000000000000012416 0ustar0000000000000000Cachy ##### .. image:: https://travis-ci.org/sdispater/cachy.png :alt: Cachy Build status :target: https://travis-ci.org/sdispater/cachy Cachy provides a simple yet effective caching library. 
The full documentation is available here: http://cachy.readthedocs.org Resources ========= * `Documentation `_ * `Issue Tracker `_ ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565122351.7049751 cachy-0.3.0/cachy/__init__.py0000644000000000000000000000017400000000000014132 0ustar0000000000000000# -*- coding: utf-8 -*- from .cache_manager import CacheManager from .repository import Repository __version__ = "0.3.0" ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9880502 cachy-0.3.0/cachy/cache_manager.py0000644000000000000000000001625300000000000015135 0ustar0000000000000000# -*- coding: utf-8 -*- import threading import types from .contracts.factory import Factory from .contracts.store import Store from .stores import ( DictStore, FileStore, RedisStore, MemcachedStore ) from .repository import Repository from .serializers import ( Serializer, JsonSerializer, MsgPackSerializer, PickleSerializer ) class CacheManager(Factory, threading.local): """ A CacheManager is a pool of cache stores. """ _serializers = { 'json': JsonSerializer(), 'msgpack': MsgPackSerializer(), 'pickle': PickleSerializer() } def __init__(self, config): super(CacheManager, self).__init__() self._config = config self._stores = {} self._custom_creators = {} self._serializer = self._resolve_serializer(config.get('serializer', 'pickle')) def store(self, name=None): """ Get a cache store instance by name. :param name: The cache store name :type name: str :rtype: Repository """ if name is None: name = self.get_default_driver() self._stores[name] = self._get(name) return self._stores[name] def driver(self, name=None): """ Get a cache store instance by name. :param name: The cache store name :type name: str :rtype: Repository """ return self.store(name) def _get(self, name): """ Attempt to get the store from the local cache. 
:param name: The store name :type name: str :rtype: Repository """ return self._stores.get(name, self._resolve(name)) def _resolve(self, name): """ Resolve the given store :param name: The store to resolve :type name: str :rtype: Repository """ config = self._get_config(name) if not config: raise RuntimeError('Cache store [%s] is not defined.' % name) if config['driver'] in self._custom_creators: repository = self._call_custom_creator(config) else: repository = getattr(self, '_create_%s_driver' % config['driver'])(config) if 'serializer' in config: serializer = self._resolve_serializer(config['serializer']) else: serializer = self._serializer repository.get_store().set_serializer(serializer) return repository def _call_custom_creator(self, config): """ Call a custom driver creator. :param config: The driver configuration :type config: dict :rtype: Repository """ creator = self._custom_creators[config['driver']](config) if isinstance(creator, Store): creator = self.repository(creator) if not isinstance(creator, Repository): raise RuntimeError('Custom creator should return a Repository instance.') return creator def _create_dict_driver(self, config): """ Create an instance of the dict cache driver. :param config: The driver configuration :type config: dict :rtype: Repository """ return self.repository(DictStore()) def _create_file_driver(self, config): """ Create an instance of the file cache driver. :param config: The driver configuration :type config: dict :rtype: Repository """ kwargs = { 'directory': config['path'] } if 'hash_type' in config: kwargs['hash_type'] = config['hash_type'] return self.repository(FileStore(**kwargs)) def _create_redis_driver(self, config): """ Create an instance of the redis cache driver. :param config: The driver configuration :type config: dict :return: Repository """ return self.repository(RedisStore(**config)) def _create_memcached_driver(self, config): """ Create an instance of the redis cache driver. 
:param config: The driver configuration :type config: dict :return: Repository """ return self.repository(MemcachedStore(**config)) def repository(self, store): """ Create a new cache repository with the given implementation. :param store: The cache store implementation instance :type store: Store :rtype: Repository """ repository = Repository(store) return repository def _get_prefix(self, config): """ Get the cache prefix. :param config: The configuration :type config: dict :rtype: str """ return config.get('prefix', '') def _get_config(self, name): """ Get the cache connection configuration. :param name: The cache name :type name: str :rtype: dict """ return self._config['stores'].get(name) def get_default_driver(self): """ Get the default cache driver name. :rtype: str :raises: RuntimeError """ if 'default' in self._config: return self._config['default'] if len(self._config['stores']) == 1: return list(self._config['stores'].keys())[0] raise RuntimeError('Missing "default" cache in configuration.') def set_default_driver(self, name): """ Set the default cache driver name. :param name: The default cache driver name :type name: str """ self._config['default'] = name def extend(self, driver, store): """ Register a custom driver creator. :param driver: The driver :type driver: name :param store: The store class :type store: Store or callable :rtype: self """ self._custom_creators[driver] = store return self def _resolve_serializer(self, serializer): """ Resolve the given serializer. :param serializer: The serializer to resolve :type serializer: str or Serializer :rtype: Serializer """ if isinstance(serializer, Serializer): return serializer if serializer in self._serializers: return self._serializers[serializer] raise RuntimeError('Unsupported serializer') def register_serializer(self, name, serializer): """ Register a new serializer. 
:param name: The name of the serializer :type name: str :param serializer: The serializer :type serializer: Serializer """ self._serializers[name] = serializer def __getattr__(self, item): return getattr(self.store(), item) def __call__(self, store=None, *args, **kwargs): if isinstance(store, (types.FunctionType, types.MethodType)): fn = store if len(args) > 0: store = args[0] args = args[1:] if len(args) > 1 else [] else: store = None args = (fn,) + args return self.store(store)(*args, **kwargs) else: return self.store(store)(*args, **kwargs) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9882753 cachy-0.3.0/cachy/contracts/__init__.py0000644000000000000000000000003100000000000016122 0ustar0000000000000000# -*- coding: utf-8 -*- ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9884694 cachy-0.3.0/cachy/contracts/factory.py0000644000000000000000000000050300000000000016036 0ustar0000000000000000# -*- coding: utf-8 -*- class Factory(object): """ Represent a cahce factory. """ def store(self, name=None): """ Get a cache store instance by name. :param name: The cache store name :type name: str :rtype: mixed """ raise NotImplementedError() ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9886162 cachy-0.3.0/cachy/contracts/repository.py0000644000000000000000000000557600000000000016625 0ustar0000000000000000# -*- coding: utf-8 -*- class Repository(object): def has(self, key): """ Determine if an item exists in the cache. :param key: The cache key :type key: str :rtype: bool """ raise NotImplementedError() def get(self, key, default=None): """ Retrieve an item from the cache by key. 
:param key: The cache key :type key: str :param default: The default value to return :type default: mixed :rtype: mixed """ raise NotImplementedError() def pull(self, key, default=None): """ Retrieve an item from the cache by key and delete ir. :param key: The cache key :type key: str :param default: The default value to return :type default: mixed :rtype: mixed """ raise NotImplementedError() def put(self, key, value, minutes): """ Store an item in the cache. :param key: The cache key :type key: str :param value: The cache value :type value: mixed :param minutes: The lifetime in minutes of the cached value :type minutes: int or datetime """ raise NotImplementedError() def add(self, key, value, minutes): """ Store an item in the cache if it does not exist. :param key: The cache key :type key: str :param value: The cache value :type value: mixed :param minutes: The lifetime in minutes of the cached value :type minutes: int or datetime :rtype: bool """ raise NotImplementedError() def forever(self, key, value): """ Store an item in the cache indefinitely. :param key: The cache key :type key: str :param value: The cache value :type value: mixed """ raise NotImplementedError() def remember(self, key, minutes, callback): """ Get an item from the cache, or store the default value. :param key: The cache key :type key: str :param minutes: The lifetime in minutes of the cached value :type minutes: int or datetime :param callback: The default function :type callback: callable :rtype: mixed """ raise NotImplementedError() def remember_forever(self, key, callback): """ Get an item from the cache, or store the default value forever. :param key: The cache key :type key: str :param callback: The default function :type callback: callable :rtype: mixed """ raise NotImplementedError() def forget(self, key): """ Remove an item from the cache. 
:param key: The cache key :type key: str :rtype: bool """ raise NotImplementedError() ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9887702 cachy-0.3.0/cachy/contracts/store.py0000644000000000000000000000504200000000000015526 0ustar0000000000000000# -*- coding: utf-8 -*- from ..serializers import PickleSerializer class Store(object): """ Abstract class representing a cache store. """ _serializer = PickleSerializer() def get(self, key): """ Retrieve an item from the cache by key. :param key: The cache key :type key: str :return: The cache value """ raise NotImplementedError() def put(self, key, value, minutes): """ Store an item in the cache for a given number of minutes. :param key: The cache key :type key: str :param value: The cache value :type value: mixed :param minutes: The lifetime in minutes of the cached value :type minutes: int """ raise NotImplementedError() def increment(self, key, value=1): """ Increment the value of an item in the cache. :param key: The cache key :type key: str :param value: The increment value :type value: int :rtype: int or bool """ raise NotImplementedError() def decrement(self, key, value=1): """ Decrement the value of an item in the cache. :param key: The cache key :type key: str :param value: The decrement value :type value: int :rtype: int or bool """ raise NotImplementedError() def forever(self, key, value): """ Store an item in the cache indefinitely. :param key: The cache key :type key: str :param value: The value :type value: mixed """ raise NotImplementedError() def forget(self, key): """ Remove an item from the cache. :param key: The cache key :type key: str :rtype: bool """ raise NotImplementedError() def flush(self): """ Remove all items from the cache. """ raise NotImplementedError() def get_prefix(self): """ Get the cache key prefix. :rtype: str """ raise NotImplementedError() def set_serializer(self, serializer): """ Set the serializer. 
:param serializer: The serializer :type serializer: cachy.serializers.Serializer :rtype: Store """ self._serializer = serializer return self def unserialize(self, data): return self._serializer.unserialize(data) def serialize(self, data): return self._serializer.serialize(data) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9890203 cachy-0.3.0/cachy/contracts/taggable_store.py0000644000000000000000000000076100000000000017357 0ustar0000000000000000# -*- coding: utf-8 -*- from .store import Store from ..tagged_cache import TaggedCache from ..tag_set import TagSet class TaggableStore(Store): def tags(self, *names): """ Begin executing a new tags operation. :param names: The tags :type names: tuple :rtype: cachy.tagged_cache.TaggedCache """ if len(names) == 1 and isinstance(names[0], list): names = names[0] return TaggedCache(self, TagSet(self, names)) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9891593 cachy-0.3.0/cachy/helpers.py0000644000000000000000000000014500000000000014033 0ustar0000000000000000# -*- coding: utf-8 -*- def value(val): if callable(val): return val() return val ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9893177 cachy-0.3.0/cachy/redis_tagged_cache.py0000644000000000000000000000417300000000000016142 0ustar0000000000000000# -*- coding: utf-8 -*- import hashlib from .tagged_cache import TaggedCache from .utils import encode class RedisTaggedCache(TaggedCache): def forever(self, key, value): """ Store an item in the cache indefinitely. :param key: The cache key :type key: str :param value: The value :type value: mixed """ namespace = self._tags.get_namespace() self._push_forever_keys(namespace, key) self._store.forever( '%s:%s' % (hashlib.sha1(encode(self._tags.get_namespace())).hexdigest(), key), value ) def flush(self): """ Remove all items from the cache. 
""" self._delete_forever_keys() super(RedisTaggedCache, self).flush() def _push_forever_keys(self, namespace, key): """ Store a copy of the full key for each namespace segment. :type namespace: str :type key: str """ full_key = '%s%s:%s' % (self.get_prefix(), hashlib.sha1(encode(self._tags.get_namespace())).hexdigest(), key) for segment in namespace.split('|'): self._store.connection().lpush(self._forever_key(segment), full_key) def _delete_forever_keys(self): """ Delete all of the items that were stored forever. """ for segment in self._tags.get_namespace().split('|'): segment = self._forever_key(segment) self._delete_forever_values(segment) self._store.connection().delete(segment) def _delete_forever_values(self, forever_key): """ Delete all of the keys that have been stored forever. :type forever_key: str """ forever = self._store.connection().lrange(forever_key, 0, -1) if len(forever) > 0: self._store.connection().delete(*forever) def _forever_key(self, segment): """ Get the forever reference key for the segment. :type segment: str :rtype: str """ return '%s%s:forever' % (self.get_prefix(), segment) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9895122 cachy-0.3.0/cachy/repository.py0000644000000000000000000002007200000000000014611 0ustar0000000000000000# -*- coding: utf-8 -*- import math import datetime import types import hashlib from functools import wraps from .contracts.repository import Repository as CacheContract from .helpers import value from .utils import encode, decode class Repository(CacheContract): _default = 60 def __init__(self, store): """ :param store: The underlying cache store :type store: Store """ self._store = store def has(self, key): """ Determine if an item exists in the cache. :param key: The cache key :type key: str :rtype: bool """ return self.get(key) is not None def get(self, key, default=None): """ Retrieve an item from the cache by key. 
:param key: The cache key :type key: str :param default: The default value to return :type default: mixed :rtype: mixed """ val = self._store.get(key) if val is None: return value(default) return val def pull(self, key, default=None): """ Retrieve an item from the cache by key and delete ir. :param key: The cache key :type key: str :param default: The default value to return :type default: mixed :rtype: mixed """ val = self.get(key, default) self.forget(key) return val def put(self, key, val, minutes): """ Store an item in the cache. :param key: The cache key :type key: str :param val: The cache value :type val: mixed :param minutes: The lifetime in minutes of the cached value :type minutes: int|datetime """ minutes = self._get_minutes(minutes) if minutes is not None: self._store.put(key, val, minutes) def add(self, key, val, minutes): """ Store an item in the cache if it does not exist. :param key: The cache key :type key: str :param val: The cache value :type val: mixed :param minutes: The lifetime in minutes of the cached value :type minutes: int|datetime :rtype: bool """ if hasattr(self._store, 'add'): return self._store.add(key, val, self._get_minutes(minutes)) if not self.has(key): self.put(key, val, minutes) return True return False def forever(self, key, val): """ Store an item in the cache indefinitely. :param key: The cache key :type key: str :param val: The cache value :type val: mixed """ self._store.forever(key, val) def remember(self, key, minutes, callback): """ Get an item from the cache, or store the default value. :param key: The cache key :type key: str :param minutes: The lifetime in minutes of the cached value :type minutes: int or datetime :param callback: The default function :type callback: mixed :rtype: mixed """ # If the item exists in the cache we will just return this immediately # otherwise we will execute the given callback and cache the result # of that execution for the given number of minutes in storage. 
val = self.get(key) if val is not None: return val val = value(callback) self.put(key, val, minutes) return val def remember_forever(self, key, callback): """ Get an item from the cache, or store the default value forever. :param key: The cache key :type key: str :param callback: The default function :type callback: mixed :rtype: mixed """ # If the item exists in the cache we will just return this immediately # otherwise we will execute the given callback and cache the result # of that execution forever. val = self.get(key) if val is not None: return val val = value(callback) self.forever(key, val) return val def forget(self, key): """ Remove an item from the cache. :param key: The cache key :type key: str :rtype: bool """ success = self._store.forget(key) return success def get_default_cache_time(self): """ Get the default cache time. :rtype: int """ return self._default def set_default_cache_time(self, minutes): """ Set the default cache time. :param minutes: The default cache time :type minutes: int :rtype: self """ self._default = minutes return self def get_store(self): """ Get the cache store implementation. :rtype: Store """ return self._store def __getitem__(self, item): return self.get(item) def __setitem__(self, key, val): self.put(key, val, self._default) def __delitem__(self, key): self.forget(key) def _get_minutes(self, duration): """ Calculate the number of minutes with the given duration. :param duration: The duration :type duration: int or datetime :rtype: int or None """ if isinstance(duration, datetime.datetime): from_now = (duration - datetime.datetime.now()).total_seconds() from_now = math.ceil(from_now / 60) if from_now > 0: return from_now return return duration def _hash(self, value): """ Calculate the hash given a value. :param value: The value to hash :type value: str or bytes :rtype: str """ return hashlib.sha1(encode(value)).hexdigest() def _get_key(self, fn, args, kwargs): """ Calculate a cache key given a function, args and kwargs. 
:param fn: The function :type fn: callable or str :param args: The function args :type args: tuple :param kwargs: The function kwargs :type kwargs: dict :rtype: str """ if args: serialized_arguments = ( self._store.serialize(args[1:]) + self._store.serialize([(k, kwargs[k]) for k in sorted(kwargs.keys())]) ) else: serialized_arguments = self._store.serialize([(k, kwargs[k]) for k in sorted(kwargs.keys())]) if isinstance(fn, types.MethodType): key = self._hash('%s.%s.%s' % (fn.__self__.__class__.__name__, args[0].__name__, serialized_arguments)) elif isinstance(fn, types.FunctionType): key = self._hash('%s.%s' % (fn.__name__, serialized_arguments)) else: key = '%s:' % fn + self._hash(serialized_arguments) return key def __getattr__(self, item): try: return object.__getattribute__(self, item) except AttributeError: return getattr(self._store, item) def __call__(self, *args, **kwargs): if args and isinstance(args[0], (types.FunctionType, types.MethodType)): fn = args[0] @wraps(fn) def wrapper(*a, **kw): return self.remember( self._get_key(fn, a, kw), self._default, lambda: fn(*a, **kw) ) return wrapper else: k = kwargs.get('key') minutes = kwargs.get('minutes', self._default) def decorated(fn): key = k @wraps(fn) def wrapper(*a, **kw): return self.remember( self._get_key(key or fn, a, kw), minutes, lambda: fn(*a, **kw) ) return wrapper return decorated ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9898167 cachy-0.3.0/cachy/serializers/__init__.py0000644000000000000000000000031200000000000016460 0ustar0000000000000000# -*- coding: utf-8 -*- from .serializer import Serializer from .json_serializer import JsonSerializer from .msgpack_serializer import MsgPackSerializer from .pickle_serializer import PickleSerializer ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9901898 
cachy-0.3.0/cachy/serializers/json_serializer.py0000644000000000000000000000125300000000000020130 0ustar0000000000000000# -*- coding: utf-8 -*- try: import simplejson as json except ImportError: import json from cachy.utils import decode from .serializer import Serializer class JsonSerializer(Serializer): """ Serializer that uses JSON representations. """ def serialize(self, data): """ Serialize data. :param data: The data to serialize :type data: mixed :rtype: str """ return json.dumps(data) def unserialize(self, data): """ Unserialize data. :param data: The data to unserialize :type data: mixed :rtype: str """ return json.loads(decode(data)) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9903908 cachy-0.3.0/cachy/serializers/msgpack_serializer.py0000644000000000000000000000145600000000000020611 0ustar0000000000000000# -*- coding: utf-8 -*- try: import msgpack except ImportError: msgpack = None from .serializer import Serializer class MsgPackSerializer(Serializer): """ Serializer that uses `msgpack `_ representations. By default, this serializer does not support serializing custom objects. """ def serialize(self, data): """ Serialize data. :param data: The data to serialize :type data: mixed :rtype: str """ return msgpack.packb(data, use_bin_type=True) def unserialize(self, data): """ Unserialize data. 
:param data: The data to unserialize :type data: mixed :rtype: str """ return msgpack.unpackb(data, encoding='utf-8') ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9905167 cachy-0.3.0/cachy/serializers/pickle_serializer.py0000644000000000000000000000152000000000000020423 0ustar0000000000000000# -*- coding: utf-8 -*- from functools import partial try: import cPickle as pickle except ImportError: # noqa import pickle # Serialize pickle dumps using the highest pickle protocol (binary, default # uses ascii) dumps = partial(pickle.dumps, protocol=pickle.HIGHEST_PROTOCOL) loads = pickle.loads from .serializer import Serializer class PickleSerializer(Serializer): """ Serializer that uses the pickle module. """ def serialize(self, data): """ Serialize data. :param data: The data to serialize :type data: mixed :rtype: str """ return dumps(data) def unserialize(self, data): """ Unserialize data. :param data: The data to unserialize :type data: mixed :rtype: str """ return loads(data) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9906425 cachy-0.3.0/cachy/serializers/serializer.py0000644000000000000000000000100100000000000017066 0ustar0000000000000000# -*- coding: utf-8 -*- class Serializer(object): """ Abstract serializer. """ def serialize(self, data): """ Serialize data. :param data: The data to serialize :type data: mixed :rtype: str """ raise NotImplementedError() def unserialize(self, data): """ Unserialize data. 
:param data: The data to unserialize :type data: mixed :rtype: str """ raise NotImplementedError() ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9908035 cachy-0.3.0/cachy/stores/__init__.py0000644000000000000000000000031700000000000015450 0ustar0000000000000000# -*- coding: utf-8 -*- from .dict_store import DictStore from .file_store import FileStore from .memcached_store import MemcachedStore from .redis_store import RedisStore from .null_store import NullStore ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1565119137.990902 cachy-0.3.0/cachy/stores/dict_store.py0000644000000000000000000000716400000000000016057 0ustar0000000000000000# -*- coding: utf-8 -*- import time import math from ..contracts.taggable_store import TaggableStore class DictStore(TaggableStore): """ A cache store using a dictionary as its backend. """ def __init__(self): self._storage = {} def get(self, key): """ Retrieve an item from the cache by key. :param key: The cache key :type key: str :return: The cache value """ return self._get_payload(key)[0] def _get_payload(self, key): """ Retrieve an item and expiry time from the cache by key. :param key: The cache key :type key: str :rtype: dict """ payload = self._storage.get(key) # If the key does not exist, we return nothing if not payload: return (None, None) expire = payload[0] # If the current time is greater than expiration timestamps we will delete # the entry if round(time.time()) >= expire: self.forget(key) return (None, None) data = payload[1] # Next, we'll extract the number of minutes that are remaining for a cache # so that we can properly retain the time for things like the increment # operation that may be performed on the cache. We'll round this out. time_ = math.ceil((expire - round(time.time())) / 60.) return (data, time_) def put(self, key, value, minutes): """ Store an item in the cache for a given number of minutes. 
:param key: The cache key :type key: str :param value: The cache value :type value: mixed :param minutes: The lifetime in minutes of the cached value :type minutes: int """ self._storage[key] = (self._expiration(minutes), value) def increment(self, key, value=1): """ Increment the value of an item in the cache. :param key: The cache key :type key: str :param value: The increment value :type value: int :rtype: int or bool """ data, time_ = self._get_payload(key) integer = int(data) + value self.put(key, integer, int(time_)) return integer def decrement(self, key, value=1): """ Decrement the value of an item in the cache. :param key: The cache key :type key: str :param value: The decrement value :type value: int :rtype: int or bool """ return self.increment(key, value * -1) def forever(self, key, value): """ Store an item in the cache indefinitely. :param key: The cache key :type key: str :param value: The increment value :type value: int """ self.put(key, value, 0) def forget(self, key): """ Remove an item from the cache. :param key: The cache key :type key: str :rtype: bool """ if key in self._storage: del self._storage[key] return True return False def flush(self): """ Remove all items from the cache. """ self._storage = {} def _expiration(self, minutes): """ Get the expiration time based on the given minutes. :param minutes: The minutes :type minutes: int :rtype: int """ if minutes == 0: return 9999999999 return round(time.time()) + (minutes * 60) def get_prefix(self): """ Get the cache key prefix. :rtype: str """ return '' ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9910028 cachy-0.3.0/cachy/stores/file_store.py0000644000000000000000000001325600000000000016052 0ustar0000000000000000# -*- coding: utf-8 -*- import os import time import math import hashlib from ..contracts.store import Store from ..utils import mkdir_p, encode class FileStore(Store): """ A cache store using the filesystem as its backend. 
""" _HASHES = { 'md5': (hashlib.md5, 2), 'sha1': (hashlib.sha1, 4), 'sha256': (hashlib.sha256, 8) } def __init__(self, directory, hash_type='sha256'): """ :param directory: The cache directory :type directory: str """ self._directory = directory if hash_type not in self._HASHES: raise ValueError('hash_type "{}" is not valid.'.format(hash_type)) self._hash_type = hash_type def get(self, key): """ Retrieve an item from the cache by key. :param key: The cache key :type key: str :return: The cache value """ return self._get_payload(key).get('data') def _get_payload(self, key): """ Retrieve an item and expiry time from the cache by key. :param key: The cache key :type key: str :rtype: dict """ path = self._path(key) # If the file doesn't exists, we obviously can't return the cache so we will # just return null. Otherwise, we'll get the contents of the file and get # the expiration UNIX timestamps from the start of the file's contents. if not os.path.exists(path): return {'data': None, 'time': None} with open(path, 'rb') as fh: contents = fh.read() expire = int(contents[:10]) # If the current time is greater than expiration timestamps we will delete # the file and return null. This helps clean up the old files and keeps # this directory much cleaner for us as old files aren't hanging out. if round(time.time()) >= expire: self.forget(key) return {'data': None, 'time': None} data = self.unserialize(contents[10:]) # Next, we'll extract the number of minutes that are remaining for a cache # so that we can properly retain the time for things like the increment # operation that may be performed on the cache. We'll round this out. time_ = math.ceil((expire - round(time.time())) / 60.) return {'data': data, 'time': time_} def put(self, key, value, minutes): """ Store an item in the cache for a given number of minutes. 
:param key: The cache key :type key: str :param value: The cache value :type value: mixed :param minutes: The lifetime in minutes of the cached value :type minutes: int """ value = encode(str(self._expiration(minutes))) + encode(self.serialize(value)) path = self._path(key) self._create_cache_directory(path) with open(path, 'wb') as fh: fh.write(value) def _create_cache_directory(self, path): """ Create the file cache directory if necessary :param path: The cache path :type path: str """ mkdir_p(os.path.dirname(path)) def increment(self, key, value=1): """ Increment the value of an item in the cache. :param key: The cache key :type key: str :param value: The increment value :type value: int :rtype: int or bool """ raw = self._get_payload(key) integer = int(raw['data']) + value self.put(key, integer, int(raw['time'])) return integer def decrement(self, key, value=1): """ Decrement the value of an item in the cache. :param key: The cache key :type key: str :param value: The decrement value :type value: int :rtype: int or bool """ return self.increment(key, value * -1) def forever(self, key, value): """ Store an item in the cache indefinitely. :param key: The cache key :type key: str :param value: The increment value :type value: int """ self.put(key, value, 0) def forget(self, key): """ Remove an item from the cache. :param key: The cache key :type key: str :rtype: bool """ path = self._path(key) if os.path.exists(path): os.remove(path) return True return False def flush(self): """ Remove all items from the cache. """ if os.path.isdir(self._directory): for root, dirs, files in os.walk(self._directory, topdown=False): for name in files: os.remove(os.path.join(root, name)) for name in dirs: os.rmdir(os.path.join(root, name)) def _path(self, key): """ Get the full path for the given cache key. 
:param key: The cache key :type key: str :rtype: str """ hash_type, parts_count = self._HASHES[self._hash_type] h = hash_type(encode(key)).hexdigest() parts = [h[i:i+2] for i in range(0, len(h), 2)][:parts_count] return os.path.join(self._directory, os.path.sep.join(parts), h) def _expiration(self, minutes): """ Get the expiration time based on the given minutes. :param minutes: The minutes :type minutes: int :rtype: int """ if minutes == 0: return 9999999999 return int(round(time.time()) + (minutes * 60)) def get_prefix(self): """ Get the cache key prefix. :rtype: str """ return '' return '' ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9911747 cachy-0.3.0/cachy/stores/memcached_store.py0000644000000000000000000000565000000000000017040 0ustar0000000000000000# -*- coding: utf-8 -*- try: from pylibmc import memcache except ImportError: try: import memcache except ImportError: memcache = None from ..contracts.taggable_store import TaggableStore class MemcachedStore(TaggableStore): def __init__(self, servers, prefix='', **kwargs): # Removing potential "driver" key kwargs.pop('driver', None) self._prefix = prefix self._memcache = memcache.Client(servers, **kwargs) def get(self, key): """ Retrieve an item from the cache by key. :param key: The cache key :type key: str :return: The cache value """ return self._memcache.get(self._prefix + key) def put(self, key, value, minutes): """ Store an item in the cache for a given number of minutes. :param key: The cache key :type key: str :param value: The cache value :type value: mixed :param minutes: The lifetime in minutes of the cached value :type minutes: int """ self._memcache.set(self._prefix + key, value, minutes * 60) def add(self, key, val, minutes): """ Store an item in the cache if it does not exist. 
# MemcachedStore (cachy/stores/memcached_store.py): a taggable cache store
# backed by a memcached client (pylibmc or python-memcached).

try:
    from pylibmc import memcache
except ImportError:
    try:
        import memcache
    except ImportError:
        memcache = None


def __init__(self, servers, prefix='', **kwargs):
    """
    :param servers: The memcached server addresses
    :param prefix: Prefix applied to every cache key
    :type prefix: str
    """
    # Removing potential "driver" key
    kwargs.pop('driver', None)

    self._prefix = prefix
    self._memcache = memcache.Client(servers, **kwargs)

def get(self, key):
    """
    Retrieve an item from the cache by key.

    :param key: The cache key
    :type key: str

    :return: The cache value
    """
    return self._memcache.get(self._prefix + key)

def put(self, key, value, minutes):
    """
    Store an item in the cache for a given number of minutes.

    :param key: The cache key
    :type key: str

    :param value: The cache value
    :type value: mixed

    :param minutes: The lifetime in minutes of the cached value
    :type minutes: int
    """
    # memcached expiries are expressed in seconds.
    self._memcache.set(self._prefix + key, value, minutes * 60)

def add(self, key, val, minutes):
    """
    Store an item in the cache if it does not exist.

    :param key: The cache key
    :type key: str

    :param val: The cache value
    :type val: mixed

    :param minutes: The lifetime in minutes of the cached value
    :type minutes: int

    :rtype: bool
    """
    return self._memcache.add(self._prefix + key, val, minutes * 60)

def increment(self, key, value=1):
    """
    Increment the value of an item in the cache.

    :param key: The cache key
    :type key: str

    :param value: The increment value
    :type value: int

    :rtype: int or bool
    """
    return self._memcache.incr(self._prefix + key, value)

def decrement(self, key, value=1):
    """
    Decrement the value of an item in the cache.

    :param key: The cache key
    :type key: str

    :param value: The decrement value
    :type value: int

    :rtype: int or bool
    """
    return self._memcache.decr(self._prefix + key, value)

def forever(self, key, value):
    """
    Store an item in the cache indefinitely.

    :param key: The cache key
    :type key: str

    :param value: The value
    :type value: mixed
    """
    self.put(key, value, 0)

def forget(self, key):
    """
    Remove an item from the cache.

    :param key: The cache key
    :type key: str

    :rtype: bool
    """
    return self._memcache.delete(self._prefix + key)

def flush(self):
    """
    Remove all items from the cache.
    """
    self._memcache.flush_all()

def get_prefix(self):
    """
    Get the cache key prefix.

    :rtype: str
    """
    return self._prefix
# NullStore (cachy/stores/null_store.py): a no-op store for development and
# test environments — it never stores anything.

def put(self, key, value, minutes):
    """
    Pretend to store an item; does nothing.

    :param key: The cache key
    :type key: str

    :param value: The cache value
    :type value: mixed

    :param minutes: The lifetime in minutes of the cached value
    :type minutes: int
    """
    return None

def increment(self, key, value=1):
    """
    Pretend to increment an item; does nothing.

    :param key: The cache key
    :type key: str

    :param value: The increment value
    :type value: int

    :rtype: None
    """
    return None

def decrement(self, key, value=1):
    """
    Pretend to decrement an item; does nothing.

    :param key: The cache key
    :type key: str

    :param value: The decrement value
    :type value: int

    :rtype: None
    """
    return None

def forever(self, key, value):
    """
    Pretend to store an item indefinitely; does nothing.

    :param key: The cache key
    :type key: str

    :param value: The value
    :type value: mixed
    """
    return None

def forget(self, key):
    """
    Pretend to remove an item; does nothing.

    :param key: The cache key
    :type key: str

    :rtype: None
    """
    return None

def flush(self):
    """
    Pretend to remove all items; does nothing.
    """
    return None

def get_prefix(self):
    """
    Get the cache key prefix (always empty).

    :rtype: str
    """
    return ''
# RedisStore (cachy/stores/redis_store.py): a taggable cache store backed by
# Redis.

try:
    from redis import StrictRedis
except ImportError:
    StrictRedis = None


def __init__(self, host='localhost', port=6379, db=0, password=None,
             prefix='', redis_class=StrictRedis, **kwargs):
    """
    :param prefix: Prefix applied to every cache key
    :type prefix: str

    :param redis_class: The Redis client class to instantiate
    """
    # Removing potential "driver" key
    kwargs.pop('driver', None)

    self._prefix = prefix
    self._redis = redis_class(host=host, port=port, db=db,
                              password=password, **kwargs)

def get(self, key):
    """
    Retrieve an item from the cache by key.

    :param key: The cache key
    :type key: str

    :return: The cache value, or None on a miss
    """
    raw = self._redis.get(self._prefix + key)

    if raw is not None:
        return self.unserialize(raw)

def put(self, key, value, minutes):
    """
    Store an item in the cache for a given number of minutes.

    :param key: The cache key
    :type key: str

    :param value: The cache value
    :type value: mixed

    :param minutes: The lifetime in minutes of the cached value
    :type minutes: int
    """
    serialized = self.serialize(value)
    # SETEX requires a strictly positive TTL.
    minutes = max(1, minutes)

    self._redis.setex(self._prefix + key, minutes * 60, serialized)

def increment(self, key, value=1):
    """
    Increment the value of an item in the cache.

    :param key: The cache key
    :type key: str

    :param value: The increment value
    :type value: int

    :rtype: int
    """
    return self._redis.incrby(self._prefix + key, value)

def decrement(self, key, value=1):
    """
    Decrement the value of an item in the cache.

    :param key: The cache key
    :type key: str

    :param value: The decrement value
    :type value: int

    :rtype: int
    """
    return self._redis.decr(self._prefix + key, value)

def forever(self, key, value):
    """
    Store an item in the cache indefinitely (no TTL).

    :param key: The cache key
    :type key: str

    :param value: The value to store
    :type value: mixed
    """
    self._redis.set(self._prefix + key, self.serialize(value))

def forget(self, key):
    """
    Remove an item from the cache.

    :param key: The cache key
    :type key: str

    :rtype: bool
    """
    return bool(self._redis.delete(self._prefix + key))

def flush(self):
    """
    Remove all items from the current Redis database.
    """
    return self._redis.flushdb()

def get_prefix(self):
    """
    Get the cache key prefix.

    :rtype: str
    """
    return self._prefix

def connection(self):
    """Return the underlying Redis client."""
    return self._redis

def tags(self, *names):
    """
    Begin executing a new tags operation.

    :param names: The tags
    :type names: tuple

    :rtype: cachy.tagged_cache.TaggedCache
    """
    return RedisTaggedCache(self, TagSet(self, names))
# TagSet (cachy/tag_set.py): manages the unique identifiers behind a set of
# cache tags; flushing a tag replaces its identifier, invalidating every key
# namespaced under it.

class TagSet(object):

    def __init__(self, store, names=None):
        """
        :param store: The cache store implementation
        :type store: cachy.contracts.store.Store

        :param names: The tag names
        :type names: list or tuple
        """
        self._store = store
        self._names = names or []

    def reset(self):
        """
        Reset all tags in the set to fresh identifiers.
        """
        for name in self._names:
            self.reset_tag(name)

    def tag_id(self, name):
        """
        Get the unique tag identifier for a given tag, creating one if absent.

        :param name: The tag
        :type name: str

        :rtype: str
        """
        return self._store.get(self.tag_key(name)) or self.reset_tag(name)

    def _tag_ids(self):
        """
        Get the identifiers for all of the tags in the set.

        :rtype: list
        """
        return [self.tag_id(name) for name in self._names]

    def get_namespace(self):
        """
        Get a unique namespace that changes when any of the tags are flushed.

        :rtype: str
        """
        return '|'.join(self._tag_ids())

    def reset_tag(self, name):
        """
        Reset the tag and return the new tag identifier.

        :param name: The tag
        :type name: str

        :rtype: str
        """
        identifier = str(uuid.uuid4()).replace('-', '')
        self._store.forever(self.tag_key(name), identifier)

        return identifier

    def tag_key(self, name):
        """
        Get the storage key holding the identifier of a given tag.

        :param name: The tag
        :type name: str

        :rtype: str
        """
        return 'tag:%s:key' % name
# TaggedCache head (cachy/tagged_cache.py): a store wrapper that namespaces
# every key under the identifiers of a tag set.

def __init__(self, store, tags):
    """
    :param store: The cache store implementation
    :type store: cachy.contracts.store.Store

    :param tags: The tag set
    :type tags: cachy.tag_set.TagSet
    """
    self._store = store
    self._tags = tags

def has(self, key):
    """
    Determine if an item exists in the cache.

    :param key: The cache key
    :type key: str

    :rtype: bool
    """
    return self.get(key) is not None

def get(self, key, default=None):
    """
    Retrieve an item from the cache by key.

    :param key: The cache key
    :type key: str

    :param default: The default value (may be a callable factory)
    :type default: mixed

    :return: The cache value
    """
    val = self._store.get(self.tagged_item_key(key))

    if val is not None:
        return val

    # ``value()`` evaluates callables, so a factory may be passed as default.
    return value(default)

def put(self, key, value, minutes):
    """
    Store an item in the cache for a given number of minutes.

    :param key: The cache key
    :type key: str

    :param value: The cache value
    :type value: mixed

    :param minutes: The lifetime in minutes of the cached value
    :type minutes: int or datetime
    """
    minutes = self._get_minutes(minutes)

    # A past datetime normalizes to None: nothing is stored.
    if minutes is not None:
        return self._store.put(self.tagged_item_key(key), value, minutes)
# TaggedCache middle section (cachy/tagged_cache.py).
# Fix: increment(), decrement() and forget() documented a return value
# (":rtype: int or bool") but never returned the underlying store's result;
# they now propagate it.

def add(self, key, val, minutes):
    """
    Store an item in the cache if it does not exist.

    :param key: The cache key
    :type key: str

    :param val: The cache value
    :type val: mixed

    :param minutes: The lifetime in minutes of the cached value
    :type minutes: int|datetime

    :rtype: bool
    """
    if not self.has(key):
        self.put(key, val, minutes)

        return True

    return False

def increment(self, key, value=1):
    """
    Increment the value of an item in the cache.

    :param key: The cache key
    :type key: str

    :param value: The increment value
    :type value: int

    :rtype: int or bool
    """
    return self._store.increment(self.tagged_item_key(key), value)

def decrement(self, key, value=1):
    """
    Decrement the value of an item in the cache.

    :param key: The cache key
    :type key: str

    :param value: The decrement value
    :type value: int

    :rtype: int or bool
    """
    return self._store.decrement(self.tagged_item_key(key), value)

def forever(self, key, value):
    """
    Store an item in the cache indefinitely.

    :param key: The cache key
    :type key: str

    :param value: The value
    :type value: mixed
    """
    self._store.forever(self.tagged_item_key(key), value)

def forget(self, key):
    """
    Remove an item from the cache.

    :param key: The cache key
    :type key: str

    :rtype: bool
    """
    return self._store.forget(self.tagged_item_key(key))

def flush(self):
    """
    Remove all items tagged with this tag set by resetting the tags.
    """
    self._tags.reset()

def remember(self, key, minutes, callback):
    """
    Get an item from the cache, or store the default value.

    :param key: The cache key
    :type key: str

    :param minutes: The lifetime in minutes of the cached value
    :type minutes: int or datetime

    :param callback: The default function
    :type callback: mixed

    :rtype: mixed
    """
    # If the item exists in the cache we return it immediately; otherwise we
    # execute the callback and cache its result for the given lifetime.
    val = self.get(key)
    if val is not None:
        return val

    val = value(callback)
    self.put(key, val, minutes)

    return val
:param key: The cache key :type key: str :param callback: The default function :type callback: mixed :rtype: mixed """ # If the item exists in the cache we will just return this immediately # otherwise we will execute the given callback and cache the result # of that execution forever. val = self.get(key) if val is not None: return val val = value(callback) self.forever(key, val) return val def tagged_item_key(self, key): """ Get a fully qualified key for a tagged item. :param key: The cache key :type key: str :rtype: str """ return '%s:%s' % (hashlib.sha1(encode(self._tags.get_namespace())).hexdigest(), key) def get_prefix(self): """ Get the cache key prefix. :rtype: str """ return self._store.get_prefix() def _get_minutes(self, duration): """ Calculate the number of minutes with the given duration. :param duration: The duration :type duration: int or datetime :rtype: int or None """ if isinstance(duration, datetime.datetime): from_now = (duration - datetime.datetime.now()).total_seconds() from_now = math.ceil(from_now / 60) if from_now > 0: return from_now return return duration ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9918292 cachy-0.3.0/cachy/utils.py0000644000000000000000000000243500000000000013535 0ustar0000000000000000# -*- coding: utf-8 -*- import sys import os import errno PY2 = sys.version_info[0] == 2 PY3K = sys.version_info[0] >= 3 PY33 = sys.version_info >= (3, 3) if PY2: import imp long = long unicode = unicode basestring = basestring else: long = int unicode = str basestring = str def decode(string, encodings=None): if not PY2 and not isinstance(string, bytes): return string if encodings is None: encodings = ['utf-8', 'latin1', 'ascii'] for encoding in encodings: try: return string.decode(encoding) except UnicodeDecodeError: pass return string.decode(encodings[0], errors='ignore') def encode(string, encodings=None): if isinstance(string, bytes) or PY2 and isinstance(string, unicode): 
# cachy/utils.py tail: byte-encoding helper and a ``mkdir -p`` equivalent.
# Fix: ``str.encode`` raises UnicodeEncodeError (UnicodeDecodeError only
# occurs on Python 2's implicit decode step), so the original fallback loop
# never caught encoding failures on Python 3; both exception types are now
# handled.

def encode(string, encodings=None):
    """Encode ``string`` to bytes, trying utf-8, latin1 then ascii."""
    # Bytes pass through unchanged; on Python 2, unicode also passes through
    # (original behavior preserved). On Python 3 the second clause
    # short-circuits, so the ``unicode`` builtin is never evaluated there.
    if isinstance(string, bytes) or (
        sys.version_info[0] == 2 and isinstance(string, unicode)  # noqa: F821
    ):
        return string

    if encodings is None:
        encodings = ['utf-8', 'latin1', 'ascii']

    for encoding in encodings:
        try:
            return string.encode(encoding)
        except (UnicodeEncodeError, UnicodeDecodeError):
            pass

    # Last resort: lossy encode with the first encoding.
    return string.encode(encodings[0], errors='ignore')


def mkdir_p(path, mode=0o777):
    """Create ``path`` and any missing parents; an existing directory is OK.

    :param path: The directory path to create
    :type path: str

    :param mode: The permission bits for newly created directories
    :type mode: int
    """
    try:
        os.makedirs(path, mode)
    except OSError as exc:
        # Only swallow "already exists" for an actual directory; any other
        # failure (permissions, a file in the way) is re-raised.
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise
# tests/stores/test_dict_store.py -- behavior tests for DictStore.

class DictStoreTestCase(TestCase):
    """Exercises the in-memory dict-backed store."""

    def tearDown(self):
        flexmock_teardown()

    def test_items_can_be_set_and_retrieved(self):
        dict_store = DictStore()
        dict_store.put('foo', 'bar', 10)

        self.assertEqual('bar', dict_store.get('foo'))

    def test_store_item_forever_properly_stores_in_dict(self):
        # forever() must delegate to put() with a lifetime of 0.
        mocked = flexmock(DictStore())
        mocked.should_receive('put').once().with_args('foo', 'bar', 0)
        mocked.forever('foo', 'bar')

    def test_values_can_be_incremented(self):
        dict_store = DictStore()
        dict_store.put('foo', 1, 10)
        dict_store.increment('foo')

        self.assertEqual(2, dict_store.get('foo'))

    def test_values_can_be_decremented(self):
        dict_store = DictStore()
        dict_store.put('foo', 1, 10)
        dict_store.decrement('foo')

        self.assertEqual(0, dict_store.get('foo'))

    def test_values_can_be_removed(self):
        dict_store = DictStore()
        dict_store.put('foo', 'bar', 10)
        dict_store.forget('foo')

        self.assertIsNone(dict_store.get('foo'))

    def test_items_can_be_flushed(self):
        dict_store = DictStore()
        dict_store.put('foo', 'bar', 10)
        dict_store.put('baz', 'boom', 10)
        dict_store.flush()

        self.assertIsNone(dict_store.get('foo'))
        self.assertIsNone(dict_store.get('baz'))

    def test_cache_key(self):
        dict_store = DictStore()

        self.assertEqual('', dict_store.get_prefix())
# tests/stores/test_file_store.py -- behavior tests for FileStore.
# Fixes: the class was misnamed DictStoreTestCase (copy/paste from the dict
# store tests) and is renamed FileStoreTestCase; the sharded-path computation
# repeated in six tests is factored into _full_path().

try:
    import builtins
except ImportError:  # Python 2
    import __builtin__ as builtins


class FileStoreTestCase(TestCase):

    def setUp(self):
        self._dir = os.path.join(tempfile.gettempdir(), 'cachy')

    def tearDown(self):
        for e in glob.glob(os.path.join(self._dir, '*')):
            if os.path.isdir(e):
                shutil.rmtree(e)

        flexmock_teardown()

    def _full_path(self, key):
        # Mirror FileStore._path() for the default sha256 hash: eight
        # two-character directory levels followed by the full digest.
        sha = hashlib.sha256(encode(key)).hexdigest()
        parts = [sha[i:i + 2] for i in range(0, 16, 2)]

        return os.path.join(self._dir, os.path.join(*parts), sha)

    def test_none_is_returned_if_file_doesnt_exist(self):
        mock = flexmock(os.path)
        mock.should_receive('exists').once().and_return(False)

        store = FileStore(tempfile.gettempdir())

        self.assertIsNone(store.get('foo'))

    def test_put_creates_missing_directories(self):
        store = flexmock(FileStore(self._dir))
        full_path = self._full_path('foo')
        store.should_receive('_create_cache_directory').once().with_args(full_path)

        mock = flexmock(builtins)
        handler = flexmock()
        mock.should_receive('open').once().with_args(full_path, 'wb').and_return(handler)
        handler.should_receive('write').once()

        store.put('foo', '0000000000', 0)

    def test_expired_items_return_none(self):
        store = flexmock(FileStore(self._dir))
        contents = b'0000000000' + store.serialize('bar')
        flexmock(os.path).should_receive('exists').once().and_return(True)

        mock = flexmock(builtins)
        handler = flexmock()
        full_path = self._full_path('foo')
        mock.should_receive('open').once().with_args(full_path, 'rb').and_return(handler)
        handler.should_receive('read').once().and_return(contents)
        store.should_receive('forget').once().with_args('foo')

        store.get('foo')

    def test_store_items_properly_store_values(self):
        store = flexmock(FileStore(self._dir))
        contents = b'1111111111' + store.serialize('bar')
        full_path = self._full_path('foo')
        store.should_receive('_expiration').with_args(10).and_return(1111111111)

        mock = flexmock(builtins)
        handler = flexmock()
        mock.should_receive('open').once().with_args(full_path, 'wb').and_return(handler)
        handler.should_receive('write').once().with_args(contents)

        store.put('foo', 'bar', 10)

    def test_forever_store_values_with_high_timestamp(self):
        store = flexmock(FileStore(self._dir))
        contents = b'9999999999' + store.serialize('bar')
        full_path = self._full_path('foo')

        mock = flexmock(builtins)
        handler = flexmock()
        mock.should_receive('open').once().with_args(full_path, 'wb').and_return(handler)
        handler.should_receive('write').once().with_args(contents)

        store.forever('foo', 'bar')

    def test_forget_with_missing_file(self):
        store = FileStore(self._dir)
        full_path = self._full_path('foo')

        mock = flexmock(os.path)
        mock.should_receive('exists').once().with_args(full_path).and_return(False)

        self.assertFalse(store.forget('foo'))

    def test_forget_removes_file(self):
        store = FileStore(self._dir)
        full_path = self._full_path('foo')

        mock = flexmock(os.path)
        mock.should_receive('exists').once().with_args(full_path).and_return(True)
        flexmock(os).should_receive('remove').once().with_args(full_path)

        self.assertTrue(store.forget('foo'))

    def test_get_with_json_serializer(self):
        store = FileStore(self._dir)
        store.set_serializer(JsonSerializer())

        store.forever('foo', {'foo': 'bar'})

        result = store.get('foo')
        assert result == {'foo': 'bar'}

    def test_set_hash_type(self):
        store = FileStore(self._dir, hash_type='md5')

        store.put('foo', 'bar', 10)

        # md5 shards across only two directory levels.
        md5 = hashlib.md5(encode('foo')).hexdigest()
        full_dir = os.path.join(self._dir, md5[0:2], md5[2:4])

        assert os.path.exists(full_dir)
# tests/stores/test_memcached_store.py and test_null_store.py.
# Fix: the memcached suite was misnamed RedisStoreTestCase (copy/paste from
# the redis tests); renamed MemcachedStoreTestCase so it no longer shadows
# the real redis suite's name.

class MemcachedStoreTestCase(TestCase):
    """Integration tests against a local memcached at 127.0.0.1:11211."""

    def setUp(self):
        self.store = MemcachedStore(['127.0.0.1:11211'], 'prefix:')

        super(MemcachedStoreTestCase, self).setUp()

    def tearDown(self):
        self.store._memcache.flush_all()

    def test_get_returns_null_when_not_found(self):
        self.assertIsNone(self.store.get('foo'))

    def test_value_is_returned(self):
        mc = self.get_memcached()
        mc.set('prefix:foo', 'bar')

        self.assertEqual('bar', self.store.get('foo'))

    def test_value_is_returned_for_numerics(self):
        mc = self.get_memcached()
        mc.set('prefix:foo', 1)

        self.assertEqual(1, self.store.get('foo'))

    def test_put_value_into_memcache(self):
        mc = self.get_memcached()
        self.store.put('foo', 'bar', 60)

        self.assertEqual('bar', mc.get('prefix:foo'))

    def test_put_numeric_value(self):
        mc = self.get_memcached()
        self.store.put('foo', 1, 60)

        self.assertEqual(1, mc.get('prefix:foo'))

    def test_increment(self):
        mc = self.get_memcached()
        mc.set('prefix:foo', 1, 60)
        self.store.increment('foo', 2)

        self.assertEqual(3, mc.get('prefix:foo'))

    def test_decrement(self):
        mc = self.get_memcached()
        mc.set('prefix:foo', 3, 60)
        self.store.decrement('foo', 2)

        self.assertEqual(1, mc.get('prefix:foo'))

    def test_forever(self):
        mc = self.get_memcached()
        self.store.forever('foo', 'bar')

        self.assertEqual('bar', mc.get('prefix:foo'))

    def test_forget(self):
        mc = self.get_memcached()
        mc.set('prefix:foo', 'bar')
        self.store.forget('foo')

        self.assertIsNone(mc.get('prefix:foo'))

    def get_memcached(self):
        return self.store._memcache


class NullStoreTestCase(TestCase):

    def test_items_cannot_be_cached(self):
        store = NullStore()
        store.put('foo', 'bar', 10)

        self.assertIsNone(store.get('foo'))
# tests/stores/test_redis_store.py -- behavior tests for RedisStore run
# against an in-process fakeredis server.

class RedisStoreTestCase(TestCase):

    def setUp(self):
        server = FakeServer()
        server.connected = True

        self.store = RedisStore(
            prefix='prefix:',
            redis_class=FakeStrictRedis,
            server=server
        )
        self.redis = FakeStrictRedis(server=server)

        super(RedisStoreTestCase, self).setUp()

    def tearDown(self):
        flexmock_teardown()
        self.redis.flushdb()

    def test_get_returns_null_when_not_found(self):
        self.assertIsNone(self.store.get('foo'))

    def test_redis_value_is_returned(self):
        self.redis.set('prefix:foo', self.store.serialize('bar'))

        self.assertEqual('bar', self.store.get('foo'))

    def test_redis_value_is_returned_for_numerics(self):
        self.redis.set('prefix:foo', self.store.serialize(1))

        self.assertEqual(1, self.store.get('foo'))

    def test_put_value_into_redis(self):
        self.store.put('foo', 'bar', 60)

        self.assertEqual(self.store.serialize('bar'), self.redis.get('prefix:foo'))
        # The TTL should round to the requested 60 minutes.
        self.assertEqual(60., round(math.ceil(float(self.redis.ttl('prefix:foo')) / 60)))

    def test_put_numeric_value_into_redis(self):
        self.store.put('foo', 1, 60)

        self.assertEqual(self.store.serialize(1), self.redis.get('prefix:foo'))
        self.assertEqual(60., round(math.ceil(float(self.redis.ttl('prefix:foo')) / 60)))

    def test_increment(self):
        self.redis.set('prefix:foo', 1)
        self.store.increment('foo', 2)

        self.assertEqual(3, int(self.redis.get('prefix:foo')))

    def test_decrement(self):
        self.redis.set('prefix:foo', 3)
        self.store.decrement('foo', 2)

        self.assertEqual(1, int(self.redis.get('prefix:foo')))

    def test_forever(self):
        self.store.forever('foo', 'bar')

        self.assertEqual(self.store.serialize('bar'), self.redis.get('prefix:foo'))
        # forever() stores with no expiry: redis reports a TTL of -1.
        assert self.redis.ttl('prefix:foo') == -1

    def test_forget(self):
        self.redis.set('prefix:foo', 'bar')
        self.store.forget('foo')

        self.assertFalse(self.redis.exists('prefix:foo'))
# tests/test_cache_manager.py -- behavior tests for CacheManager.
# Fix: the suite was misnamed RepositoryTestCase, the same name as the real
# repository suite in test_repository.py; renamed CacheManagerTestCase.

class CacheManagerTestCase(TestCase):

    def tearDown(self):
        flexmock_teardown()

    def test_store_get_the_correct_store(self):
        cache = CacheManager({
            'default': 'dict',
            'stores': {
                'dict': {
                    'driver': 'dict'
                },
                'file': {
                    'driver': 'file',
                    'path': os.path.join(tempfile.gettempdir(), 'cachy')
                }
            }
        })

        self.assertIsInstance(cache.store().get_store(), DictStore)
        self.assertIsInstance(cache.store('dict').get_store(), DictStore)
        self.assertIsInstance(cache.store('file').get_store(), FileStore)

    def test_set_default_driver_changes_driver(self):
        cache = CacheManager({
            'default': 'dict',
            'stores': {
                'dict': {
                    'driver': 'dict'
                },
                'file': {
                    'driver': 'file',
                    'path': os.path.join(tempfile.gettempdir(), 'cachy')
                }
            }
        })

        self.assertIsInstance(cache.store().get_store(), DictStore)
        cache.set_default_driver('file')
        self.assertIsInstance(cache.store().get_store(), FileStore)

    def test_extend_accepts_a_callable_returning_a_store(self):
        cache = CacheManager({
            'default': 'my-driver',
            'stores': {
                'my-driver': {
                    'driver': 'my-driver'
                }
            }
        })

        cache.extend('my-driver', lambda config: CustomStore())

        self.assertIsInstance(cache.store().get_store(), CustomStore)

    def test_extend_accepts_a_callable_returning_a_repository(self):
        cache = CacheManager({
            'default': 'my-driver',
            'stores': {
                'my-driver': {
                    'driver': 'my-driver'
                }
            }
        })

        cache.extend('my-driver', lambda config: Repository(CustomStore()))

        self.assertIsInstance(cache.store().get_store(), CustomStore)

    def test_extend_accepts_a_store_class(self):
        cache = CacheManager({
            'default': 'my-driver',
            'stores': {
                'my-driver': {
                    'driver': 'my-driver'
                }
            }
        })

        cache.extend('my-driver', CustomStore)

        self.assertIsInstance(cache.store().get_store(), CustomStore)

    def test_default_store_with_one_store(self):
        manager = CacheManager({
            'stores': {
                'dict': {
                    'driver': 'dict'
                }
            }
        })

        self.assertEqual('dict', manager.get_default_driver())

    def test_decorator(self):
        manager = flexmock(CacheManager({
            'stores': {
                'dict': {
                    'driver': 'dict'
                }
            }
        }))

        store = flexmock(Repository(flexmock(CustomStore())))
        manager.should_receive('store').once().with_args(None).and_return(store)
        store.get_store().should_receive('get').and_return(None, 6, 6).one_by_one()
        store.get_store().should_receive('put').once()

        calls = []

        @manager
        def test(i, m=3):
            calls.append(i)

            return i * 3

        test(2)
        test(2)
        test(2)

        # The cached result must be served after the first call.
        self.assertEqual(1, len(calls))

    def test_full_decorator(self):
        manager = flexmock(CacheManager({
            'stores': {
                'dict': {
                    'driver': 'dict'
                }
            }
        }))

        store = flexmock(Repository(flexmock(CustomStore())))
        store.should_receive('_get_key').with_args('my_key', (2,), {'m': 4}).and_return('foo')
        manager.should_receive('store').once().with_args('dict').and_return(store)
        store.get_store().should_receive('get').and_return(None, 6, 6).one_by_one()
        store.get_store().should_receive('put').once()\
            .with_args('foo', 6, 35)

        calls = []

        @manager('dict', key='my_key', minutes=35)
        def test(i, m=3):
            calls.append(i)

            return i * 3

        test(2, m=4)
        test(2, m=4)
        test(2, m=4)

        self.assertEqual(1, len(calls))


class CustomStore(Store):
    """Minimal store stub used by the extension tests."""

    def __init__(self, config=None):
        pass
repo.get_store().should_receive('put').with_args('foo', 'bar', 10) repo.put('foo', 'bar', 10) def test_put_supports_datetime_as_minutes(self): repo = self._get_repository() repo.get_store().should_receive('put').with_args('foo', 'bar', 60) repo.put('foo', 'bar', datetime.datetime.now() + datetime.timedelta(hours=1)) def test_put_with_minutes_to_zero_doesnt_store(self): repo = self._get_repository() repo.get_store().should_receive('put').never() repo.put('foo', 'bar', datetime.datetime.now() - datetime.timedelta(hours=1)) def test_add(self): repo = self._get_repository() repo.get_store().should_receive('get').once().with_args('foo').and_return(None) repo.get_store().should_receive('get').once().with_args('bar').and_return('baz') repo.get_store().should_receive('put').once().with_args('foo', 'bar', 10) repo.get_store().should_receive('put').never().with_args('bar', 'baz', 10) self.assertTrue(repo.add('foo', 'bar', 10)) self.assertFalse(repo.add('bar', 'baz', 10)) def test_forever(self): repo = self._get_repository() repo.get_store().should_receive('forever').once().with_args('foo', 'bar') repo.forever('foo', 'bar') def test_remember_calls_put_and_returns_default(self): repo = self._get_repository() repo.get_store().should_receive('get').and_return(None) repo.get_store().should_receive('put').once().with_args('foo', 'bar', 10) result = repo.remember('foo', 10, lambda: 'bar') self.assertEqual('bar', result) def test_remember_forever_calls_forever_and_returns_default(self): repo = self._get_repository() repo.get_store().should_receive('get').and_return(None) repo.get_store().should_receive('forever').once().with_args('foo', 'bar') result = repo.remember_forever('foo', lambda: 'bar') self.assertEqual('bar', result) def test_repository_can_serve_as_a_decorator(self): repo = self._get_repository() repo.get_store().should_receive('get').and_return(None, 6, 6).one_by_one() repo.get_store().should_receive('put').once() calls = [] @repo def test(i, m=3): calls.append(i) return 
i*3 test(2) test(2) test(2) self.assertEqual(1, len(calls)) def test_repository_can_serve_as_a_decorator_with_key_and_minutes(self): repo = flexmock(self._get_repository()) repo.should_receive('_get_key').with_args('my_key', (2,), {'m': 4}).and_return('foo') repo.get_store().should_receive('get').and_return(None, 6, 6).one_by_one() repo.get_store().should_receive('put').once()\ .with_args('foo', 6, 35) calls = [] @repo(key='my_key', minutes=35) def test(i, m=3): calls.append(i) return i*3 test(2, m=4) test(2, m=4) test(2, m=4) self.assertEqual(1, len(calls)) def _get_repository(self): repo = Repository(flexmock(Store())) return repo ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1565119137.9948583 cachy-0.3.0/tests/test_tagged_cache.py0000644000000000000000000000741200000000000016065 0ustar0000000000000000# -*- coding: utf-8 -*- import hashlib from unittest import TestCase from fakeredis import FakeStrictRedis from cachy.stores import DictStore, RedisStore from cachy.tag_set import TagSet from cachy.redis_tagged_cache import RedisTaggedCache from datetime import datetime, timedelta from flexmock import flexmock, flexmock_teardown class TaggedCacheTestCase(TestCase): def tearDown(self): flexmock_teardown() def test_tags_can_be_flushed(self): store = DictStore() store.tags('bop').put('foo', 'bar', 10) store.tags('zap').put('baz', 'boom', 10) store.tags('bop').flush() self.assertIsNone(store.tags('bop').get('foo')) self.assertEqual('boom', store.tags('zap').get('baz')) def test_cache_can_be_saved_with_multiple_tags(self): store = DictStore() tags = ['bop', 'zap'] store.tags(*tags).put('foo', 'bar', 10) self.assertEqual('bar', store.tags(tags).get('foo')) def test_cache_can_be_set_with_datetime(self): store = DictStore() duration = datetime.now() + timedelta(minutes=10) store.tags('bop').put('foo', 'bar', duration) self.assertEqual('bar', store.tags('bop').get('foo')) def 
test_cache_saved_with_multiple_tags_can_be_flushed(self): store = DictStore() tags = ['bop', 'zap'] store.tags(*tags).put('foo', 'bar', 10) tags2 = ['bam', 'pow'] store.tags(*tags2).put('foo', 'bar', 10) store.tags('zap').flush() self.assertIsNone(store.tags(tags).get('foo')) self.assertEqual('bar', store.tags(tags2).get('foo')) def test_tags_cache_forever(self): store = DictStore() tags = ['bop', 'zap'] store.tags(*tags).forever('foo', 'bar') self.assertEqual('bar', store.tags(tags).get('foo')) def test_redis_cache_tags_push_forever_keys_correctly(self): store = flexmock(RedisStore(redis_class=FakeStrictRedis)) tag_set = flexmock(TagSet(store, ['foo', 'bar'])) tag_set.should_receive('get_namespace').and_return('foo|bar') redis = RedisTaggedCache(store, tag_set) store.should_receive('get_prefix').and_return('prefix:') conn = flexmock() store.should_receive('connection').and_return(conn) conn.should_receive('lpush').once()\ .with_args('prefix:foo:forever', 'prefix:%s:key1' % hashlib.sha1(b'foo|bar').hexdigest()) conn.should_receive('lpush').once()\ .with_args('prefix:bar:forever', 'prefix:%s:key1' % hashlib.sha1(b'foo|bar').hexdigest()) store.should_receive('forever').with_args(hashlib.sha1(b'foo|bar').hexdigest() + ':key1', 'key1:value') redis.forever('key1', 'key1:value') def test_redis_cache_forever_tags_can_be_flushed(self): store = flexmock(RedisStore(redis_class=FakeStrictRedis)) tag_set = flexmock(TagSet(store, ['foo', 'bar'])) tag_set.should_receive('get_namespace').and_return('foo|bar') redis = RedisTaggedCache(store, tag_set) store.should_receive('get_prefix').and_return('prefix:') conn = flexmock() store.should_receive('connection').and_return(conn) conn.should_receive('lrange').once()\ .with_args('prefix:foo:forever', 0, -1)\ .and_return(['key1', 'key2']) conn.should_receive('lrange').once()\ .with_args('prefix:bar:forever', 0, -1)\ .and_return(['key3']) conn.should_receive('delete').once().with_args('key1', 'key2') 
conn.should_receive('delete').once().with_args('key3') conn.should_receive('delete').once().with_args('prefix:foo:forever') conn.should_receive('delete').once().with_args('prefix:bar:forever') tag_set.should_receive('reset').once() redis.flush() cachy-0.3.0/setup.py0000644000000000000000000000243500000000000012446 0ustar0000000000000000# -*- coding: utf-8 -*- from distutils.core import setup packages = \ ['cachy', 'cachy.contracts', 'cachy.serializers', 'cachy.stores', 'tests', 'tests.stores'] package_data = \ {'': ['*']} extras_require = \ {'memcached': ['python-memcached>=1.59,<2.0'], 'msgpack': ['msgpack-python>=0.5,<0.6'], 'redis': ['redis>=3.3.6,<4.0.0']} setup_kwargs = { 'name': 'cachy', 'version': '0.3.0', 'description': 'Cachy provides a simple yet effective caching library.', 'long_description': 'Cachy\n#####\n\n.. image:: https://travis-ci.org/sdispater/cachy.png\n :alt: Cachy Build status\n :target: https://travis-ci.org/sdispater/cachy\n\nCachy provides a simple yet effective caching library.\n\nThe full documentation is available here: http://cachy.readthedocs.org\n\n\nResources\n=========\n\n* `Documentation `_\n* `Issue Tracker `_\n', 'author': 'Sébastien Eustace', 'author_email': 'sebastien@eustace.io', 'maintainer': None, 'maintainer_email': None, 'url': 'https://github.com/sdispater/cachy', 'packages': packages, 'package_data': package_data, 'extras_require': extras_require, 'python_requires': '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*', } setup(**setup_kwargs) cachy-0.3.0/PKG-INFO0000644000000000000000000000270000000000000012024 0ustar0000000000000000Metadata-Version: 2.1 Name: cachy Version: 0.3.0 Summary: Cachy provides a simple yet effective caching library. 
Home-page: https://github.com/sdispater/cachy License: MIT Keywords: cache Author: Sébastien Eustace Author-email: sebastien@eustace.io Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* Classifier: License :: OSI Approved :: MIT License Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Provides-Extra: memcached Provides-Extra: msgpack Provides-Extra: redis Requires-Dist: msgpack-python (>=0.5,<0.6); extra == "msgpack" Requires-Dist: python-memcached (>=1.59,<2.0); extra == "memcached" Requires-Dist: redis (>=3.3.6,<4.0.0); extra == "redis" Project-URL: Repository, https://github.com/sdispater/cachy Description-Content-Type: text/x-rst Cachy ##### .. image:: https://travis-ci.org/sdispater/cachy.png :alt: Cachy Build status :target: https://travis-ci.org/sdispater/cachy Cachy provides a simple yet effective caching library. The full documentation is available here: http://cachy.readthedocs.org Resources ========= * `Documentation `_ * `Issue Tracker `_