aiomcache-0.8.1/CHANGES.rst

=======
CHANGES
=======

.. towncrier release notes start

0.8.1 (2023-02-10)
==================

- Add ``conn_args`` to ``Client`` to allow TLS and other options when connecting to memcache.

0.8.0 (2022-12-11)
==================

- Add ``FlagClient`` to support memcached flags.
- Fix type annotations for ``@acquire``.
- Fix rare exception caused by memcached server dying in middle of operation.
- Fix get method to not use CAS.

0.7.0 (2022-01-20)
==================

- Added support for Python 3.10
- Added support for non-ascii keys
- Added type annotations

0.6.0 (2017-12-03)
==================

- Drop python 3.3 support

0.5.2 (2017-05-27)
==================

- Fix issue with pool concurrency and task cancellation

0.5.1 (2017-03-08)
==================

- Added MANIFEST.in

0.5.0 (2017-02-08)
==================

- Added gets and cas commands

0.4.0 (2016-09-26)
==================

- Make max_size strict #14

0.3.0 (2016-03-11)
==================

- Dockerize tests
- Reuse memcached connections in Client Pool #4
- Fix stats parse to compatible more mc class software #5

0.2 (2015-12-15)
================

- Make the library Python 3.5 compatible

0.1 (2014-06-18)
================

- Initial release

aiomcache-0.8.1/LICENSE

Copyright (c) 2013-2016, Nikolay Kim, KeepSafe
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
aiomcache-0.8.1/MANIFEST.in

include LICENSE
include CHANGES.rst
include README.rst
graft aiomcache
global-exclude *.pyc *.swp

aiomcache-0.8.1/PKG-INFO

Metadata-Version: 2.1
Name: aiomcache
Version: 0.8.1
Summary: Minimal pure python memcached client
Home-page: https://github.com/aio-libs/aiomcache/
Author: Nikolay Kim
Author-email: fafhrd91@gmail.com
Maintainer: Nikolay Kim , Andrew Svetlov
Maintainer-email: aio-libs@googlegroups.com
License: BSD
Classifier: License :: OSI Approved :: BSD License
Classifier: Intended Audience :: Developers
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Operating System :: POSIX
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: Microsoft :: Windows
Classifier: Environment :: Web Environment
Classifier: Framework :: AsyncIO
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE

memcached client for asyncio
============================

asyncio (PEP 3156) library to work with memcached.

Getting started
---------------

The API looks very similar to the other memcache clients:

.. code:: python

    import asyncio
    import aiomcache

    async def hello_aiomcache():
        mc = aiomcache.Client("127.0.0.1", 11211)
        await mc.set(b"some_key", b"Some value")
        value = await mc.get(b"some_key")
        print(value)
        values = await mc.multi_get(b"some_key", b"other_key")
        print(values)
        await mc.delete(b"another_key")

    asyncio.run(hello_aiomcache())

Version 0.8 introduces ``FlagClient``, which allows registering callbacks to set or process flags.
See ``examples/simple_with_flag_handler.py``.

=======
CHANGES
=======

.. towncrier release notes start

0.8.1 (2023-02-10)
==================

- Add ``conn_args`` to ``Client`` to allow TLS and other options when connecting to memcache.

0.8.0 (2022-12-11)
==================

- Add ``FlagClient`` to support memcached flags.
- Fix type annotations for ``@acquire``.
- Fix rare exception caused by memcached server dying in middle of operation.
- Fix get method to not use CAS.
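The ``FlagClient`` added in 0.8.0 and mentioned in the description above takes a ``get_flag_handler``/``set_flag_handler`` pair. The sketch below shows one way to wire them up with pickle, mirroring the approach used in ``tests/flag_helper.py`` further down; the handler names and the flag value ``1`` are illustrative choices, not part of the library, and a memcached server on ``127.0.0.1:11211`` is assumed (the bundled ``examples/simple_with_flag_handler.py`` is the reference example).

.. code:: python

    import asyncio
    import pickle
    from typing import Any, Tuple

    import aiomcache

    PICKLE_FLAG = 1  # illustrative, application-chosen flag value

    async def load_flagged(value: bytes, flags: int) -> Any:
        # Invoked only for values stored with non-zero flags.
        if flags == PICKLE_FLAG:
            return pickle.loads(value)
        raise aiomcache.ClientException(f"unsupported flag: {flags}")

    async def dump_flagged(value: Any) -> Tuple[bytes, int]:
        # Invoked only when set() receives a non-bytes value.
        return pickle.dumps(value), PICKLE_FLAG

    async def main() -> None:
        mc = aiomcache.FlagClient(
            "127.0.0.1", 11211,
            get_flag_handler=load_flagged,
            set_flag_handler=dump_flagged,
        )
        await mc.set(b"number", 42)           # pickled, stored with PICKLE_FLAG
        await mc.set(b"raw", b"plain bytes")  # bytes bypass the handlers
        print(await mc.get(b"number"), await mc.get(b"raw"))
        await mc.close()

    asyncio.run(main())

Plain ``bytes`` values are stored with flag ``0``, so neither handler ever sees them.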
0.7.0 (2022-01-20) ===================== - Added support for Python 3.10 - Added support for non-ascii keys - Added type annotations 0.6.0 (2017-12-03) ================== - Drop python 3.3 support 0.5.2 (2017-05-27) ================== - Fix issue with pool concurrency and task cancellation 0.5.1 (2017-03-08) ================== - Added MANIFEST.in 0.5.0 (2017-02-08) ================== - Added gets and cas commands 0.4.0 (2016-09-26) ================== - Make max_size strict #14 0.3.0 (2016-03-11) ================== - Dockerize tests - Reuse memcached connections in Client Pool #4 - Fix stats parse to compatible more mc class software #5 0.2 (2015-12-15) ================ - Make the library Python 3.5 compatible 0.1 (2014-06-18) ================ - Initial release ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050646.0 aiomcache-0.8.1/README.rst0000644000175100001730000000137114371500326014524 0ustar00runnerdockermemcached client for asyncio ============================ asyncio (PEP 3156) library to work with memcached. Getting started --------------- The API looks very similar to the other memcache clients: .. code:: python import asyncio import aiomcache async def hello_aiomcache(): mc = aiomcache.Client("127.0.0.1", 11211) await mc.set(b"some_key", b"Some value") value = await mc.get(b"some_key") print(value) values = await mc.multi_get(b"some_key", b"other_key") print(values) await mc.delete(b"another_key") asyncio.run(hello_aiomcache()) Version 0.8 introduces `FlagClient` which allows registering callbacks to set or process flags. See `examples/simple_with_flag_handler.py` ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1676050658.1701503 aiomcache-0.8.1/aiomcache/0000755000175100001730000000000014371500342014742 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050646.0 aiomcache-0.8.1/aiomcache/__init__.py0000644000175100001730000000072714371500326017063 0ustar00runnerdocker"""memcached client, based on mixpanel's memcache_client library Usage example:: import aiomcache mc = aiomcache.Client("127.0.0.1", 11211) await mc.set("some_key", "Some value") value = await mc.get("some_key") await mc.delete("another_key") """ from .client import Client, FlagClient from .exceptions import ClientException, ValidationException __all__ = ("Client", "ClientException", "FlagClient", "ValidationException") __version__ = "0.8.1" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050646.0 aiomcache-0.8.1/aiomcache/client.py0000644000175100001730000004664614371500326016614 0ustar00runnerdockerimport functools import re import sys from typing import (Any, Awaitable, Callable, Dict, Generic, Mapping, Optional, Tuple, TypeVar, Union, overload) from . 
import constants as const from .exceptions import ClientException, ValidationException from .pool import Connection, MemcachePool if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal if sys.version_info >= (3, 10): from typing import Concatenate, ParamSpec else: from typing_extensions import Concatenate, ParamSpec __all__ = ['Client'] _P = ParamSpec("_P") _T = TypeVar("_T") _U = TypeVar("_U") _Client = TypeVar("_Client", bound="FlagClient[Any]") _Result = Tuple[Dict[bytes, Union[bytes, _T]], Dict[bytes, _U]] _GetFlagHandler = Callable[[bytes, int], Awaitable[_T]] _SetFlagHandler = Callable[[_T], Awaitable[Tuple[bytes, int]]] def acquire( func: Callable[Concatenate[_Client, Connection, _P], Awaitable[_T]] ) -> Callable[Concatenate[_Client, _P], Awaitable[_T]]: @functools.wraps(func) async def wrapper(self: _Client, *args: _P.args, # type: ignore[misc] **kwargs: _P.kwargs) -> _T: conn = await self._pool.acquire() try: return await func(self, conn, *args, **kwargs) except Exception as exc: conn[0].set_exception(exc) raise finally: self._pool.release(conn) return wrapper class FlagClient(Generic[_T]): def __init__(self, host: str, port: int = 11211, *, pool_size: int = 2, pool_minsize: Optional[int] = None, conn_args: Optional[Mapping[str, Any]] = None, get_flag_handler: Optional[_GetFlagHandler[_T]] = None, set_flag_handler: Optional[_SetFlagHandler[_T]] = None): """ Creates new Client instance. :param host: memcached host :param port: memcached port :param pool_size: max connection pool size :param pool_minsize: min connection pool size :param conn_args: extra arguments passed to asyncio.open_connection(). For details, see: https://docs.python.org/3/library/asyncio-stream.html#asyncio.open_connection. :param get_flag_handler: async method to call to convert flagged values. Method takes tuple: (value, flags) and should return processed value or raise ClientException if not supported. :param set_flag_handler: async method to call to convert non bytes value to flagged value. Method takes value and must return tuple: (value, flags). """ if not pool_minsize: pool_minsize = pool_size self._pool = MemcachePool( host, port, minsize=pool_minsize, maxsize=pool_size, conn_args=conn_args) self._get_flag_handler = get_flag_handler self._set_flag_handler = set_flag_handler # key may be anything except whitespace and control chars, upto 250 characters. # Must be str for unicode-aware regex. _valid_key_re = re.compile("^[^\\s\x00-\x1F\x7F-\x9F]{1,250}$") def _validate_key(self, key: bytes) -> bytes: if not isinstance(key, bytes): # avoid bugs subtle and otherwise raise ValidationException('key must be bytes', key) # Must decode to str for unicode-aware comparison. key_str = key.decode() m = self._valid_key_re.match(key_str) if m: # in python re, $ matches either end of line or right before # \n at end of line. 
We can't allow latter case, so # making sure length matches is simplest way to detect if len(m.group(0)) != len(key_str): raise ValidationException('trailing newline', key) else: raise ValidationException('invalid key', key) return key async def _execute_simple_command(self, conn: Connection, raw_command: bytes) -> bytes: response, line = bytearray(), b'' conn.writer.write(raw_command) await conn.writer.drain() while not line.endswith(b'\r\n'): line = await conn.reader.readline() response.extend(line) return response[:-2] async def close(self) -> None: """Closes the sockets if its open.""" await self._pool.clear() @overload async def _multi_get(self, conn: Connection, *keys: bytes, with_cas: Literal[True] = ...) -> _Result[_T, int]: ... @overload async def _multi_get(self, conn: Connection, *keys: bytes, with_cas: Literal[False]) -> _Result[_T, None]: ... async def _multi_get( # type: ignore[misc] self, conn: Connection, *keys: bytes, with_cas: bool = True) -> _Result[_T, Optional[int]]: # req - get [ ...]\r\n # resp - VALUE []\r\n # \r\n (if exists) # [...] # END\r\n if not keys: return {}, {} [self._validate_key(key) for key in keys] if len(set(keys)) != len(keys): raise ClientException('duplicate keys passed to multi_get') cmd = b'gets ' if with_cas else b'get ' conn.writer.write(cmd + b' '.join(keys) + b'\r\n') received = {} cas_tokens = {} line = await conn.reader.readline() while line != b'END\r\n': terms = line.split() if terms and terms[0] == b"VALUE": # exists key = terms[1] flags = int(terms[2]) length = int(terms[3]) val_bytes = (await conn.reader.readexactly(length+2))[:-2] if key in received: raise ClientException('duplicate results from server') if flags: if not self._get_flag_handler: raise ClientException("received flags without handler") val: Union[bytes, _T] = await self._get_flag_handler(val_bytes, flags) else: val = val_bytes received[key] = val cas_tokens[key] = int(terms[4]) if with_cas else None else: raise ClientException('get failed', line) line = await conn.reader.readline() if len(received) > len(keys): raise ClientException('received too many responses') return received, cas_tokens @acquire async def delete(self, conn: Connection, key: bytes) -> bool: """Deletes a key/value pair from the server. :param key: is the key to delete. :return: True if case values was deleted or False to indicate that the item with this key was not found. """ self._validate_key(key) command = b'delete ' + key + b'\r\n' response = await self._execute_simple_command(conn, command) if response not in (const.DELETED, const.NOT_FOUND): raise ClientException('Memcached delete failed', response) return response == const.DELETED @overload async def get(self, key: bytes, default: None = ...) -> Union[bytes, _T, None]: ... @overload async def get(self, key: bytes, default: _U) -> Union[bytes, _T, _U]: ... # Mypy bug: https://github.com/python/mypy/issues/12716 @acquire # type: ignore[misc] async def get( self, conn: Connection, key: bytes, default: Optional[_U] = None ) -> Union[bytes, _T, _U, None]: """Gets a single value from the server. :param key: ``bytes``, is the key for the item being fetched :param default: default value if there is no value. :return: ``bytes``, is the data for this specified key. 
""" values, _ = await self._multi_get(conn, key, with_cas=False) return values.get(key, default) @acquire async def gets( self, conn: Connection, key: bytes, default: Optional[bytes] = None ) -> Tuple[Union[bytes, _T, None], Optional[int]]: """Gets a single value from the server together with the cas token. :param key: ``bytes``, is the key for the item being fetched :param default: default value if there is no value. :return: ``bytes``, ``bytes tuple with the value and the cas """ values, cas_tokens = await self._multi_get(conn, key, with_cas=True) return values.get(key, default), cas_tokens.get(key) @acquire async def multi_get( self, conn: Connection, *keys: bytes ) -> Tuple[Union[bytes, _T, None], ...]: """Takes a list of keys and returns a list of values. :param keys: ``list`` keys for the item being fetched. :return: ``list`` of values for the specified keys. :raises:``ValidationException``, ``ClientException``, and socket errors """ values, _ = await self._multi_get(conn, *keys) return tuple(values.get(key) for key in keys) @acquire async def stats( self, conn: Connection, args: Optional[bytes] = None ) -> Dict[bytes, Optional[bytes]]: """Runs a stats command on the server.""" # req - stats [additional args]\r\n # resp - STAT \r\n (one per result) # END\r\n if args is None: args = b'' conn.writer.write(b''.join((b'stats ', args, b'\r\n'))) result: Dict[bytes, Optional[bytes]] = {} resp = await conn.reader.readline() while resp != b'END\r\n': terms = resp.split() if len(terms) == 2 and terms[0] == b'STAT': result[terms[1]] = None elif len(terms) == 3 and terms[0] == b'STAT': result[terms[1]] = terms[2] elif len(terms) >= 3 and terms[0] == b'STAT': result[terms[1]] = b' '.join(terms[2:]) else: raise ClientException('stats failed', resp) resp = await conn.reader.readline() return result async def _storage_command(self, conn: Connection, command: bytes, key: bytes, value: Union[bytes, _T], exptime: int = 0, cas: Optional[int] = None) -> bool: # req - set [noreply]\r\n # \r\n # resp - STORED\r\n (or others) # req - set [noreply]\r\n # \r\n # resp - STORED\r\n (or others) # typically, if val is > 1024**2 bytes server returns: # SERVER_ERROR object too large for cache\r\n # however custom-compiled memcached can have different limit # so, we'll let the server decide what's too much self._validate_key(key) if not isinstance(exptime, int): raise ValidationException('exptime not int', exptime) elif exptime < 0: raise ValidationException('exptime negative', exptime) flags = 0 if not isinstance(value, bytes): # flag handler only invoked on non-byte values, # consistent with only being invoked on non-zero flags on retrieval if self._set_flag_handler is None: raise ValidationException("flag handler must be set for non-byte values") value, flags = await self._set_flag_handler(value) args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))] _cmd = b' '.join([command, key] + args) if cas: _cmd += b' ' + str(cas).encode('utf-8') cmd = _cmd + b'\r\n' + value + b'\r\n' resp = await self._execute_simple_command(conn, cmd) if resp not in ( const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND): raise ClientException('stats {} failed'.format(command.decode()), resp) return resp == const.STORED @acquire async def set(self, conn: Connection, key: bytes, value: Union[bytes, _T], exptime: int = 0) -> bool: """Sets a key to a value on the server with an optional exptime (0 means don't auto-expire) :param key: ``bytes``, is the key of the item. :param value: ``bytes``, data to store. 
:param exptime: ``int``, is expiration time. If it's 0, the item never expires. :return: ``bool``, True in case of success. """ return await self._storage_command(conn, b"set", key, value, exptime) @acquire async def cas(self, conn: Connection, key: bytes, value: Union[bytes, _T], cas_token: int, exptime: int = 0) -> bool: """Sets a key to a value on the server with an optional exptime (0 means don't auto-expire) only if value hasn't changed from first retrieval :param key: ``bytes``, is the key of the item. :param value: ``bytes``, data to store. :param exptime: ``int``, is expiration time. If it's 0, the item never expires. :param cas_token: ``int``, unique cas token retrieve from previous ``gets`` :return: ``bool``, True in case of success. """ return await self._storage_command(conn, b"cas", key, value, exptime, cas=cas_token) @acquire async def add(self, conn: Connection, key: bytes, value: Union[bytes, _T], exptime: int = 0) -> bool: """Store this data, but only if the server *doesn't* already hold data for this key. :param key: ``bytes``, is the key of the item. :param value: ``bytes``, data to store. :param exptime: ``int`` is expiration time. If it's 0, the item never expires. :return: ``bool``, True in case of success. """ return await self._storage_command(conn, b"add", key, value, exptime) @acquire async def replace(self, conn: Connection, key: bytes, value: Union[bytes, _T], exptime: int = 0) -> bool: """Store this data, but only if the server *does* already hold data for this key. :param key: ``bytes``, is the key of the item. :param value: ``bytes``, data to store. :param exptime: ``int`` is expiration time. If it's 0, the item never expires. :return: ``bool``, True in case of success. """ return await self._storage_command(conn, b"replace", key, value, exptime) @acquire async def append(self, conn: Connection, key: bytes, value: Union[bytes, _T], exptime: int = 0) -> bool: """Add data to an existing key after existing data :param key: ``bytes``, is the key of the item. :param value: ``bytes``, data to store. :param exptime: ``int`` is expiration time. If it's 0, the item never expires. :return: ``bool``, True in case of success. """ return await self._storage_command(conn, b"append", key, value, exptime) @acquire async def prepend(self, conn: Connection, key: bytes, value: bytes, exptime: int = 0) -> bool: """Add data to an existing key before existing data :param key: ``bytes``, is the key of the item. :param value: ``bytes``, data to store. :param exptime: ``int`` is expiration time. If it's 0, the item never expires. :return: ``bool``, True in case of success. """ return await self._storage_command(conn, b"prepend", key, value, exptime) async def _incr_decr( self, conn: Connection, command: bytes, key: bytes, delta: int ) -> Optional[int]: delta_byte = str(delta).encode('utf-8') cmd = b' '.join([command, key, delta_byte]) + b'\r\n' resp = await self._execute_simple_command(conn, cmd) if not resp.isdigit() or resp == const.NOT_FOUND: raise ClientException( 'Memcached {} command failed'.format(str(command)), resp) return int(resp) if resp.isdigit() else None @acquire async def incr(self, conn: Connection, key: bytes, increment: int = 1) -> Optional[int]: """Command is used to change data for some item in-place, incrementing it. The data for the item is treated as decimal representation of a 64-bit unsigned integer. 
:param key: ``bytes``, is the key of the item the client wishes to change :param increment: ``int``, is the amount by which the client wants to increase the item. :return: ``int``, new value of the item's data, after the increment or ``None`` to indicate the item with this value was not found """ self._validate_key(key) return await self._incr_decr(conn, b"incr", key, increment) @acquire async def decr(self, conn: Connection, key: bytes, decrement: int = 1) -> Optional[int]: """Command is used to change data for some item in-place, decrementing it. The data for the item is treated as decimal representation of a 64-bit unsigned integer. :param key: ``bytes``, is the key of the item the client wishes to change :param decrement: ``int``, is the amount by which the client wants to decrease the item. :return: ``int`` new value of the item's data, after the increment or ``None`` to indicate the item with this value was not found """ self._validate_key(key) return await self._incr_decr(conn, b"decr", key, decrement) @acquire async def touch(self, conn: Connection, key: bytes, exptime: int) -> bool: """The command is used to update the expiration time of an existing item without fetching it. :param key: ``bytes``, is the key to update expiration time :param exptime: ``int``, is expiration time. This replaces the existing expiration time. :return: ``bool``, True in case of success. """ self._validate_key(key) _cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')]) cmd = _cmd + b'\r\n' resp = await self._execute_simple_command(conn, cmd) if resp not in (const.TOUCHED, const.NOT_FOUND): raise ClientException('Memcached touch failed', resp) return resp == const.TOUCHED @acquire async def version(self, conn: Connection) -> bytes: """Current version of the server. :return: ``bytes``, memcached version for current the server. 
""" command = b'version\r\n' response = await self._execute_simple_command(conn, command) if not response.startswith(const.VERSION): raise ClientException('Memcached version failed', response) version, number = response.rstrip(b"\r\n").split(maxsplit=1) return number @acquire async def flush_all(self, conn: Connection) -> None: """Its effect is to invalidate all existing items immediately""" command = b'flush_all\r\n' response = await self._execute_simple_command(conn, command) if const.OK != response: raise ClientException('Memcached flush_all failed', response) class Client(FlagClient[bytes]): def __init__(self, host: str, port: int = 11211, *, pool_size: int = 2, pool_minsize: Optional[int] = None, conn_args: Optional[Mapping[str, Any]] = None): super().__init__(host, port, pool_size=pool_size, pool_minsize=pool_minsize, conn_args=conn_args, get_flag_handler=None, set_flag_handler=None) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050646.0 aiomcache-0.8.1/aiomcache/constants.py0000644000175100001730000000024414371500326017332 0ustar00runnerdockerSTORED = b'STORED' NOT_STORED = b'NOT_STORED' TOUCHED = b'TOUCHED' NOT_FOUND = b'NOT_FOUND' DELETED = b'DELETED' VERSION = b'VERSION' EXISTS = b'EXISTS' OK = b'OK' ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050646.0 aiomcache-0.8.1/aiomcache/exceptions.py0000644000175100001730000000073214371500326017501 0ustar00runnerdockerfrom typing import Optional __all__ = ['ClientException', 'ValidationException'] class ClientException(Exception): """Raised when the server does something we don't expect.""" def __init__(self, msg: str, item: Optional[object] = None): if item is not None: msg = '%s: %r' % (msg, item) super().__init__(msg) class ValidationException(ClientException): """Raised when an invalid parameter is passed to a ``Client`` function.""" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050646.0 aiomcache-0.8.1/aiomcache/pool.py0000644000175100001730000000506414371500326016274 0ustar00runnerdockerimport asyncio from typing import Any, Mapping, NamedTuple, Optional, Set __all__ = ['MemcachePool'] class Connection(NamedTuple): reader: asyncio.StreamReader writer: asyncio.StreamWriter class MemcachePool: def __init__(self, host: str, port: int, *, minsize: int, maxsize: int, conn_args: Optional[Mapping[str, Any]] = None): self._host = host self._port = port self._minsize = minsize self._maxsize = maxsize self.conn_args = conn_args or {} self._pool: asyncio.Queue[Connection] = asyncio.Queue() self._in_use: Set[Connection] = set() async def clear(self) -> None: """Clear pool connections.""" while not self._pool.empty(): conn = await self._pool.get() self._do_close(conn) def _do_close(self, conn: Connection) -> None: conn.reader.feed_eof() conn.writer.close() async def acquire(self) -> Connection: """Acquire connection from the pool, or spawn new one if pool maxsize permits. :return: ``tuple`` (reader, writer) """ while self.size() == 0 or self.size() < self._minsize: _conn = await self._create_new_conn() if _conn is None: break self._pool.put_nowait(_conn) conn: Optional[Connection] = None while not conn: _conn = await self._pool.get() if _conn.reader.at_eof() or _conn.reader.exception() is not None: self._do_close(_conn) conn = await self._create_new_conn() else: conn = _conn self._in_use.add(conn) return conn def release(self, conn: Connection) -> None: """Releases connection back to the pool. 
:param conn: ``namedtuple`` (reader, writer) """ self._in_use.remove(conn) if conn.reader.at_eof() or conn.reader.exception() is not None: self._do_close(conn) else: self._pool.put_nowait(conn) async def _create_new_conn(self) -> Optional[Connection]: if self.size() < self._maxsize: reader, writer = await asyncio.open_connection( self._host, self._port, **self.conn_args) if self.size() < self._maxsize: return Connection(reader, writer) else: reader.feed_eof() writer.close() return None else: return None def size(self) -> int: return self._pool.qsize() + len(self._in_use) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050646.0 aiomcache-0.8.1/aiomcache/py.typed0000644000175100001730000000000014371500326016431 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1676050658.1701503 aiomcache-0.8.1/aiomcache.egg-info/0000755000175100001730000000000014371500342016434 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050658.0 aiomcache-0.8.1/aiomcache.egg-info/PKG-INFO0000644000175100001730000000571414371500342017540 0ustar00runnerdockerMetadata-Version: 2.1 Name: aiomcache Version: 0.8.1 Summary: Minimal pure python memcached client Home-page: https://github.com/aio-libs/aiomcache/ Author: Nikolay Kim Author-email: fafhrd91@gmail.com Maintainer: Nikolay Kim , Andrew Svetlov Maintainer-email: aio-libs@googlegroups.com License: BSD Classifier: License :: OSI Approved :: BSD License Classifier: Intended Audience :: Developers Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Classifier: Operating System :: POSIX Classifier: Operating System :: MacOS :: MacOS X Classifier: Operating System :: Microsoft :: Windows Classifier: Environment :: Web Environment Classifier: Framework :: AsyncIO Requires-Python: >=3.7 Description-Content-Type: text/x-rst License-File: LICENSE memcached client for asyncio ============================ asyncio (PEP 3156) library to work with memcached. Getting started --------------- The API looks very similar to the other memcache clients: .. code:: python import asyncio import aiomcache async def hello_aiomcache(): mc = aiomcache.Client("127.0.0.1", 11211) await mc.set(b"some_key", b"Some value") value = await mc.get(b"some_key") print(value) values = await mc.multi_get(b"some_key", b"other_key") print(values) await mc.delete(b"another_key") asyncio.run(hello_aiomcache()) Version 0.8 introduces `FlagClient` which allows registering callbacks to set or process flags. See `examples/simple_with_flag_handler.py` ======= CHANGES ======= .. towncrier release notes start 0.8.1 (2023-02-10) ================== - Add ``conn_args`` to ``Client`` to allow TLS and other options when connecting to memcache. 0.8.0 (2022-12-11) ================== - Add ``FlagClient`` to support memcached flags. - Fix type annotations for ``@acquire``. - Fix rare exception caused by memcached server dying in middle of operation. - Fix get method to not use CAS. 
0.7.0 (2022-01-20) ===================== - Added support for Python 3.10 - Added support for non-ascii keys - Added type annotations 0.6.0 (2017-12-03) ================== - Drop python 3.3 support 0.5.2 (2017-05-27) ================== - Fix issue with pool concurrency and task cancellation 0.5.1 (2017-03-08) ================== - Added MANIFEST.in 0.5.0 (2017-02-08) ================== - Added gets and cas commands 0.4.0 (2016-09-26) ================== - Make max_size strict #14 0.3.0 (2016-03-11) ================== - Dockerize tests - Reuse memcached connections in Client Pool #4 - Fix stats parse to compatible more mc class software #5 0.2 (2015-12-15) ================ - Make the library Python 3.5 compatible 0.1 (2014-06-18) ================ - Initial release ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050658.0 aiomcache-0.8.1/aiomcache.egg-info/SOURCES.txt0000644000175100001730000000073214371500342020322 0ustar00runnerdockerCHANGES.rst LICENSE MANIFEST.in README.rst setup.cfg setup.py aiomcache/__init__.py aiomcache/client.py aiomcache/constants.py aiomcache/exceptions.py aiomcache/pool.py aiomcache/py.typed aiomcache.egg-info/PKG-INFO aiomcache.egg-info/SOURCES.txt aiomcache.egg-info/dependency_links.txt aiomcache.egg-info/requires.txt aiomcache.egg-info/top_level.txt tests/__init__.py tests/commands_test.py tests/conftest.py tests/conn_args_test.py tests/flag_helper.py tests/pool_test.py././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050658.0 aiomcache-0.8.1/aiomcache.egg-info/dependency_links.txt0000644000175100001730000000000114371500342022502 0ustar00runnerdocker ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050658.0 aiomcache-0.8.1/aiomcache.egg-info/requires.txt0000644000175100001730000000006114371500342021031 0ustar00runnerdocker [:python_version < "3.11"] typing_extensions>=4 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050658.0 aiomcache-0.8.1/aiomcache.egg-info/top_level.txt0000644000175100001730000000002014371500342021156 0ustar00runnerdockeraiomcache tests ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1676050658.1701503 aiomcache-0.8.1/setup.cfg0000644000175100001730000000021214371500342014645 0ustar00runnerdocker[easy_install] zip_ok = false [nosetests] nocapture = 1 cover-package = aiomcache cover-erase = 1 [egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050646.0 aiomcache-0.8.1/setup.py0000644000175100001730000000361414371500326014551 0ustar00runnerdockerimport codecs import os import re from setuptools import find_packages, setup with codecs.open(os.path.join(os.path.abspath(os.path.dirname( __file__)), 'aiomcache', '__init__.py'), 'r', 'latin1') as fp: try: version = re.findall(r'^__version__ = "([^"]+)"\r?$', fp.read(), re.M)[0] except IndexError: raise RuntimeError('Unable to determine version.') def read(f): return open(os.path.join(os.path.dirname(__file__), f)).read().strip() setup(name='aiomcache', version=version, description=('Minimal pure python memcached client'), long_description='\n\n'.join((read('README.rst'), read('CHANGES.rst'))), long_description_content_type='text/x-rst', classifiers=[ 'License :: OSI Approved :: BSD License', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming 
Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', 'Operating System :: POSIX', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: Windows', 'Environment :: Web Environment', 'Framework :: AsyncIO', ], author='Nikolay Kim', author_email='fafhrd91@gmail.com', maintainer=', '.join(('Nikolay Kim ', 'Andrew Svetlov ')), maintainer_email='aio-libs@googlegroups.com', url='https://github.com/aio-libs/aiomcache/', license='BSD', packages=find_packages(), python_requires='>=3.7', install_requires=('typing_extensions>=4; python_version<"3.11"',), tests_require=("nose",), test_suite='nose.collector', include_package_data=True) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1676050658.1701503 aiomcache-0.8.1/tests/0000755000175100001730000000000014371500342014173 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050646.0 aiomcache-0.8.1/tests/__init__.py0000644000175100001730000000000014371500326016274 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050646.0 aiomcache-0.8.1/tests/commands_test.py0000644000175100001730000002744314371500326017421 0ustar00runnerdockerimport asyncio import datetime from typing import Any from unittest import mock from unittest.mock import MagicMock import pytest from aiomcache import Client, FlagClient from aiomcache.exceptions import ClientException, ValidationException from .flag_helper import FlagHelperDemo @pytest.mark.parametrize("key", ( b"key", b"123", bytes("!@#", "utf-8"), bytes("中文", "utf-8"), bytes("こんにちは", "utf-8"), bytes("안녕하세요", "utf-8"), )) async def test_valid_key(mcache: Client, key: bytes) -> None: assert mcache._validate_key(key) == key @pytest.mark.parametrize("key", ( # Whitespace b"foo bar", b"foo\t", b"\nbar", b"foo\x20\x0Dbar", b"\x18\x0E", b"\x20\x60", b"\x30\x00", b"\x20\x01", # Control characters b"foo\x00bar", b"\x1F", b"\x7F", "\u0080".encode(), "\u009F".encode(), )) async def test_invalid_key(mcache: Client, key: bytes) -> None: with pytest.raises(ValidationException, match="invalid key"): mcache._validate_key(key) async def test_version(mcache: Client) -> None: version = await mcache.version() stats = await mcache.stats() assert version == stats[b'version'] with mock.patch.object( mcache, "_execute_simple_command", new_callable=MagicMock) as patched: fut: asyncio.Future[bytes] = asyncio.Future() fut.set_result(b'SERVER_ERROR error\r\n') patched.return_value = fut with pytest.raises(ClientException): await mcache.version() async def test_flush_all(mcache: Client) -> None: key, value = b'key:flush_all', b'flush_all_value' await mcache.set(key, value) # make sure value exists test_value = await mcache.get(key) assert test_value == value # flush data await mcache.flush_all() # make sure value does not exists test_value = await mcache.get(key) assert test_value is None with mock.patch.object(mcache, '_execute_simple_command') as patched: fut: asyncio.Future[bytes] = asyncio.Future() fut.set_result(b'SERVER_ERROR error\r\n') patched.return_value = fut with pytest.raises(ClientException): await mcache.flush_all() async def test_set_get(mcache: Client) -> None: key, value = b'key:set', b'1' await mcache.set(key, value) test_value = await mcache.get(key) assert test_value == value test_value = await mcache.get(b"not:" + key) assert test_value is None test_value = await mcache.get(b"not:" + key, default=value) assert 
test_value == value with mock.patch.object(mcache, '_execute_simple_command') as patched: fut: asyncio.Future[bytes] = asyncio.Future() fut.set_result(b'SERVER_ERROR error\r\n') patched.return_value = fut with pytest.raises(ClientException): await mcache.set(key, value) async def test_gets(mcache: Client) -> None: key, value = b'key:set', b'1' await mcache.set(key, value) test_value, cas = await mcache.gets(key) assert test_value == value assert isinstance(cas, int) test_value, cas = await mcache.gets(b"not:" + key) assert test_value is None assert cas is None test_value, cas = await mcache.gets(b"not:" + key, default=value) assert test_value == value assert cas is None async def test_multi_get(mcache: Client) -> None: key1, value1 = b'key:multi_get:1', b'1' key2, value2 = b'key:multi_get:2', b'2' await mcache.set(key1, value1) await mcache.set(key2, value2) test_value = await mcache.multi_get(key1, key2) assert test_value == (value1, value2) test_value = await mcache.multi_get(b'not' + key1, key2) assert test_value == (None, value2) test_value = await mcache.multi_get() assert test_value == () async def test_multi_get_doubling_keys(mcache: Client) -> None: key, value = b'key:multi_get:3', b'1' await mcache.set(key, value) with pytest.raises(ClientException): await mcache.multi_get(key, key) async def test_set_expire(mcache: Client) -> None: key, value = b'key:set', b'1' await mcache.set(key, value, exptime=1) test_value = await mcache.get(key) assert test_value == value await asyncio.sleep(1) test_value = await mcache.get(key) assert test_value is None async def test_set_errors(mcache: Client) -> None: key, value = b'key:set', b'1' await mcache.set(key, value, exptime=1) with pytest.raises(ValidationException): await mcache.set(key, value, exptime=-1) with pytest.raises(ValidationException): await mcache.set(key, value, exptime=3.14) # type: ignore[arg-type] async def test_gets_cas(mcache: Client) -> None: key, value = b'key:set', b'1' await mcache.set(key, value) test_value, cas = await mcache.gets(key) assert cas is not None stored = await mcache.cas(key, value, cas) assert stored is True stored = await mcache.cas(key, value, cas) assert stored is False async def test_cas_missing(mcache: Client) -> None: key, value = b'key:set', b'1' stored = await mcache.cas(key, value, 123) assert stored is False async def test_add(mcache: Client) -> None: key, value = b'key:add', b'1' await mcache.set(key, value) test_value1 = await mcache.add(key, b"2") assert not test_value1 test_value2 = await mcache.add(b"not:" + key, b"2") assert test_value2 test_value3 = await mcache.get(b"not:" + key) assert test_value3 == b"2" async def test_replace(mcache: Client) -> None: key, value = b'key:replace', b'1' await mcache.set(key, value) test_value1 = await mcache.replace(key, b"2") assert test_value1 # make sure value exists test_value2 = await mcache.get(key) assert test_value2 == b"2" test_value3 = await mcache.replace(b"not:" + key, b"3") assert not test_value3 # make sure value exists test_value4 = await mcache.get(b"not:" + key) assert test_value4 is None async def test_append(mcache: Client) -> None: key, value = b'key:append', b'1' await mcache.set(key, value) test_value1 = await mcache.append(key, b"2") assert test_value1 # make sure value exists test_value2 = await mcache.get(key) assert test_value2 == b"12" test_value3 = await mcache.append(b"not:" + key, b"3") assert not test_value3 # make sure value exists test_value4 = await mcache.get(b"not:" + key) assert test_value4 is None async def 
test_prepend(mcache: Client) -> None: key, value = b'key:prepend', b'1' await mcache.set(key, value) test_value1 = await mcache.prepend(key, b"2") assert test_value1 # make sure value exists test_value2 = await mcache.get(key) assert test_value2 == b"21" test_value3 = await mcache.prepend(b"not:" + key, b"3") assert not test_value3 # make sure value exists test_value4 = await mcache.get(b"not:" + key) assert test_value4 is None async def test_delete(mcache: Client) -> None: key, value = b'key:delete', b'value' await mcache.set(key, value) # make sure value exists test_value = await mcache.get(key) assert test_value == value is_deleted = await mcache.delete(key) assert is_deleted # make sure value does not exists test_value = await mcache.get(key) assert test_value is None with mock.patch.object(mcache, '_execute_simple_command') as patched: fut: asyncio.Future[bytes] = asyncio.Future() fut.set_result(b'SERVER_ERROR error\r\n') patched.return_value = fut with pytest.raises(ClientException): await mcache.delete(key) async def test_delete_key_not_exists(mcache: Client) -> None: is_deleted = await mcache.delete(b"not:key") assert not is_deleted async def test_incr(mcache: Client) -> None: key, value = b'key:incr:1', b'1' await mcache.set(key, value) test_value1 = await mcache.incr(key, 2) assert test_value1 == 3 # make sure value exists test_value2 = await mcache.get(key) assert test_value2 == b"3" async def test_incr_errors(mcache: Client) -> None: key, value = b'key:incr:2', b'string' await mcache.set(key, value) with pytest.raises(ClientException): await mcache.incr(key, 2) with pytest.raises(ClientException): await mcache.incr(key, 3.14) # type: ignore[arg-type] async def test_decr(mcache: Client) -> None: key, value = b'key:decr:1', b'17' await mcache.set(key, value) test_value1 = await mcache.decr(key, 2) assert test_value1 == 15 test_value2 = await mcache.get(key) assert test_value2 == b"15" test_value3 = await mcache.decr(key, 1000) assert test_value3 == 0 async def test_decr_errors(mcache: Client) -> None: key, value = b'key:decr:2', b'string' await mcache.set(key, value) with pytest.raises(ClientException): await mcache.decr(key, 2) with pytest.raises(ClientException): await mcache.decr(key, 3.14) # type: ignore[arg-type] async def test_stats(mcache: Client) -> None: stats = await mcache.stats() assert b'pid' in stats async def test_touch(mcache: Client) -> None: key, value = b'key:touch:1', b'17' await mcache.set(key, value) test_value1 = await mcache.touch(key, 1) assert test_value1 test_value2 = await mcache.get(key) assert test_value2 == value await asyncio.sleep(1) test_value3 = await mcache.get(key) assert test_value3 is None test_value4 = await mcache.touch(b"not:" + key, 1) assert not test_value4 with mock.patch.object(mcache, '_execute_simple_command') as patched: fut: asyncio.Future[bytes] = asyncio.Future() fut.set_result(b'SERVER_ERROR error\r\n') patched.return_value = fut with pytest.raises(ClientException): await mcache.touch(b"not:" + key, 1) async def test_close(mcache: Client) -> None: await mcache.close() assert mcache._pool.size() == 0 @pytest.mark.parametrize( "value", [ "key", b"bkey", False, 1, None, 0.5, [1, 2, 3], tuple([1, 2, 3]), [datetime.date(2015, 12, 28)], bytes("!@#", "utf-8"), bytes("안녕하세요", "utf-8"), ] ) async def test_flag_helper( mcache_flag_client: FlagClient[Any], value: object) -> None: key = b"key:test_flag_helper" await mcache_flag_client.set(key, value) v2 = await mcache_flag_client.get(key) assert v2 == value async def 
test_objects_not_supported_without_flag_handler(mcache: Client) -> None: key = b"key:test_objects_not_supported_without_flag_handler" date_value = datetime.date(2015, 12, 28) with pytest.raises(ValidationException): await mcache.set(key, date_value) # type: ignore[arg-type] result = await mcache.get(key) assert result is None async def test_flag_handler_invoked_only_when_expected( mcache_flag_client: FlagClient[Any], demo_flag_helper: FlagHelperDemo) -> None: key = b"key:test_flag_handler_invoked_only_when_expected" orig_get_count = demo_flag_helper.get_invocation_count orig_set_count = demo_flag_helper.set_invocation_count # should be invoked on non-byte values date_value = datetime.date(2015, 12, 28) await mcache_flag_client.set(key, date_value) v2 = await mcache_flag_client.get(key) assert v2 == date_value assert orig_get_count + 1 == demo_flag_helper.get_invocation_count assert orig_set_count + 1 == demo_flag_helper.set_invocation_count # should not be invoked on byte values byte_value = bytes("안녕하세요", "utf-8") await mcache_flag_client.set(key, byte_value) v3 = await mcache_flag_client.get(key) assert v3 == byte_value assert orig_get_count + 1 == demo_flag_helper.get_invocation_count assert orig_set_count + 1 == demo_flag_helper.set_invocation_count ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050646.0 aiomcache-0.8.1/tests/conftest.py0000644000175100001730000001004614371500326016375 0ustar00runnerdockerimport contextlib import socket import sys import time import uuid from typing import Any, AsyncIterator, Callable, Iterator import docker as docker_mod import memcache import pytest import aiomcache from .flag_helper import FlagHelperDemo if sys.version_info < (3, 8): from typing_extensions import TypedDict else: from typing import TypedDict if sys.version_info < (3, 11): from typing_extensions import NotRequired else: from typing import NotRequired class McacheParams(TypedDict): host: str port: int class ServerParams(TypedDict): Id: NotRequired[str] host: str port: int mcache_params: McacheParams mcache_server_option = "localhost" def pytest_addoption(parser: pytest.Parser) -> None: parser.addoption( '--memcached', help='Memcached server') @pytest.fixture(scope='session') def unused_port() -> Callable[[], int]: def f() -> int: with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.bind(('127.0.0.1', 0)) return s.getsockname()[1] # type: ignore[no-any-return] return f def pytest_runtest_setup(item: pytest.Item) -> None: global mcache_server_option mcache_server_option = item.config.getoption("--memcached", "localhost") @pytest.fixture(scope='session') def session_id() -> str: '''Unique session identifier, random string.''' return str(uuid.uuid4()) @pytest.fixture(scope='session') def docker() -> docker_mod.Client: # type: ignore[no-any-unimported] return docker_mod.from_env() def mcache_server_actual(host: str, port: int = 11211) -> ServerParams: port = int(port) return { "host": host, "port": port, "mcache_params": {"host": host, "port": port} } @contextlib.contextmanager def mcache_server_docker( # type: ignore[no-any-unimported] unused_port: Callable[[], int], docker: docker_mod.Client, session_id: str ) -> Iterator[ServerParams]: docker.images.pull("memcached:alpine") container = docker.containers.run( image='memcached:alpine', name='memcached-test-server-{}'.format(session_id), ports={"11211/tcp": None}, detach=True, ) try: container.start() container.reload() net_settings = container.attrs["NetworkSettings"] host = 
net_settings["IPAddress"] port = int(net_settings["Ports"]["11211/tcp"][0]["HostPort"]) mcache_params: McacheParams = {"host": host, "port": port} delay = 0.001 for _i in range(10): try: conn = memcache.Client(["{host}:{port}".format_map(mcache_params)]) conn.get_stats() break except Exception: time.sleep(delay) delay *= 2 else: pytest.fail("Cannot start memcached") ret: ServerParams = { "Id": container.id, "host": host, "port": port, "mcache_params": mcache_params } time.sleep(0.1) yield ret finally: container.kill() container.remove() @pytest.fixture(scope='session') def mcache_server() -> ServerParams: return mcache_server_actual("localhost") @pytest.fixture def mcache_params(mcache_server: ServerParams) -> McacheParams: return mcache_server["mcache_params"] @pytest.fixture async def mcache(mcache_params: McacheParams) -> AsyncIterator[aiomcache.Client]: client = aiomcache.Client(**mcache_params) yield client await client.close() test_only_demo_flag_helper = FlagHelperDemo() @pytest.fixture async def demo_flag_helper() -> FlagHelperDemo: return test_only_demo_flag_helper @pytest.fixture async def mcache_flag_client( mcache_params: McacheParams, demo_flag_helper: FlagHelperDemo ) -> AsyncIterator[aiomcache.FlagClient[Any]]: client = aiomcache.FlagClient( get_flag_handler=demo_flag_helper.demo_get_flag_handler, set_flag_handler=demo_flag_helper.demo_set_flag_handler, **mcache_params) try: yield client finally: await client.close() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050646.0 aiomcache-0.8.1/tests/conn_args_test.py0000644000175100001730000000213314371500326017556 0ustar00runnerdockerimport ssl import sys from asyncio import StreamReader, StreamWriter from unittest import mock import pytest from aiomcache import Client from .conftest import McacheParams @pytest.mark.skipif(sys.version_info < (3, 8), reason="AsyncMock requires python3.8") async def test_params_forwarded_from_client() -> None: client = Client("host", port=11211, conn_args={ "ssl": True, "ssl_handshake_timeout": 20 }) with mock.patch( "asyncio.open_connection", return_value=( mock.create_autospec(StreamReader), mock.create_autospec(StreamWriter), ), autospec=True, ) as oc: await client._pool.acquire() oc.assert_called_with("host", 11211, ssl=True, ssl_handshake_timeout=20) async def test_ssl_client_fails_against_plaintext_server( mcache_params: McacheParams, ) -> None: client = Client(**mcache_params, conn_args={"ssl": True}) # If SSL was correctly enabled, this should # fail, since SSL isn't enabled on the memcache # server. 
with pytest.raises(ssl.SSLError): await client.get(b"key") ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050646.0 aiomcache-0.8.1/tests/flag_helper.py0000644000175100001730000000224214371500326017017 0ustar00runnerdockerimport pickle # noqa: S403 from enum import IntEnum from typing import Any, Tuple # See also: # https://github.com/lericson/pylibmc/blob/master/src/_pylibmcmodule.h#L63 class DemoFlags(IntEnum): DEMO_FLAG_PICKLE = 1 # demo/ref flag handler, for more elaborate potential handlers, see: # https://github.com/lericson/pylibmc/blob/master/src/_pylibmcmodule.c#L640 class FlagHelperDemo: get_invocation_count = 0 set_invocation_count = 0 async def demo_get_flag_handler(self, value: bytes, flags: int) -> Any: self.get_invocation_count += 1 if flags == DemoFlags.DEMO_FLAG_PICKLE: return pickle.loads(value) # noqa: S301 raise ValueError(f"unrecognized flag: {flags}") # demo/ref flag handler, for more elaborate potential handlers, see: # https://github.com/lericson/pylibmc/blob/master/src/_pylibmcmodule.c#L1241 async def demo_set_flag_handler(self, value: Any) -> Tuple[bytes, int]: self.set_invocation_count += 1 # in this example exclusively use Pickle, more elaborate handler # could use additional/alternate flags return pickle.dumps(value), DemoFlags.DEMO_FLAG_PICKLE.value ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1676050646.0 aiomcache-0.8.1/tests/pool_test.py0000644000175100001730000001142514371500326016562 0ustar00runnerdockerimport asyncio import random import socket import pytest from aiomcache.client import Client, acquire from aiomcache.pool import Connection, MemcachePool from .conftest import McacheParams async def test_pool_creation(mcache_params: McacheParams) -> None: pool = MemcachePool(minsize=1, maxsize=5, **mcache_params) assert pool.size() == 0 assert pool._minsize == 1 async def test_pool_acquire_release(mcache_params: McacheParams) -> None: pool = MemcachePool(minsize=1, maxsize=5, **mcache_params) conn = await pool.acquire() assert isinstance(conn.reader, asyncio.StreamReader) assert isinstance(conn.writer, asyncio.StreamWriter) pool.release(conn) await pool.clear() async def test_pool_acquire_release2(mcache_params: McacheParams) -> None: pool = MemcachePool(minsize=1, maxsize=5, **mcache_params) reader, writer = await asyncio.open_connection( mcache_params["host"], mcache_params["port"]) # put dead connection to the pool writer.close() reader.feed_eof() conn = Connection(reader, writer) await pool._pool.put(conn) conn = await pool.acquire() assert isinstance(conn.reader, asyncio.StreamReader) assert isinstance(conn.writer, asyncio.StreamWriter) pool.release(conn) await pool.clear() async def test_pool_clear(mcache_params: McacheParams) -> None: pool = MemcachePool(minsize=1, maxsize=5, **mcache_params) conn = await pool.acquire() pool.release(conn) assert pool.size() == 1 await pool.clear() assert pool._pool.qsize() == 0 async def test_acquire_dont_create_new_connection_if_have_conn_in_pool( mcache_params: McacheParams, ) -> None: pool = MemcachePool(minsize=1, maxsize=5, **mcache_params) assert pool.size() == 0 # Add a valid connection _conn = await pool._create_new_conn() assert _conn is not None await pool._pool.put(_conn) assert pool.size() == 1 conn = await pool.acquire() assert conn is _conn assert pool.size() == 1 pool.release(conn) await pool.clear() async def test_acquire_limit_maxsize(mcache_params: McacheParams) -> None: pool = MemcachePool(minsize=1, maxsize=1, 
**mcache_params) assert pool.size() == 0 # Create up to max connections _conn = await pool.acquire() assert pool.size() == 1 pool.release(_conn) async def acquire_wait_release() -> None: conn = await pool.acquire() assert conn is _conn await asyncio.sleep(0.01) assert len(pool._in_use) == 1 assert pool.size() == 1 assert pool._pool.qsize() == 0 pool.release(conn) await asyncio.gather(*([acquire_wait_release()] * 50)) assert pool.size() == 1 assert len(pool._in_use) == 0 assert pool._pool.qsize() == 1 await pool.clear() async def test_acquire_task_cancellation(mcache_params: McacheParams) -> None: class TestClient(Client): def __init__(self, pool_size: int = 4): self._pool = MemcachePool( minsize=pool_size, maxsize=pool_size, **mcache_params) @acquire async def acquire_wait_release(self, conn: Connection) -> str: assert self._pool.size() <= pool_size await asyncio.sleep(random.uniform(0.01, 0.02)) # noqa: S311 return "foo" pool_size = 4 client = TestClient(pool_size=pool_size) tasks = [ asyncio.wait_for( client.acquire_wait_release(), random.uniform(1, 2)) for x in range(1000) # noqa: S311 ] results = await asyncio.gather(*tasks, return_exceptions=True) assert client._pool.size() <= pool_size assert len(client._pool._in_use) == 0 assert "foo" in results await client._pool.clear() async def test_maxsize_greater_than_minsize(mcache_params: McacheParams) -> None: pool = MemcachePool(minsize=5, maxsize=1, **mcache_params) conn = await pool.acquire() assert isinstance(conn.reader, asyncio.StreamReader) assert isinstance(conn.writer, asyncio.StreamWriter) pool.release(conn) await pool.clear() async def test_0_minsize(mcache_params: McacheParams) -> None: pool = MemcachePool(minsize=0, maxsize=5, **mcache_params) conn = await pool.acquire() assert isinstance(conn.reader, asyncio.StreamReader) assert isinstance(conn.writer, asyncio.StreamWriter) pool.release(conn) await pool.clear() async def test_bad_connection(mcache_params: McacheParams) -> None: pool = MemcachePool(minsize=5, maxsize=1, **mcache_params) pool._host = "INVALID_HOST" assert pool.size() == 0 with pytest.raises(socket.gaierror): conn = await pool.acquire() assert isinstance(conn.reader, asyncio.StreamReader) assert isinstance(conn.writer, asyncio.StreamWriter) pool.release(conn) assert pool.size() == 0
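The ``gets``/``cas`` docstrings in ``aiomcache/client.py`` and ``test_gets_cas`` above describe optimistic updates: fetch a value together with its CAS token, then store only if it has not changed since. A minimal sketch of that pattern follows; ``add_to_counter`` and the retry loop are illustrative assumptions rather than library API, and a memcached server on ``127.0.0.1:11211`` with no pre-existing ``counter`` key is assumed. For plain counters ``Client.incr`` is simpler; this only shows the ``gets``/``cas`` round trip.

.. code:: python

    import asyncio

    import aiomcache

    async def add_to_counter(mc: aiomcache.Client, key: bytes, amount: int) -> None:
        # Optimistic update: retry whenever another writer wins the race.
        while True:
            value, cas_token = await mc.gets(key)
            if cas_token is None:
                # Key does not exist yet; try to create it.
                if await mc.add(key, str(amount).encode()):
                    return
                continue
            new_value = str(int(value) + amount).encode()
            if await mc.cas(key, new_value, cas_token):
                return

    async def main() -> None:
        mc = aiomcache.Client("127.0.0.1", 11211)
        await asyncio.gather(*(add_to_counter(mc, b"counter", 1) for _ in range(10)))
        print(await mc.get(b"counter"))  # b"10" if the key started out absent
        await mc.close()

    asyncio.run(main())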