diff --git a/.flake8 b/.flake8
index 9d1c63c2..2e675565 100644
--- a/.flake8
+++ b/.flake8
@@ -5,9 +5,10 @@ max-line-length = 90
 select = A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,B901,B902,B903,B950
 # E226: Missing whitespace around arithmetic operators can help group things together.
 # E501,W505: Superseeded by B950 (from Bugbear)
+# E704: Allow overloaded function to use Ellipsis as body
 # E722: Superseeded by B001 (from Bugbear)
 # W503: Mutually exclusive with W504.
-ignore = E226,E501,E722,W503,W505
+ignore = E226,E501,E704,E722,W503,W505
 per-file-ignores =
     # S*: Bandit security checks not useful in tests.
     tests/*:S
diff --git a/aiocache/backends/memcached.py b/aiocache/backends/memcached.py
index 76ac34e1..feac923a 100644
--- a/aiocache/backends/memcached.py
+++ b/aiocache/backends/memcached.py
@@ -1,43 +1,129 @@
 import asyncio
-from typing import Optional
+import sys
+from typing import Any, Iterable, Literal, Union, overload
 
 import aiomcache
 
-from aiocache.base import BaseCache
+from aiocache.base import BaseCache, BaseCacheArgs, _Conn
 from aiocache.serializers import JsonSerializer
 
+if sys.version_info >= (3, 11):
+    from typing import Unpack
+else:
+    from typing_extensions import Unpack  # noqa: I900
 
-class MemcachedBackend(BaseCache[bytes]):
-    def __init__(self, host="127.0.0.1", port=11211, pool_size=2, **kwargs):
+
+class MemcachedCache(BaseCache[bytes]):
+    """
+    Memcached cache implementation with the following components as defaults:
+        - serializer: :class:`aiocache.serializers.JsonSerializer`
+        - plugins: []
+
+    Config options are:
+
+    :param serializer: obj derived from :class:`aiocache.serializers.BaseSerializer`.
+    :param plugins: list of :class:`aiocache.plugins.BasePlugin` derived classes.
+    :param namespace: string to use as default prefix for the key used in all operations of
+        the backend. Default is an empty string, "".
+    :param timeout: int or float in seconds specifying maximum timeout for the operations to last.
+        By default, it's 5.
+    :param endpoint: str with the endpoint to connect to. Default is 127.0.0.1.
+    :param port: int with the port to connect to. Default is 11211.
+    :param pool_size: int size for memcached connections pool. Default is 2.
+    """
+
+    NAME = "memcached"
+
+    def __init__(
+        self,
+        host: str = "127.0.0.1",
+        port: int = 11211,
+        pool_size: int = 2,
+        **kwargs: Unpack[BaseCacheArgs],
+    ) -> None:
+        if "serializer" not in kwargs:
+            kwargs["serializer"] = JsonSerializer()
         super().__init__(**kwargs)
         self.host = host
         self.port = port
         self.pool_size = int(pool_size)
-        self.client = aiomcache.Client(
-            self.host, self.port, pool_size=self.pool_size
-        )
-
-    async def _get(self, key, encoding="utf-8", _conn=None):
+        self.client = aiomcache.Client(self.host, self.port, pool_size=self.pool_size)
+
+    @overload
+    async def _get(
+        self,
+        key: bytes,
+        encoding: str = "utf-8",
+        _conn: Union[_Conn, None] = None,
+    ) -> Union[str, None]: ...
+
+    @overload
+    async def _get(
+        self, key: bytes, encoding: None, _conn: Union[_Conn, None] = None
+    ) -> Union[bytes, None]: ...
+
+    async def _get(
+        self,
+        key: bytes,
+        encoding: Union[str, None] = "utf-8",
+        _conn: Union[_Conn, None] = None,
+    ) -> Union[bytes, str, None]:
         value = await self.client.get(key)
         if encoding is None or value is None:
             return value
         return value.decode(encoding)
 
-    async def _gets(self, key, encoding="utf-8", _conn=None):
+    async def _gets(
+        self,
+        key: Union[bytes, str],
+        encoding: str = "utf-8",
+        _conn: Union[_Conn, None] = None,
+    ) -> Union[int, None]:
         key = key.encode() if isinstance(key, str) else key
         _, token = await self.client.gets(key)
         return token
 
-    async def _multi_get(self, keys, encoding="utf-8", _conn=None):
-        values = []
-        for value in await self.client.multi_get(*keys):
-            if encoding is None or value is None:
-                values.append(value)
-            else:
-                values.append(value.decode(encoding))
-        return values
-
-    async def _set(self, key, value, ttl=0, _cas_token=None, _conn=None):
+    @overload
+    async def _multi_get(
+        self,
+        keys: Iterable[bytes],
+        encoding: str = "utf-8",
+        _conn: Union[_Conn, None] = None,
+    ) -> list[Union[str, None]]: ...
+
+    @overload
+    async def _multi_get(
+        self,
+        keys: Iterable[bytes],
+        encoding: None,
+        _conn: Union[_Conn, None] = None,
+    ) -> list[Union[bytes, None]]: ...
+
+    async def _multi_get(
+        self,
+        keys: Iterable[bytes],
+        encoding: Union[str, None] = "utf-8",
+        _conn: Union[_Conn, None] = None,
+    ) -> Union[
+        list[Union[str, None]],
+        list[Union[bytes, None]],
+    ]:
+        raw_values = await self.client.multi_get(*keys)
+        if encoding is None:
+            return list(raw_values)
+
+        return [
+            None if value is None else value.decode(encoding) for value in raw_values
+        ]
+
+    async def _set(
+        self,
+        key: bytes,
+        value: Union[str, bytes],
+        ttl: int = 0,
+        _cas_token: Union[int, None] = None,
+        _conn: Union[_Conn, None] = None,
+    ) -> bool:
         value = value.encode() if isinstance(value, str) else value
         if _cas_token is not None:
             return await self._cas(key, value, _cas_token, ttl=ttl, _conn=_conn)
@@ -46,10 +132,23 @@ async def _set(self, key, value, ttl=0, _cas_token=None, _conn=None):
         except aiomcache.exceptions.ValidationException as e:
             raise TypeError("aiomcache error: {}".format(str(e)))
 
-    async def _cas(self, key, value, token, ttl=None, _conn=None):
+    async def _cas(
+        self,
+        key: bytes,
+        value: Union[str, bytes],
+        token: int,
+        ttl: Union[int, None] = None,
+        _conn: Union[_Conn, None] = None,
+    ) -> bool:
+        value = str.encode(value) if isinstance(value, str) else value
         return await self.client.cas(key, value, token, exptime=ttl or 0)
 
-    async def _multi_set(self, pairs, ttl=0, _conn=None):
+    async def _multi_set(
+        self,
+        pairs: Iterable[tuple[bytes, Union[str, bytes]]],
+        ttl: int = 0,
+        _conn: Union[_Conn, None] = None,
+    ) -> bool:
         tasks = []
         for key, value in pairs:
             value = str.encode(value) if isinstance(value, str) else value
@@ -62,21 +161,31 @@ async def _multi_set(self, pairs, ttl=0, _conn=None):
 
         return True
 
-    async def _add(self, key, value, ttl=0, _conn=None):
-        value = str.encode(value) if isinstance(value, str) else value
+    async def _add(
+        self,
+        key: bytes,
+        value: Union[str, bytes],
+        ttl: int = 0,
+        _conn: Union[_Conn, None] = None,
+    ) -> bool:
+        value_bytes = str.encode(value) if isinstance(value, str) else value
         try:
-            ret = await self.client.add(key, value, exptime=ttl or 0)
+            ret = await self.client.add(key, value_bytes, exptime=ttl or 0)
         except aiomcache.exceptions.ValidationException as e:
             raise TypeError("aiomcache error: {}".format(str(e)))
         if not ret:
-            raise ValueError("Key {} already exists, use .set to update the value".format(key))
+            raise ValueError(
+                "Key {!r} already exists, use .set to update the value".format(key)
+            )
         return True
 
-    async def _exists(self, key, _conn=None):
+    async def _exists(self, key: bytes, _conn: Union[_Conn, None] = None) -> bool:
         return await self.client.append(key, b"")
 
-    async def _increment(self, key, delta, _conn=None):
+    async def _increment(
+        self, key: bytes, delta: int, _conn: Union[_Conn, None] = None
+    ) -> int:
         incremented = None
         try:
             if delta > 0:
@@ -91,66 +200,56 @@ async def _increment(self, key, delta, _conn=None):
 
         return incremented or delta
 
-    async def _expire(self, key, ttl, _conn=None):
+    async def _expire(
+        self, key: bytes, ttl: int, _conn: Union[_Conn, None] = None
+    ) -> bool:
         return await self.client.touch(key, ttl)
 
-    async def _delete(self, key, _conn=None):
+    async def _delete(
+        self, key: bytes, _conn: Union[str, None] = None
+    ) -> Literal[1, 0]:
         return 1 if await self.client.delete(key) else 0
 
-    async def _clear(self, namespace=None, _conn=None):
+    async def _clear(
+        self, namespace: Union[str, None] = None, _conn: Union[_Conn, None] = None
+    ) -> bool:
         if namespace:
-            raise ValueError("MemcachedBackend doesnt support flushing by namespace")
+            raise ValueError("MemcachedCache doesn't support flushing by namespace")
         else:
             await self.client.flush_all()
         return True
 
-    async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs):
+    async def _raw(
+        self,
+        command: str,
+        *args: Any,
+        encoding: str = "utf-8",
+        _conn: Union[_Conn, None] = None,
+        **kwargs: Any,
+    ) -> Any:
         value = await getattr(self.client, command)(*args, **kwargs)
         if command in {"get", "multi_get"}:
             if encoding is not None and value is not None:
                 return value.decode(encoding)
         return value
 
-    async def _redlock_release(self, key, _):
+    async def _redlock_release(self, key: bytes, _: Any) -> Literal[1, 0]:
         # Not ideal, should check the value coincides first but this would introduce
         # race conditions
         return await self._delete(key)
 
-    async def _close(self, *args, _conn=None, **kwargs):
+    async def _close(
+        self, *args: Any, _conn: Union[_Conn, None] = None, **kwargs: Any
+    ) -> None:
         await self.client.close()
 
-    def build_key(self, key: str, namespace: Optional[str] = None) -> bytes:
+    def build_key(self, key: str, namespace: Union[str, None] = None) -> bytes:
         ns_key = self._str_build_key(key, namespace).replace(" ", "_")
         return str.encode(ns_key)
 
-
-class MemcachedCache(MemcachedBackend):
-    """
-    Memcached cache implementation with the following components as defaults:
-        - serializer: :class:`aiocache.serializers.JsonSerializer`
-        - plugins: []
-
-    Config options are:
-
-    :param serializer: obj derived from :class:`aiocache.serializers.BaseSerializer`.
-    :param plugins: list of :class:`aiocache.plugins.BasePlugin` derived classes.
-    :param namespace: string to use as default prefix for the key used in all operations of
-        the backend. Default is an empty string, "".
-    :param timeout: int or float in seconds specifying maximum timeout for the operations to last.
-        By default its 5.
-    :param endpoint: str with the endpoint to connect to. Default is 127.0.0.1.
-    :param port: int with the port to connect to. Default is 11211.
-    :param pool_size: int size for memcached connections pool. Default is 2.
- """ - - NAME = "memcached" - - def __init__(self, serializer=None, **kwargs): - super().__init__(serializer=serializer or JsonSerializer(), **kwargs) - @classmethod - def parse_uri_path(cls, path): + def parse_uri_path(cls, path: str) -> dict[Any, Any]: return {} - def __repr__(self): # pragma: no cover + def __repr__(self) -> str: # pragma: no cover return "MemcachedCache ({}:{})".format(self.host, self.port) diff --git a/aiocache/backends/memory.py b/aiocache/backends/memory.py index 61cd90aa..aeb7d353 100644 --- a/aiocache/backends/memory.py +++ b/aiocache/backends/memory.py @@ -1,34 +1,71 @@ import asyncio -from typing import Any, Dict, Optional +import sys +from typing import Any, Iterable, Literal, Union -from aiocache.base import BaseCache +from aiocache.base import BaseCache, BaseCacheArgs, _Conn from aiocache.serializers import NullSerializer +if sys.version_info >= (3, 11): + from typing import Unpack +else: + from typing_extensions import Unpack # noqa: I900 -class SimpleMemoryBackend(BaseCache[str]): + +class SimpleMemoryCache(BaseCache[str]): """ - Wrapper around dict operations to use it as a cache backend + Memory cache implementation with the following components as defaults: + - serializer: :class:`aiocache.serializers.NullSerializer` + - plugins: None + - backend: dict + + Config options are: + + :param serializer: obj derived from :class:`aiocache.serializers.BaseSerializer`. + :param plugins: list of :class:`aiocache.plugins.BasePlugin` derived classes. + :param namespace: string to use as default prefix for the key used in all operations of + the backend. Default is an empty string, "". + :param timeout: int or float in seconds specifying maximum timeout for the operations to last. + By default, its 5. """ - # TODO(PY312): https://peps.python.org/pep-0692/ - def __init__(self, **kwargs: Any): + NAME = "memory" + + def __init__(self, **kwargs: Unpack[BaseCacheArgs]): + if "serializer" not in kwargs: + kwargs["serializer"] = NullSerializer() super().__init__(**kwargs) - self._cache: Dict[str, object] = {} - self._handlers: Dict[str, asyncio.TimerHandle] = {} + self._cache: dict[str, object] = {} + self._handlers: dict[str, asyncio.TimerHandle] = {} - async def _get(self, key, encoding="utf-8", _conn=None): + async def _get( + self, key: str, encoding: str = "utf-8", _conn: Union[_Conn, None] = None + ) -> Union[object, None]: return self._cache.get(key) - async def _gets(self, key, encoding="utf-8", _conn=None): + async def _gets( + self, key: str, encoding: str = "utf-8", _conn: Union[_Conn, None] = None + ) -> Union[object, None]: return await self._get(key, encoding=encoding, _conn=_conn) - async def _multi_get(self, keys, encoding="utf-8", _conn=None): + async def _multi_get( + self, + keys: Iterable[str], + encoding: str = "utf-8", + _conn: Union[_Conn, None] = None, + ) -> Union[object, None]: return [self._cache.get(key) for key in keys] - async def _set(self, key, value, ttl=None, _cas_token=None, _conn=None): + async def _set( + self, + key: str, + value: object, + ttl: Union[int, None] = None, + _cas_token: Union[int, None] = None, + _conn: Union[_Conn, None] = None, + ) -> bool: if _cas_token is not None and _cas_token != self._cache.get(key): - return 0 + return False if key in self._handlers: self._handlers[key].cancel() @@ -39,32 +76,49 @@ async def _set(self, key, value, ttl=None, _cas_token=None, _conn=None): self._handlers[key] = loop.call_later(ttl, self.__delete, key) return True - async def _multi_set(self, pairs, ttl=None, _conn=None): + async def 
_multi_set( + self, + pairs: Iterable[tuple[str, object]], + ttl: Union[int, None] = None, + _conn: Union[_Conn, None] = None, + ) -> bool: for key, value in pairs: await self._set(key, value, ttl=ttl) return True - async def _add(self, key, value, ttl=None, _conn=None): + async def _add( + self, + key: str, + value: object, + ttl: Union[int, None] = None, + _conn: Union[_Conn, None] = None, + ) -> bool: if key in self._cache: - raise ValueError("Key {} already exists, use .set to update the value".format(key)) + raise ValueError( + "Key {} already exists, use .set to update the value".format(key) + ) await self._set(key, value, ttl=ttl) return True - async def _exists(self, key, _conn=None): + async def _exists(self, key: str, _conn: Union[_Conn, None] = None) -> bool: return key in self._cache - async def _increment(self, key, delta, _conn=None): + async def _increment( + self, key: str, delta: int, _conn: Union[_Conn, None] = None + ) -> int: if key not in self._cache: self._cache[key] = delta else: try: - self._cache[key] = int(self._cache[key]) + delta + self._cache[key] = int(self._cache[key]) + delta # type: ignore[call-overload] except ValueError: raise TypeError("Value is not an integer") from None - return self._cache[key] + return self._cache[key] # type: ignore[return-value] - async def _expire(self, key, ttl, _conn=None): + async def _expire( + self, key: str, ttl: int, _conn: Union[_Conn, None] = None + ) -> bool: if key in self._cache: handle = self._handlers.pop(key, None) if handle: @@ -76,10 +130,14 @@ async def _expire(self, key, ttl, _conn=None): return False - async def _delete(self, key, _conn=None): + async def _delete( + self, key: str, _conn: Union[_Conn, None] = None + ) -> Literal[1, 0]: return self.__delete(key) - async def _clear(self, namespace=None, _conn=None): + async def _clear( + self, namespace: Union[str, None] = None, _conn: Union[_Conn, None] = None + ) -> bool: if namespace: for key in list(self._cache): if key.startswith(namespace): @@ -89,15 +147,22 @@ async def _clear(self, namespace=None, _conn=None): self._handlers = {} return True - async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs): + async def _raw( + self, + command: str, + *args: Any, + encoding: str = "utf-8", + _conn: Union[_Conn, None] = None, + **kwargs: Any, + ) -> Any: return getattr(self._cache, command)(*args, **kwargs) - async def _redlock_release(self, key, value): + async def _redlock_release(self, key: str, value: object) -> Literal[1, 0]: if self._cache.get(key) == value: return self.__delete(key) return 0 - def __delete(self, key): + def __delete(self, key: str) -> Literal[1, 0]: if self._cache.pop(key, None) is not None: handle = self._handlers.pop(key, None) if handle: @@ -106,31 +171,9 @@ def __delete(self, key): return 0 - def build_key(self, key: str, namespace: Optional[str] = None) -> str: + def build_key(self, key: str, namespace: Union[str, None] = None) -> str: return self._str_build_key(key, namespace) - -class SimpleMemoryCache(SimpleMemoryBackend): - """ - Memory cache implementation with the following components as defaults: - - serializer: :class:`aiocache.serializers.NullSerializer` - - plugins: None - - Config options are: - - :param serializer: obj derived from :class:`aiocache.serializers.BaseSerializer`. - :param plugins: list of :class:`aiocache.plugins.BasePlugin` derived classes. - :param namespace: string to use as default prefix for the key used in all operations of - the backend. Default is an empty string, "". 
-    :param timeout: int or float in seconds specifying maximum timeout for the operations to last.
-        By default its 5.
-    """
-
-    NAME = "memory"
-
-    def __init__(self, serializer=None, **kwargs):
-        super().__init__(serializer=serializer or NullSerializer(), **kwargs)
-
     @classmethod
-    def parse_uri_path(cls, path):
+    def parse_uri_path(cls, path: str) -> dict[Any, Any]:
         return {}
diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py
index f55a7eaf..fb9fcc54 100644
--- a/aiocache/backends/valkey.py
+++ b/aiocache/backends/valkey.py
@@ -1,6 +1,6 @@
 import logging
 import sys
-from typing import Any, Callable, Optional
+from typing import Any, Iterable, Literal, Mapping, Optional, TypedDict, Union, overload
 
 from glide import (
     Batch,
@@ -12,31 +12,81 @@
 )
 from glide.exceptions import RequestError as IncrbyException
 
-from aiocache.base import BaseCache
-from aiocache.serializers import BaseSerializer, JsonSerializer
+from aiocache.base import BaseCache, BaseCacheArgs, _Conn
+from aiocache.serializers import JsonSerializer
 
 if sys.version_info >= (3, 11):
-    from typing import Self
+    from typing import Self, Unpack
 else:
-    from typing import Any as Self
+    from typing_extensions import Self, Unpack  # noqa: I900
 
 logger = logging.getLogger(__name__)
 
 
-class ValkeyBackend(BaseCache[str]):
-    def __init__(self, config: GlideClientConfiguration, **kwargs):
+class _AddKwargs(TypedDict, total=False):
+    conditional_set: ConditionalChange
+    expiry: Optional[ExpirySet]
+    return_old_value: bool
+
+
+class ValkeyCache(BaseCache[str]):
+    """
+    Valkey cache implementation with the following components as defaults:
+        - serializer: :class:`aiocache.serializers.JsonSerializer`
+        - plugins: []
+
+    Config options are:
+
+    :param serializer: obj derived from :class:`aiocache.serializers.BaseSerializer`.
+    :param plugins: list of :class:`aiocache.plugins.BasePlugin` derived classes.
+    :param namespace: string to use as default prefix for the key used in all operations of
+        the backend. Default is an empty string, "".
+    :param timeout: int or float in seconds specifying maximum timeout for the operations to last.
+        By default, it's 5.
+    :param client: glide.GlideClient which is an active client for working with valkey
+    """
+
+    NAME = "valkey"
+
+    def __init__(
+        self, config: GlideClientConfiguration, **kwargs: Unpack[BaseCacheArgs]
+    ) -> None:
         self.config = config
+
+        if "serializer" not in kwargs:
+            kwargs["serializer"] = JsonSerializer()
+        if "key_builder" not in kwargs:
+            kwargs["key_builder"] = lambda k, ns: f"{ns}:{k}" if ns else k
+
         super().__init__(**kwargs)
 
     async def __aenter__(self) -> Self:
         self.client = await GlideClient.create(self.config)
         return self
 
-    async def __aexit__(self, *args, **kwargs) -> None:
+    async def __aexit__(self, *args: Any, **kwargs: Any) -> None:
         await self.client.close()
 
-    async def _get(self, key, encoding="utf-8", _conn=None):
+    @overload
+    async def _get(
+        self,
+        key: str,
+        encoding: str = "utf-8",
+        _conn: Union[_Conn, None] = None,
+    ) -> Union[str, None]: ...
+
+    @overload
+    async def _get(
+        self, key: str, encoding: None, _conn: Union[_Conn, None] = None
+    ) -> Union[bytes, None]: ...
+
+    async def _get(
+        self,
+        key: str,
+        encoding: Union[str, None] = "utf-8",
+        _conn: Union[_Conn, None] = None,
+    ) -> Union[str, bytes, None]:
         value = await self.client.get(key)
         if encoding is None or value is None:
             return value
@@ -44,29 +94,76 @@ async def _get(self, key, encoding="utf-8", _conn=None):
 
     _gets = _get
 
-    async def _multi_get(self, keys, encoding="utf-8", _conn=None):
-        values = await self.client.mget(keys)
+    @overload
+    async def _multi_get(
+        self,
+        keys: list[str],
+        encoding: str = "utf-8",
+        _conn: Union[_Conn, None] = None,
+    ) -> list[Union[str, None]]: ...
+
+    @overload
+    async def _multi_get(
+        self,
+        keys: list[str],
+        encoding: None,
+        _conn: Union[_Conn, None] = None,
+    ) -> list[Union[bytes, None]]: ...
+
+    async def _multi_get(
+        self,
+        keys: list[str],
+        encoding: Union[str, None] = "utf-8",
+        _conn: Union[_Conn, None] = None,
+    ) -> Union[
+        list[Union[str, None]],
+        list[Union[bytes, None]],
+    ]:
+        values = await self.client.mget(keys)  # type: ignore[arg-type]
         if encoding is None:
             return values
         return [v if v is None else v.decode(encoding) for v in values]
 
-    async def _set(self, key, value, ttl=None, _cas_token=None, _conn=None):
+    async def _set(
+        self,
+        key: str,
+        value: Union[str, bytes],
+        ttl: Union[float, int, None] = None,
+        _cas_token: Union[str, None] = None,
+        _conn: Union[_Conn, None] = None,
+    ) -> bool:
         if isinstance(ttl, float):
-            ttl = ExpirySet(ExpiryType.MILLSEC, int(ttl * 1000))
+            ttl_ = ExpirySet(ExpiryType.MILLSEC, int(ttl * 1000))
         elif ttl:
-            ttl = ExpirySet(ExpiryType.SEC, ttl)
+            ttl_ = ExpirySet(ExpiryType.SEC, ttl)
+        else:
+            ttl_ = None
 
         if _cas_token is not None:
-            return await self._cas(key, value, _cas_token, ttl=ttl, _conn=_conn)
+            return await self._cas(key, value, _cas_token, ttl=ttl_, _conn=_conn)
 
-        return await self.client.set(key, value, expiry=ttl) == "OK"
+        return await self.client.set(key, value, expiry=ttl_) == "OK"  # type: ignore[comparison-overlap]
 
-    async def _cas(self, key, value, token, ttl=None, _conn=None):
+    async def _cas(
+        self,
+        key: str,
+        value: Union[str, bytes],
+        token: str,
+        ttl: Union[ExpirySet, int, None] = None,
+        _conn: Union[_Conn, None] = None,
+    ) -> bool:
         if await self._get(key) == token:
-            return await self.client.set(key, value, expiry=ttl) == "OK"
-        return 0
+            if isinstance(ttl, int):
+                ttl = ExpirySet(ExpiryType.SEC, ttl)
+            return await self.client.set(key, value, expiry=ttl) == "OK"  # type: ignore[comparison-overlap]
+        return False
 
-    async def _multi_set(self, pairs, ttl=None, _conn=None):
+    async def _multi_set(
+        self,
+        pairs: Iterable[tuple[str, Union[str, bytes]]],
+        ttl: Union[int, None] = None,
+        _conn: Union[_Conn, None] = None,
+    ) -> bool:
         values = dict(pairs)
 
         if ttl:
@@ -76,9 +173,11 @@ async def _multi_set(self, pairs, ttl=None, _conn=None):
 
         return True
 
-    async def __multi_set_ttl(self, values, ttl):
+    async def __multi_set_ttl(
+        self, values: Mapping[str, Union[str, bytes]], ttl: int
+    ) -> None:
         transaction = Batch(is_atomic=True)
-        transaction.mset(values)
+        transaction.mset(values)  # type: ignore[arg-type]
         ttl, exp = (
             (int(ttl * 1000), transaction.pexpire)
             if isinstance(ttl, float)
@@ -88,54 +187,76 @@
             exp(key, ttl)
         await self.client.exec(transaction, raise_on_error=True)
 
-    async def _add(self, key, value, ttl=None, _conn=None):
-        kwargs = {"conditional_set": ConditionalChange.ONLY_IF_DOES_NOT_EXIST}
+    async def _add(
+        self,
+        key: str,
+        value: Union[str, bytes],
+        ttl: Union[float, int, None] = None,
+        _conn: Union[_Conn, None] = None,
+    ) -> Literal["OK"]:
+        kwargs: _AddKwargs = {
+            "conditional_set": ConditionalChange.ONLY_IF_DOES_NOT_EXIST
+        }
         if isinstance(ttl, float):
             kwargs["expiry"] = ExpirySet(ExpiryType.MILLSEC, int(ttl * 1000))
         elif ttl:
             kwargs["expiry"] = ExpirySet(ExpiryType.SEC, ttl)
+
         was_set = await self.client.set(key, value, **kwargs)
-        if was_set != "OK":
+        if was_set != "OK":  # type: ignore[comparison-overlap]
             raise ValueError(
                 "Key {} already exists, use .set to update the value".format(key)
             )
-        return was_set
+        return was_set  # type: ignore[return-value]
 
-    async def _exists(self, key, _conn=None):
+    async def _exists(self, key: str, _conn: Union[_Conn, None] = None) -> bool:
         return bool(await self.client.exists([key]))
 
-    async def _increment(self, key, delta, _conn=None):
+    async def _increment(
+        self, key: str, delta: int, _conn: Union[_Conn, None] = None
+    ) -> int:
         try:
             return await self.client.incrby(key, delta)
         except IncrbyException:
             raise TypeError("Value is not an integer") from None
 
-    async def _expire(self, key, ttl, _conn=None):
+    async def _expire(
+        self, key: str, ttl: int, _conn: Union[_Conn, None] = None
+    ) -> bool:
         if ttl == 0:
             return await self.client.persist(key)
         return await self.client.expire(key, ttl)
 
-    async def _delete(self, key, _conn=None):
+    async def _delete(self, key: str, _conn: Union[_Conn, None] = None) -> int:
         return await self.client.delete([key])
 
-    async def _clear(self, namespace=None, _conn=None):
+    async def _clear(
+        self, namespace: Union[str, None] = None, _conn: Union[_Conn, None] = None
+    ) -> Union[Literal["OK"], bool]:
         if not namespace:
             return await self.client.flushdb()
         _, keys = await self.client.scan(b"0", "{}:*".format(namespace))
         if keys:
-            return bool(await self.client.delete(keys))
+            return bool(await self.client.delete(keys))  # type: ignore[arg-type]
         return True
 
-    async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs):
+    async def _raw(
+        self,
+        command: str,
+        *args: Any,
+        encoding: Union[str, None] = "utf-8",
+        _conn: Union[_Conn, None] = None,
+        **kwargs: Any,
+    ) -> Any:
         value = await getattr(self.client, command)(*args, **kwargs)
         if encoding is not None:
             if command == "get" and value is not None:
                 value = value.decode(encoding)
         return value
 
-    async def _redlock_release(self, key, value):
+    async def _redlock_release(self, key: str, value: Union[str, bytes]) -> int:
         if await self._get(key) == value:
             return await self.client.delete([key])
         return 0
@@ -143,44 +264,8 @@ async def _redlock_release(self, key, value):
     def build_key(self, key: str, namespace: Optional[str] = None) -> str:
         return self._str_build_key(key, namespace)
 
-
-class ValkeyCache(ValkeyBackend):
-    """
-    Valkey cache implementation with the following components as defaults:
-        - serializer: :class:`aiocache.serializers.JsonSerializer`
-        - plugins: []
-
-    Config options are:
-
-    :param serializer: obj derived from :class:`aiocache.serializers.BaseSerializer`.
-    :param plugins: list of :class:`aiocache.plugins.BasePlugin` derived classes.
-    :param namespace: string to use as default prefix for the key used in all operations of
-        the backend. Default is an empty string, "".
-    :param timeout: int or float in seconds specifying maximum timeout for the operations to last.
-        By default its 5.
-    :param client: glide.GlideClient which is an active client for working with valkey
-    """
-
-    NAME = "valkey"
-
-    def __init__(
-        self,
-        config: GlideClientConfiguration,
-        serializer: Optional[BaseSerializer] = None,
-        namespace: str = "",
-        key_builder: Callable[[str, str], str] = lambda k, ns: f"{ns}:{k}" if ns else k,
-        **kwargs: Any,
-    ):
-        super().__init__(
-            config,
-            serializer=serializer or JsonSerializer(),
-            namespace=namespace,
-            key_builder=key_builder,
-            **kwargs,
-        )
-
     @classmethod
-    def parse_uri_path(cls, path):
+    def parse_uri_path(cls, path: str) -> dict[str, str]:
         """
        Given a uri path, return the Valkey specific configuration
        options in that path string according to iana definition
@@ -195,7 +280,7 @@
             options["db"] = db
         return options
 
-    def __repr__(self):  # pragma: no cover
+    def __repr__(self) -> str:  # pragma: no cover
         return (
             f"ValkeyCache ({self.client.config.addresses[0].host}"
             f":{self.client.config.addresses[0].port})"
         )
diff --git a/aiocache/base.py b/aiocache/base.py
index 8aed6570..d3292012 100644
--- a/aiocache/base.py
+++ b/aiocache/base.py
@@ -6,7 +6,7 @@
 from abc import ABC, abstractmethod
 from enum import Enum
 from types import TracebackType
-from typing import Callable, Generic, List, Optional, Set, TYPE_CHECKING, Type, TypeVar
+from typing import Callable, Generic, List, Optional, Set, TYPE_CHECKING, Type, TypeVar, TypedDict
 
 from aiocache.serializers import StringSerializer
 
@@ -93,6 +93,15 @@ async def _plugins(self, *args, **kwargs):
     return _plugins
 
 
+class BaseCacheArgs(TypedDict, total=False):
+    serializer: Optional["BaseSerializer"]
+    plugins: Optional[List["BasePlugin"]]
+    namespace: str
+    key_builder: Callable[[str, str], str]
+    timeout: Optional[float]
+    ttl: Optional[float]
+
+
 class BaseCache(Generic[CacheKeyType], ABC):
     """
     Base class that agregates the common logic for the different caches that may exist. Cache
diff --git a/docs/v1_migration.rst b/docs/v1_migration.rst
index f787ce2b..ccd4c4fe 100644
--- a/docs/v1_migration.rst
+++ b/docs/v1_migration.rst
@@ -13,3 +13,4 @@ The abstraction and factories around cache instantiation have been removed in fa
 * The `aiocache.Cache` class has been removed. Instead, use the specific cache class directly. For example, use `aiocache.RedisCache` instead of `aiocache.Cache.REDIS`.
 * Caches should be fully instantiated when passed to decorators, rather than being instantiated with a factory function.
 * Cache aliases have been removed. Create an instance of the cache class directly instead.
+* The `MemcachedBackend`, `SimpleMemoryBackend` and `ValkeyBackend` classes have been removed. Use `MemcachedCache`, `SimpleMemoryCache` and `ValkeyCache` respectively.
diff --git a/pyproject.toml b/pyproject.toml
index dd5f1323..06aceb7c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,3 +1,3 @@
 [tool.black]
 line-length = 99
-target-version = ['py38', 'py39', 'py310', 'py311']
+target-version = ['py39', 'py310', 'py311']
diff --git a/setup.py b/setup.py
index f66abf47..b78c27ec 100644
--- a/setup.py
+++ b/setup.py
@@ -32,7 +32,9 @@
     ],
     python_requires=">=3.9",
     packages=("aiocache",),
-    install_requires=None,
+    install_requires=[
+        "typing-extensions>=4.14.0; python_version<'3.11'",
+    ],
     extras_require={
         "valkey": ["valkey-glide>=2.0.0"],
         "memcached": ["aiomcache>=0.5.2"],
diff --git a/tests/ut/backends/test_memcached.py b/tests/ut/backends/test_memcached.py
index f0de04cb..f1a96d09 100644
--- a/tests/ut/backends/test_memcached.py
+++ b/tests/ut/backends/test_memcached.py
@@ -3,7 +3,7 @@
 import aiomcache
 import pytest
 
-from aiocache.backends.memcached import MemcachedBackend, MemcachedCache
+from aiocache.backends.memcached import MemcachedCache
 from aiocache.base import BaseCache
 from aiocache.serializers import JsonSerializer
 from ...utils import Keys, ensure_key
@@ -11,7 +11,7 @@
 
 @pytest.fixture
 def memcached():
-    memcached = MemcachedBackend()
+    memcached = MemcachedCache()
     with patch.object(memcached, "client", autospec=True) as m:
         # Autospec messes up the signature on the decorated methods.
         for method in (
@@ -25,10 +25,16 @@ def memcached():
     yield memcached
 
 
-class TestMemcachedBackend:
+class TestMemcachedCache:
+    @pytest.fixture
+    def set_test_namespace(self, memcached_cache):
+        memcached_cache.namespace = "test"
+        yield
+        memcached_cache.namespace = None
+
     def test_setup(self):
         with patch.object(aiomcache, "Client", autospec=True) as aiomcache_client:
-            memcached = MemcachedBackend()
+            memcached = MemcachedCache()
 
         aiomcache_client.assert_called_with("127.0.0.1", 11211, pool_size=2)
 
@@ -38,7 +44,7 @@ def test_setup(self):
 
     def test_setup_override(self):
         with patch.object(aiomcache, "Client", autospec=True) as aiomcache_client:
-            memcached = MemcachedBackend(host="127.0.0.2", port=2, pool_size=10)
+            memcached = MemcachedCache(host="127.0.0.2", port=2, pool_size=10)
 
         aiomcache_client.assert_called_with("127.0.0.2", 2, pool_size=10)
 
@@ -48,7 +54,7 @@ def test_setup_override(self):
 
     def test_setup_casts(self):
         with patch.object(aiomcache, "Client", autospec=True) as aiomcache_client:
-            memcached = MemcachedBackend(pool_size="10")
+            memcached = MemcachedCache(pool_size="10")
 
         aiomcache_client.assert_called_with("127.0.0.1", 11211, pool_size=10)
 
@@ -227,14 +233,6 @@ async def test_close(self, memcached):
         await memcached._close()
         assert memcached.client.close.call_count == 1
 
-
-class TestMemcachedCache:
-    @pytest.fixture
-    def set_test_namespace(self, memcached_cache):
-        memcached_cache.namespace = "test"
-        yield
-        memcached_cache.namespace = None
-
     def test_name(self):
         assert MemcachedCache.NAME == "memcached"
diff --git a/tests/ut/backends/test_memory.py b/tests/ut/backends/test_memory.py
index 59a8504f..94bd7196 100644
--- a/tests/ut/backends/test_memory.py
+++ b/tests/ut/backends/test_memory.py
@@ -3,7 +3,7 @@
 
 import pytest
 
-from aiocache.backends.memory import SimpleMemoryBackend, SimpleMemoryCache
+from aiocache.backends.memory import SimpleMemoryCache
 from aiocache.base import BaseCache
 from aiocache.serializers import NullSerializer
 from ...utils import Keys
@@ -11,12 +11,12 @@
 
 @pytest.fixture
 def memory(mocker):
-    memory = SimpleMemoryBackend()
+    memory = SimpleMemoryCache()
     mocker.spy(memory, "_cache")
     return memory
 
 
-class TestSimpleMemoryBackend:
+class TestSimpleMemoryCache:
     async def test_get(self, memory):
         await memory._get(Keys.KEY)
         memory._cache.get.assert_called_with(Keys.KEY)
@@ -187,8 +187,6 @@ async def test_redlock_release_nokey(self, memory):
         memory._cache.get.assert_called_with(Keys.KEY)
         assert memory._cache.pop.call_count == 0
 
-
-class TestSimpleMemoryCache:
     def test_name(self):
         assert SimpleMemoryCache.NAME == "memory"
diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py
index f29171fe..d360886b 100644
--- a/tests/ut/backends/test_valkey.py
+++ b/tests/ut/backends/test_valkey.py
@@ -4,7 +4,7 @@
 from glide import Batch, ConditionalChange, ExpirySet, ExpiryType
 from glide.exceptions import RequestError
 
-from aiocache.backends.valkey import ValkeyBackend, ValkeyCache
+from aiocache.backends.valkey import ValkeyCache
 from aiocache.base import BaseCache
 from aiocache.serializers import JsonSerializer
 from ...utils import Keys, ensure_key
@@ -12,7 +12,7 @@
 
 @pytest.fixture
 async def valkey(valkey_config):
-    async with ValkeyBackend(config=valkey_config) as valkey:
+    async with ValkeyCache(config=valkey_config) as valkey:
         with patch.object(valkey, "client", autospec=True) as m:
             # These methods actually return an awaitable.
             for method in (
@@ -33,7 +33,13 @@ async def valkey(valkey_config):
     yield valkey
 
 
-class TestValkeyBackend:
+class TestValkeyCache:
+    @pytest.fixture
+    def set_test_namespace(self, valkey_cache):
+        valkey_cache.namespace = "test"
+        yield
+        valkey_cache.namespace = None
+
     async def test_get(self, valkey):
         valkey.client.get.return_value = b"value"
         assert await valkey._get(Keys.KEY) == "value"
@@ -203,14 +209,6 @@ async def test_redlock_release(self, mocker, valkey):
         valkey._get.assert_called_once_with(Keys.KEY)
         valkey.client.delete.assert_called_once_with([Keys.KEY])
 
-
-class TestValkeyCache:
-    @pytest.fixture
-    def set_test_namespace(self, valkey_cache):
-        valkey_cache.namespace = "test"
-        yield
-        valkey_cache.namespace = None
-
     def test_name(self):
         assert ValkeyCache.NAME == "valkey"
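
Usage sketch (reviewer note, not part of the patch): with the *Backend/*Cache split removed, each cache class now applies its serializer default inside __init__, so construction is a single step. A minimal example against the merged classes, assuming the public BaseCache get/set/close API is untouched by this diff; the server addresses, namespace, and keys are illustrative, and NodeAddress is assumed from valkey-glide's public API alongside the GlideClientConfiguration already imported in the patch.

    import asyncio

    from glide import GlideClientConfiguration, NodeAddress

    from aiocache.backends.memcached import MemcachedCache
    from aiocache.backends.memory import SimpleMemoryCache
    from aiocache.backends.valkey import ValkeyCache


    async def main() -> None:
        # SimpleMemoryCache injects NullSerializer itself when no "serializer"
        # kwarg is passed (formerly the job of the removed *Cache subclass).
        memory = SimpleMemoryCache(namespace="demo")
        await memory.set("spam", "eggs", ttl=10)
        assert await memory.get("spam") == "eggs"

        # MemcachedCache defaults to JsonSerializer the same way.
        memcached = MemcachedCache(host="127.0.0.1", port=11211)
        await memcached.close()

        # ValkeyCache connects in __aenter__ (GlideClient.create) and closes
        # the client in __aexit__, as the updated test fixture exercises.
        config = GlideClientConfiguration(addresses=[NodeAddress("localhost", 6379)])
        async with ValkeyCache(config) as valkey:
            await valkey.set("spam", "eggs", ttl=10)


    asyncio.run(main())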