From 67f367b48b7639b94d52ab64536aef209564a344 Mon Sep 17 00:00:00 2001 From: Mike O'Connor Date: Sat, 18 Jan 2025 15:44:31 +0000 Subject: [PATCH] Remove aiocache.Cache and alias support (#948) --- README.rst | 2 +- aiocache/__init__.py | 14 +- aiocache/decorators.py | 136 +-------- aiocache/factory.py | 274 ----------------- docs/caches.rst | 13 +- docs/configuration.rst | 26 -- docs/index.rst | 20 +- docs/plugins.rst | 4 +- docs/serializers.rst | 4 +- docs/v1_migration.rst | 15 + examples/alt_key_builder.py | 20 +- examples/cached_alias_config.py | 69 ----- examples/cached_decorator.py | 11 +- examples/frameworks/aiohttp_example.py | 8 +- examples/frameworks/sanic_example.py | 6 +- examples/frameworks/tornado_example.py | 6 +- examples/marshmallow_serializer_class.py | 4 +- examples/multicached_decorator.py | 9 +- examples/optimistic_lock.py | 5 +- examples/plugins.py | 4 +- examples/python_object.py | 4 +- examples/redlock.py | 4 +- examples/serializer_class.py | 4 +- examples/serializer_function.py | 5 +- examples/simple_redis.py | 5 +- tests/acceptance/conftest.py | 23 +- tests/acceptance/test_decorators.py | 50 ++-- tests/acceptance/test_factory.py | 52 ---- tests/performance/conftest.py | 8 +- tests/performance/server.py | 34 ++- tests/ut/conftest.py | 13 - tests/ut/test_decorators.py | 190 +++--------- tests/ut/test_factory.py | 361 ----------------------- 33 files changed, 185 insertions(+), 1218 deletions(-) delete mode 100644 aiocache/factory.py delete mode 100644 docs/configuration.rst create mode 100644 docs/v1_migration.rst delete mode 100644 examples/cached_alias_config.py delete mode 100644 tests/acceptance/test_factory.py delete mode 100644 tests/ut/test_factory.py diff --git a/README.rst b/README.rst index e1c9271b..9b7c65e5 100644 --- a/README.rst +++ b/README.rst @@ -85,7 +85,7 @@ Or as a decorator @cached( - ttl=10, cache=Cache.REDIS, key="key", serializer=PickleSerializer(), port=6379, namespace="main") + cache=RedisCache(), key="key", 
serializer=PickleSerializer(), port=6379, namespace="main") async def cached_call(): print("Sleeping for three seconds zzzz.....") await asyncio.sleep(3) diff --git a/aiocache/__init__.py b/aiocache/__init__.py index c2b5b765..4b5abe2f 100644 --- a/aiocache/__init__.py +++ b/aiocache/__init__.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Dict, Type +from typing import Any, Type from .backends.memory import SimpleMemoryCache from .base import BaseCache @@ -8,7 +8,7 @@ logger = logging.getLogger(__name__) -AIOCACHE_CACHES: Dict[str, Type[BaseCache[Any]]] = {SimpleMemoryCache.NAME: SimpleMemoryCache} +_AIOCACHE_CACHES: list[Type[BaseCache[Any]]] = [SimpleMemoryCache] try: import redis @@ -17,7 +17,7 @@ else: from aiocache.backends.redis import RedisCache - AIOCACHE_CACHES[RedisCache.NAME] = RedisCache + _AIOCACHE_CACHES.append(RedisCache) del redis try: @@ -27,18 +27,14 @@ else: from aiocache.backends.memcached import MemcachedCache - AIOCACHE_CACHES[MemcachedCache.NAME] = MemcachedCache + _AIOCACHE_CACHES.append(MemcachedCache) del aiomcache from .decorators import cached, cached_stampede, multi_cached # noqa: E402,I202 -from .factory import Cache, caches # noqa: E402 - __all__ = ( - "caches", - "Cache", "cached", "cached_stampede", "multi_cached", - *(c.__name__ for c in AIOCACHE_CACHES.values()), + *sorted(c.__name__ for c in _AIOCACHE_CACHES), ) diff --git a/aiocache/decorators.py b/aiocache/decorators.py index d5cdac5d..d2c41b24 100644 --- a/aiocache/decorators.py +++ b/aiocache/decorators.py @@ -4,7 +4,6 @@ import logging from aiocache.base import SENTINEL -from aiocache.factory import Cache, caches from aiocache.lock import RedLock logger = logging.getLogger(__name__) @@ -15,27 +14,8 @@ class cached: Caches the functions return value into a key generated with module_name, function_name and args. The cache is available in the function object as ``.cache``. - In some cases you will need to send more args to configure the cache object. 
- An example would be endpoint and port for the Redis cache. You can send those args as - kwargs and they will be propagated accordingly. - - Only one cache instance is created per decorated call. If you expect high concurrency of - calls to the same function, you should adapt the pool size as needed. - - Extra args that are injected in the function that you can use to control the cache - behavior are: - - - ``cache_read``: Controls whether the function call will try to read from cache first or - not. Enabled by default. - - ``cache_write``: Controls whether the function call will try to write in the cache once - the result has been retrieved. Enabled by default. - - ``aiocache_wait_for_write``: Controls whether the call of the function will wait for the - value in the cache to be written. If set to False, the write - happens in the background. Enabled by default - + :param cache: cache instance to use when calling the ``set``/``get`` operations. :param ttl: int seconds to store the function call. Default is None which means no expiration. - :param namespace: string to use as default prefix for the key used in all operations of - the backend. Default is an empty string, "". :param key_builder: Callable that allows to build the function dynamically. It receives the function plus same args and kwargs passed to the function. This behavior is necessarily different than ``BaseCache.build_key()`` @@ -43,15 +23,6 @@ class cached: wrapped function and should return `True` if the value should skip the cache (or `False` to store in the cache). e.g. to avoid caching `None` results: `lambda r: r is None` - :param cache: cache class to use when calling the ``set``/``get`` operations. - Default is :class:`aiocache.SimpleMemoryCache`. - :param serializer: serializer instance to use when calling the ``dumps``/``loads``. - If its None, default one from the cache backend is used. 
- :param plugins: list plugins to use when calling the cmd hooks - Default is pulled from the cache class being used. - :param alias: str specifying the alias to load the config from. If alias is passed, other - config parameters are ignored. Same cache identified by alias is used on every call. If - you need a per function cache, specify the parameters explicitly without using alias. :param noself: bool if you are decorating a class function, by default self is also used to generate the key. This will result in same function calls done by different class instances to use different cache keys. Use noself=True if you want to ignore it. @@ -59,45 +30,20 @@ class cached: def __init__( self, + cache, + *, ttl=SENTINEL, - namespace="", key_builder=None, skip_cache_func=lambda x: False, - cache=Cache.MEMORY, - serializer=None, - plugins=None, - alias=None, noself=False, - **kwargs, ): self.ttl = ttl self.key_builder = key_builder self.skip_cache_func = skip_cache_func self.noself = noself - self.alias = alias - self.cache = None - - self._cache = cache - self._serializer = serializer - self._namespace = namespace - self._plugins = plugins - self._kwargs = kwargs + self.cache = cache def __call__(self, f): - if self.alias: - self.cache = caches.get(self.alias) - for arg in ("serializer", "namespace", "plugins"): - if getattr(self, f'_{arg}', None) is not None: - logger.warning(f"Using cache alias; ignoring {arg!r} argument.") - else: - self.cache = _get_cache( - cache=self._cache, - serializer=self._serializer, - namespace=self._namespace, - plugins=self._plugins, - **self._kwargs, - ) - @functools.wraps(f) async def wrapper(*args, **kwargs): return await self.decorator(f, *args, **kwargs) @@ -163,20 +109,13 @@ class cached_stampede(cached): Caches the functions return value into a key generated with module_name, function_name and args while avoids for cache stampede effects. - In some cases you will need to send more args to configure the cache object. 
- An example would be endpoint and port for the Redis cache. You can send those args as - kwargs and they will be propagated accordingly. - - Only one cache instance is created per decorated function. If you expect high concurrency - of calls to the same function, you should adapt the pool size as needed. - + :param cache: cache instance to use when calling the ``set``/``get`` operations. + Default is :class:`aiocache.SimpleMemoryCache`. :param lease: int seconds to lock function call to avoid cache stampede effects. If 0 or None, no locking happens (default is 2). redis and memory backends support float ttls :param ttl: int seconds to store the function call. Default is None which means no expiration. :param key_from_attr: str arg or kwarg name from the function to use as a key. - :param namespace: string to use as default prefix for the key used in all operations of - the backend. Default is an empty string, "". :param key_builder: Callable that allows to build the function dynamically. It receives the function plus same args and kwargs passed to the function. This behavior is necessarily different than ``BaseCache.build_key()`` @@ -184,21 +123,13 @@ class cached_stampede(cached): wrapped function and should return `True` if the value should skip the cache (or `False` to store in the cache). e.g. to avoid caching `None` results: `lambda r: r is None` - :param cache: cache class to use when calling the ``set``/``get`` operations. - Default is :class:`aiocache.SimpleMemoryCache`. - :param serializer: serializer instance to use when calling the ``dumps``/``loads``. - Default is JsonSerializer. - :param plugins: list plugins to use when calling the cmd hooks - Default is pulled from the cache class being used. - :param alias: str specifying the alias to load the config from. If alias is passed, - other config parameters are ignored. New cache is created every time. 
:param noself: bool if you are decorating a class function, by default self is also used to generate the key. This will result in same function calls done by different class instances to use different cache keys. Use noself=True if you want to ignore it. """ - def __init__(self, lease=2, **kwargs): - super().__init__(**kwargs) + def __init__(self, cache, lease=2, **kwargs): + super().__init__(cache, **kwargs) self.lease = lease async def decorator(self, f, *args, **kwargs): @@ -223,10 +154,6 @@ async def decorator(self, f, *args, **kwargs): return result -def _get_cache(cache=Cache.MEMORY, serializer=None, plugins=None, **cache_kwargs): - return Cache(cache, serializer=serializer, plugins=plugins, **cache_kwargs) - - def _get_args_dict(func, args, kwargs): defaults = { arg_name: arg.default @@ -261,9 +188,6 @@ class multi_cached: The cache is available in the function object as ``.cache``. - Only one cache instance is created per decorated function. If you expect high concurrency - of calls to the same function, you should adapt the pool size as needed. - Extra args that are injected in the function that you can use to control the cache behavior are: @@ -275,10 +199,9 @@ class multi_cached: value in the cache to be written. If set to False, the write happens in the background. Enabled by default + :param cache: cache instance to use when calling the ``multi_set``/``multi_get`` operations. :param keys_from_attr: name of the arg or kwarg in the decorated callable that contains an iterable that yields the keys returned by the decorated callable. - :param namespace: string to use as default prefix for the key used in all operations of - the backend. Default is an empty string, "". :param key_builder: Callable that enables mapping the decorated function's keys to the keys used by the cache. 
Receives a key from the iterable corresponding to ``keys_from_attr``, the decorated callable, and the positional and keyword arguments @@ -288,59 +211,24 @@ class multi_cached: if that key-value pair should not be cached (or False to store in cache). The keys and values to be passed are taken from the wrapped function result. :param ttl: int seconds to store the keys. Default is 0 which means no expiration. - :param cache: cache class to use when calling the ``multi_set``/``multi_get`` operations. - Default is :class:`aiocache.SimpleMemoryCache`. - :param serializer: serializer instance to use when calling the ``dumps``/``loads``. - If its None, default one from the cache backend is used. - :param plugins: plugins to use when calling the cmd hooks - Default is pulled from the cache class being used. - :param alias: str specifying the alias to load the config from. If alias is passed, - other config parameters are ignored. Same cache identified by alias is used on - every call. If you need a per function cache, specify the parameters explicitly - without using alias. 
""" def __init__( self, + cache, + *, keys_from_attr, - namespace="", key_builder=None, skip_cache_func=lambda k, v: False, ttl=SENTINEL, - cache=Cache.MEMORY, - serializer=None, - plugins=None, - alias=None, - **kwargs, ): + self.cache = cache self.keys_from_attr = keys_from_attr self.key_builder = key_builder or (lambda key, f, *args, **kwargs: key) self.skip_cache_func = skip_cache_func self.ttl = ttl - self.alias = alias - self.cache = None - - self._cache = cache - self._serializer = serializer - self._namespace = namespace - self._plugins = plugins - self._kwargs = kwargs def __call__(self, f): - if self.alias: - self.cache = caches.get(self.alias) - for arg in ("serializer", "namespace", "plugins"): - if getattr(self, f'_{arg}', None) is not None: - logger.warning(f"Using cache alias; ignoring {arg!r} argument.") - else: - self.cache = _get_cache( - cache=self._cache, - serializer=self._serializer, - namespace=self._namespace, - plugins=self._plugins, - **self._kwargs, - ) - @functools.wraps(f) async def wrapper(*args, **kwargs): return await self.decorator(f, *args, **kwargs) diff --git a/aiocache/factory.py b/aiocache/factory.py deleted file mode 100644 index 1a4346a4..00000000 --- a/aiocache/factory.py +++ /dev/null @@ -1,274 +0,0 @@ -import logging -import urllib -from contextlib import suppress -from copy import deepcopy -from typing import Dict - -from aiocache import AIOCACHE_CACHES -from aiocache.base import BaseCache -from aiocache.exceptions import InvalidCacheType - -with suppress(ImportError): - import redis.asyncio as redis - - -logger = logging.getLogger(__name__) - - -def _class_from_string(class_path): - class_name = class_path.split(".")[-1] - module_name = class_path.rstrip(class_name).rstrip(".") - return getattr(__import__(module_name, fromlist=[class_name]), class_name) - - -def _create_cache(cache, serializer=None, plugins=None, **kwargs): - kwargs = deepcopy(kwargs) - if serializer is not None: - cls = serializer.pop("class") - 
cls = _class_from_string(cls) if isinstance(cls, str) else cls - serializer = cls(**serializer) - - plugins_instances = [] - if plugins is not None: - for plugin in plugins: - cls = plugin.pop("class") - cls = _class_from_string(cls) if isinstance(cls, str) else cls - plugins_instances.append(cls(**plugin)) - cache = _class_from_string(cache) if isinstance(cache, str) else cache - if cache == AIOCACHE_CACHES.get("redis"): - return cache( - serializer=serializer, - plugins=plugins_instances, - namespace=kwargs.pop('namespace', ''), - ttl=kwargs.pop('ttl', None), - client=redis.Redis(**kwargs) - ) - else: - return cache(serializer=serializer, plugins=plugins_instances, **kwargs) - - -class Cache: - """ - This class is just a proxy to the specific cache implementations like - :class:`aiocache.SimpleMemoryCache`, :class:`aiocache.RedisCache` and - :class:`aiocache.MemcachedCache`. It is the preferred method of - instantiating new caches over using the backend specific classes. - - You can instatiate a new one using the ``cache_type`` attribute like: - - >>> from aiocache import Cache - >>> Cache(Cache.REDIS) - RedisCache (127.0.0.1:6379) - - If you don't specify anything, ``Cache.MEMORY`` is used. - - Only ``Cache.MEMORY``, ``Cache.REDIS`` and ``Cache.MEMCACHED`` types - are allowed. If the type passed is invalid, it will raise a - :class:`aiocache.exceptions.InvalidCacheType` exception. 
- """ - - MEMORY = AIOCACHE_CACHES["memory"] - REDIS = AIOCACHE_CACHES.get("redis") - MEMCACHED = AIOCACHE_CACHES.get("memcached") - - def __new__(cls, cache_class=MEMORY, **kwargs): - if not issubclass(cache_class, BaseCache): - raise InvalidCacheType( - "Invalid cache type, you can only use {}".format(list(AIOCACHE_CACHES.keys())) - ) - instance = cache_class.__new__(cache_class, **kwargs) - instance.__init__(**kwargs) - return instance - - @classmethod - def _get_cache_class(cls, scheme): - return AIOCACHE_CACHES[scheme] - - @classmethod - def get_scheme_class(cls, scheme): - try: - return cls._get_cache_class(scheme) - except KeyError as e: - raise InvalidCacheType( - "Invalid cache type, you can only use {}".format(list(AIOCACHE_CACHES.keys())) - ) from e - - @classmethod - def from_url(cls, url): - """ - Given a resource uri, return an instance of that cache initialized with the given - parameters. An example usage: - - >>> from aiocache import Cache - >>> Cache.from_url('memory://') - - - a more advanced usage using queryparams to configure the cache: - - >>> from aiocache import Cache - >>> cache = Cache.from_url('redis://localhost:10/1?pool_max_size=1') - >>> cache - RedisCache (localhost:10) - >>> cache.db - 1 - >>> cache.pool_max_size - 1 - - :param url: string identifying the resource uri of the cache to connect to - """ - parsed_url = urllib.parse.urlparse(url) - kwargs = dict(urllib.parse.parse_qsl(parsed_url.query)) - cache_class = Cache.get_scheme_class(parsed_url.scheme) - - if parsed_url.path: - kwargs.update(cache_class.parse_uri_path(parsed_url.path)) - - if parsed_url.hostname: - kwargs["host"] = parsed_url.hostname - - if parsed_url.port: - kwargs["port"] = parsed_url.port - - if parsed_url.password: - kwargs["password"] = parsed_url.password - - for arg in ['max_connections', 'socket_connect_timeout']: - if arg in kwargs: - kwargs[arg] = int(kwargs[arg]) - if cache_class == cls.REDIS: - return Cache(cache_class, client=redis.Redis(**kwargs)) 
- else: - return Cache(cache_class, **kwargs) - - -class CacheHandler: - - _config: Dict[str, Dict[str, object]] = { - "default": { - "cache": "aiocache.SimpleMemoryCache", - "serializer": {"class": "aiocache.serializers.StringSerializer"}, - } - } - - def __init__(self): - self._caches = {} - - def add(self, alias: str, config: Dict[str, object]) -> None: - """ - Add a cache to the current config. If the key already exists, it - will overwrite it:: - - >>> caches.add('default', { - 'cache': "aiocache.SimpleMemoryCache", - 'serializer': { - 'class': "aiocache.serializers.StringSerializer" - } - }) - - :param alias: The alias for the cache - :param config: Mapping containing the cache configuration - """ - self._config[alias] = config - - def get(self, alias: str) -> object: - """ - Retrieve cache identified by alias. Will return always the same instance - - If the cache was not instantiated yet, it will do it lazily the first time - this is called. - - :param alias: str cache alias - :return: cache instance - """ - try: - return self._caches[alias] - except KeyError: - pass - - config = self.get_alias_config(alias) - cache = _create_cache(**deepcopy(config)) - self._caches[alias] = cache - return cache - - def create(self, alias, **kwargs): - """Create a new cache. - - You can use kwargs to pass extra parameters to configure the cache. 
- - :param alias: alias to pull configuration from - :return: New cache instance - """ - config = self.get_alias_config(alias) - # TODO(PY39): **config | kwargs - return _create_cache(**{**config, **kwargs}) - - def get_alias_config(self, alias): - config = self.get_config() - if alias not in config: - raise KeyError( - "Could not find config for '{0}', ensure you include {0} when calling" - "caches.set_config specifying the config for that cache".format(alias) - ) - - return config[alias] - - def get_config(self): - """ - Return copy of current stored config - """ - return deepcopy(self._config) - - def set_config(self, config): - """ - Set (override) the default config for cache aliases from a dict-like structure. - The structure is the following:: - - { - 'default': { - 'cache': "aiocache.SimpleMemoryCache", - 'serializer': { - 'class': "aiocache.serializers.StringSerializer" - } - }, - 'redis_alt': { - 'cache': "aiocache.RedisCache", - 'host': "127.0.0.10", - 'port': 6378, - 'serializer': { - 'class': "aiocache.serializers.PickleSerializer" - }, - 'plugins': [ - {'class': "aiocache.plugins.HitMissRatioPlugin"}, - {'class': "aiocache.plugins.TimingPlugin"} - ] - } - } - - 'default' key must always exist when passing a new config. Default configuration - is:: - - { - 'default': { - 'cache': "aiocache.SimpleMemoryCache", - 'serializer': { - 'class': "aiocache.serializers.StringSerializer" - } - } - } - - You can set your own classes there. - The class params accept both str and class types. - - All keys in the config are optional, if they are not passed the defaults - for the specified class will be used. - - If a config key already exists, it will be updated with the new values. 
- """ - if "default" not in config: - raise ValueError("default config must be provided") - for config_name in config.keys(): - self._caches.pop(config_name, None) - self._config = config - - -caches = CacheHandler() diff --git a/docs/caches.rst b/docs/caches.rst index 3ce6bdf4..82133a3d 100644 --- a/docs/caches.rst +++ b/docs/caches.rst @@ -49,21 +49,12 @@ BaseCache :members: -.. _cache: - -Cache ------ - -.. autoclass:: aiocache.Cache - :members: - - .. _rediscache: RedisCache ---------- -.. autoclass:: aiocache.RedisCache +.. autoclass:: aiocache.backends.redis.RedisCache :members: @@ -81,5 +72,5 @@ SimpleMemoryCache MemcachedCache -------------- -.. autoclass:: aiocache.MemcachedCache +.. autoclass:: aiocache.backends.memcached.MemcachedCache :members: diff --git a/docs/configuration.rst b/docs/configuration.rst deleted file mode 100644 index f8dc3b84..00000000 --- a/docs/configuration.rst +++ /dev/null @@ -1,26 +0,0 @@ -.. _configuration: - -Configuration -============= - -The caches module allows to setup cache configurations and then use them either using an alias or retrieving the config explicitly. To set the config, call ``caches.set_config``: - -.. automethod:: aiocache.caches.set_config - -To retrieve a copy of the current config, you can use ``caches.get_config`` or ``caches.get_alias_config`` for an alias config. - - -Next snippet shows an example usage: - -.. literalinclude:: ../examples/cached_alias_config.py - :language: python - :linenos: - :emphasize-lines: 6-26 - -When you do ``caches.get('alias_name')``, the cache instance is built lazily the first time. Next accesses will return the **same** instance. If instead of reusing the same instance, you need a new one every time, use ``caches.create('alias_name')``. One of the advantages of ``caches.create`` is that it accepts extra args that then are passed to the cache constructor. This way you can override args like namespace, endpoint, etc. - -.. automethod:: aiocache.caches.add - -.. 
automethod:: aiocache.caches.get - -.. automethod:: aiocache.caches.create diff --git a/docs/index.rst b/docs/index.rst index 5966e1a9..323fd4e1 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -24,15 +24,16 @@ Using a cache is as simple as .. code-block:: python >>> import asyncio - >>> from aiocache import Cache - >>> cache = Cache() + >>> from aiocache import SimpleMemoryCache + >>> cache = SimpleMemoryCache() >>> with asyncio.Runner() as runner: >>> runner.run(cache.set("key", "value")) True >>> runner.run(cache.get("key")) 'value' -Here we are using the :ref:`simplememorycache` but you can use any other listed in :ref:`caches`. All caches contain the same minimum interface which consists on the following functions: +Here we are using the :ref:`simplememorycache` but you can use any other supported backends as listed in :ref:`caches`. +All caches contain the same minimum interface which consists of the following functions: - ``add``: Only adds key/value if key does not exist. Otherwise raises ValueError. - ``get``: Retrieve value identified by key. @@ -45,16 +46,7 @@ Here we are using the :ref:`simplememorycache` but you can use any other listed - ``clear``: Clears the items stored. - ``raw``: Executes the specified command using the underlying client. - -You can also setup cache aliases like in Django settings: - -.. literalinclude:: ../examples/cached_alias_config.py - :language: python - :linenos: - :emphasize-lines: 6-26 - - -In `examples folder `_ you can check different use cases: +See the `examples folder `_ for different use cases: - `Sanic, Aiohttp and Tornado `_ - `Python object in Redis `_ @@ -73,10 +65,10 @@ Contents caches serializers plugins - configuration decorators locking testing + v1_migration Indices and tables ================== diff --git a/docs/plugins.rst b/docs/plugins.rst index 180f1018..c433007b 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -5,9 +5,9 @@ Plugins Plugins can be used to enrich the behavior of the cache. 
By default all caches are configured without any plugin but can add new ones in the constructor or after initializing the cache class:: - >>> from aiocache import Cache + >>> from aiocache import SimpleMemoryCache >>> from aiocache.plugins import TimingPlugin - cache = Cache(plugins=[HitMissRatioPlugin()]) + cache = SimpleMemoryCache(plugins=[HitMissRatioPlugin()]) cache.plugins += [TimingPlugin()] You can define your custom plugin by inheriting from `BasePlugin`_ and overriding the needed methods (the overrides NEED to be async). All commands have ``pre_`` and ``post_`` hooks. diff --git a/docs/serializers.rst b/docs/serializers.rst index 76963d4a..b68b20f6 100644 --- a/docs/serializers.rst +++ b/docs/serializers.rst @@ -7,9 +7,9 @@ Serializers can be attached to backends in order to serialize/deserialize data s To use a specific serializer:: - >>> from aiocache import Cache + >>> from aiocache import SimpleMemoryCache >>> from aiocache.serializers import PickleSerializer - cache = Cache(Cache.MEMORY, serializer=PickleSerializer()) + cache = SimpleMemoryCache(serializer=PickleSerializer()) Currently the following are built in: diff --git a/docs/v1_migration.rst b/docs/v1_migration.rst new file mode 100644 index 00000000..f787ce2b --- /dev/null +++ b/docs/v1_migration.rst @@ -0,0 +1,15 @@ +.. _v1_migration: + +Migrating from v0.x to v1 +========================= + +The v1 release of aiocache is a major release that introduces several breaking changes. + +Changes to Cache Instantiation +------------------------------ + +The abstraction and factories around cache instantiation have been removed in favor of a more direct approach. + +* The `aiocache.Cache` class has been removed. Instead, use the specific cache class directly. For example, use `aiocache.RedisCache` instead of `aiocache.Cache.REDIS`. +* Caches should be fully instantiated when passed to decorators, rather than being instantiated with a factory function. +* Cache aliases have been removed. 
Create an instance of the cache class directly instead. diff --git a/examples/alt_key_builder.py b/examples/alt_key_builder.py index de13a9ea..b71a3f2d 100644 --- a/examples/alt_key_builder.py +++ b/examples/alt_key_builder.py @@ -47,7 +47,7 @@ import asyncio from typing import List, Dict -from aiocache import Cache, cached +from aiocache import cached, SimpleMemoryCache async def demo_key_builders(): @@ -82,7 +82,7 @@ def fixed_key(key, namespace): async def demo_cache_key_builders(namespace=None): """Demonstrate usage and behavior of the custom key_builder functions""" cache_ns = "cache_namespace" - async with Cache(Cache.MEMORY, key_builder=ensure_no_spaces, namespace=cache_ns) as cache: + async with SimpleMemoryCache(key_builder=ensure_no_spaces, namespace=cache_ns) as cache: raw_key = "Key With Unwanted Spaces" return_value = 42 await cache.add(raw_key, return_value, namespace=namespace) @@ -102,7 +102,7 @@ async def demo_cache_key_builders(namespace=None): assert cached_value == return_value await cache.delete(raw_key, namespace=namespace) - async with Cache(Cache.MEMORY, key_builder=bytes_key) as cache: + async with SimpleMemoryCache(key_builder=bytes_key) as cache: raw_key = "string-key" return_value = 42 await cache.add(raw_key, return_value, namespace=namespace) @@ -114,7 +114,7 @@ async def demo_cache_key_builders(namespace=None): assert cached_value == return_value await cache.delete(raw_key, namespace=namespace) - async with Cache(Cache.MEMORY, key_builder=fixed_key) as cache: + async with SimpleMemoryCache(key_builder=fixed_key) as cache: unchanging_key = "universal key" for raw_key, return_value in zip( @@ -202,7 +202,7 @@ async def demo_decorator_key_builders(): async def demo_ignore_kwargs_decorator(): """Cache key from positional arguments in call to decorated function""" - @cached(key_builder=ignore_kwargs) + @cached(SimpleMemoryCache(), key_builder=ignore_kwargs) async def fn(a, b=2, c=3): return (a, b) @@ -221,7 +221,7 @@ async def fn(a, b=2, 
c=3): await fn(*args, **kwargs) cache = fn.cache - decorator = cached(key_builder=ignore_kwargs) + decorator = cached(SimpleMemoryCache(), key_builder=ignore_kwargs) key = decorator.get_cache_key(fn, args=args, kwargs=kwargs) exists = await cache.exists(key) assert exists is True @@ -241,7 +241,7 @@ async def fn(a, b=2, c=3): async def demo_module_override_decorator(): """Cache key uses custom module name for decorated function""" - @cached(key_builder=module_override) + @cached(SimpleMemoryCache(), key_builder=module_override) async def fn(a, b=2, c=3): return (a, b) @@ -252,7 +252,7 @@ async def fn(a, b=2, c=3): await fn(*args, **kwargs) cache = fn.cache - decorator = cached(key_builder=module_override) + decorator = cached(SimpleMemoryCache, key_builder=module_override) key = decorator.get_cache_key(fn, args=args, kwargs=kwargs) exists = await cache.exists(key) assert exists is True @@ -264,7 +264,7 @@ async def fn(a, b=2, c=3): async def demo_structured_key_decorator(): """Cache key expresses structure of decorated function call""" - @cached(key_builder=structured_key) + @cached(SimpleMemoryCache(), key_builder=structured_key) async def fn(a, b=2, c=3): return (a, b) @@ -278,7 +278,7 @@ async def fn(a, b=2, c=3): await fn(*args, **kwargs) cache = fn.cache - decorator = cached(key_builder=structured_key) + decorator = cached(SimpleMemoryCache(), key_builder=structured_key) key = decorator.get_cache_key(fn, args=args, kwargs=kwargs) exists = await cache.exists(key) assert exists is True diff --git a/examples/cached_alias_config.py b/examples/cached_alias_config.py deleted file mode 100644 index 27aea69b..00000000 --- a/examples/cached_alias_config.py +++ /dev/null @@ -1,69 +0,0 @@ -import asyncio - -import redis.asyncio as redis - -from aiocache import caches, Cache -from aiocache.serializers import StringSerializer, PickleSerializer - -caches.set_config({ - 'default': { - 'cache': "aiocache.SimpleMemoryCache", - 'serializer': { - 'class': 
"aiocache.serializers.StringSerializer" - } - }, - 'redis_alt': { - 'cache': "aiocache.RedisCache", - "host": "127.0.0.1", - 'port': 6379, - "socket_connect_timeout": 1, - 'serializer': { - 'class': "aiocache.serializers.PickleSerializer" - }, - 'plugins': [ - {'class': "aiocache.plugins.HitMissRatioPlugin"}, - {'class': "aiocache.plugins.TimingPlugin"} - ] - } -}) - - -async def default_cache(): - cache = caches.get('default') # This always returns the same instance - await cache.set("key", "value") - - assert await cache.get("key") == "value" - assert isinstance(cache, Cache.MEMORY) - assert isinstance(cache.serializer, StringSerializer) - - -async def alt_cache(): - # This generates a new instance every time! You can also use - # `caches.create("alt", namespace="test", etc...)` to override extra args - cache = caches.create("redis_alt") - await cache.set("key", "value") - - assert await cache.get("key") == "value" - assert isinstance(cache, Cache.REDIS) - assert isinstance(cache.serializer, PickleSerializer) - assert len(cache.plugins) == 2 - connection_args = cache.client.connection_pool.connection_kwargs - assert connection_args["host"] == "127.0.0.1" - assert connection_args["socket_connect_timeout"] == 1 - assert connection_args["port"] == 6379 - await cache.close() - - -async def test_alias(): - await default_cache() - await alt_cache() - - cache = Cache(Cache.REDIS, client=redis.Redis()) - await cache.delete("key") - await cache.close() - - await caches.get("default").close() - - -if __name__ == "__main__": - asyncio.run(test_alias()) diff --git a/examples/cached_decorator.py b/examples/cached_decorator.py index 78d1cb11..f5b9f02f 100644 --- a/examples/cached_decorator.py +++ b/examples/cached_decorator.py @@ -3,21 +3,22 @@ from collections import namedtuple import redis.asyncio as redis -from aiocache import cached, Cache +from aiocache import cached +from aiocache import RedisCache from aiocache.serializers import PickleSerializer Result = 
namedtuple('Result', "content, status") +cache = RedisCache(namespace="main", client=redis.Redis(), serializer=PickleSerializer()) -@cached( - ttl=10, cache=Cache.REDIS, key_builder=lambda *args, **kw: "key", - serializer=PickleSerializer(), namespace="main", client=redis.Redis()) + +@cached(cache, ttl=10, key_builder=lambda *args, **kw: "key") async def cached_call(): return Result("content", 200) async def test_cached(): - async with Cache(Cache.REDIS, namespace="main", client=redis.Redis()) as cache: + async with cache: await cached_call() exists = await cache.exists("key") assert exists is True diff --git a/examples/frameworks/aiohttp_example.py b/examples/frameworks/aiohttp_example.py index 7220c711..4a439bfa 100644 --- a/examples/frameworks/aiohttp_example.py +++ b/examples/frameworks/aiohttp_example.py @@ -2,11 +2,13 @@ import logging from datetime import datetime from aiohttp import web -from aiocache import cached +from aiocache import cached, SimpleMemoryCache from aiocache.serializers import JsonSerializer +cache = SimpleMemoryCache(serializer=JsonSerializer()) -@cached(key="function_key", serializer=JsonSerializer()) + +@cached(cache, key_builder=lambda x: "time") async def time(): return {"time": datetime.now().isoformat()} @@ -38,7 +40,7 @@ async def get_from_cache(self, key): return None -@CachedOverride(key="route_key", serializer=JsonSerializer()) +@CachedOverride(cache, key_builder="route") async def handle2(request): return web.json_response(await asyncio.sleep(3)) diff --git a/examples/frameworks/sanic_example.py b/examples/frameworks/sanic_example.py index 91451880..2fec3a38 100644 --- a/examples/frameworks/sanic_example.py +++ b/examples/frameworks/sanic_example.py @@ -10,13 +10,13 @@ from sanic import Sanic from sanic.response import json from sanic.log import logger -from aiocache import cached, Cache +from aiocache import cached, SimpleMemoryCache from aiocache.serializers import JsonSerializer app = Sanic(__name__) 
-@cached(key="my_custom_key", serializer=JsonSerializer()) +@cached(SimpleMemoryCache(), key_builder=lambda x: "my_custom_key") async def expensive_call(): logger.info("Expensive has been called") await asyncio.sleep(3) @@ -24,7 +24,7 @@ async def expensive_call(): async def reuse_data(): - cache = Cache(serializer=JsonSerializer()) # Not ideal to define here + cache = SimpleMemoryCache(serializer=JsonSerializer()) # Not ideal to define here data = await cache.get("my_custom_key") # Note the key is defined in `cached` decorator return data diff --git a/examples/frameworks/tornado_example.py b/examples/frameworks/tornado_example.py index 83bbb7b2..e7127c29 100644 --- a/examples/frameworks/tornado_example.py +++ b/examples/frameworks/tornado_example.py @@ -1,15 +1,15 @@ import tornado.web import tornado.ioloop from datetime import datetime -from aiocache import cached +from aiocache import cached, SimpleMemoryCache from aiocache.serializers import JsonSerializer class MainHandler(tornado.web.RequestHandler): - # Due some incompatibilities between tornado and asyncio, caches can't use the "timeout" feature + # Due some incompatibilities between tornado and asyncio, caches can't use the "ttl" feature # in order to make it work, you will have to specify it always to 0 - @cached(key="my_custom_key", serializer=JsonSerializer(), timeout=0) + @cached(SimpleMemoryCache(serializer=JsonSerializer(), timeout=0), key_builder=lambda x: "my_custom_key") async def time(self): return {"time": datetime.now().isoformat()} diff --git a/examples/marshmallow_serializer_class.py b/examples/marshmallow_serializer_class.py index f45a2ed7..47d77c43 100644 --- a/examples/marshmallow_serializer_class.py +++ b/examples/marshmallow_serializer_class.py @@ -5,7 +5,7 @@ from marshmallow import fields, Schema, post_load -from aiocache import Cache +from aiocache import SimpleMemoryCache from aiocache.serializers import BaseSerializer @@ -48,7 +48,7 @@ def loads(self, value: str) -> Any: return 
self.schema.loads(value) -cache = Cache(serializer=MarshmallowSerializer(), namespace="main") +cache = SimpleMemoryCache(serializer=MarshmallowSerializer(), namespace="main") async def serializer(): diff --git a/examples/multicached_decorator.py b/examples/multicached_decorator.py index 59c0db80..5fd83494 100644 --- a/examples/multicached_decorator.py +++ b/examples/multicached_decorator.py @@ -2,7 +2,8 @@ import redis.asyncio as redis -from aiocache import multi_cached, Cache +from aiocache import multi_cached +from aiocache import RedisCache DICT = { 'a': "Z", @@ -11,15 +12,15 @@ 'd': "W" } -cache = Cache(Cache.REDIS, namespace="main", client=redis.Redis()) +cache = RedisCache(namespace="main", client=redis.Redis()) -@multi_cached("ids", cache=Cache.REDIS, namespace="main", client=cache.client) +@multi_cached(cache, keys_from_attr="ids") async def multi_cached_ids(ids=None): return {id_: DICT[id_] for id_ in ids} -@multi_cached("keys", cache=Cache.REDIS, namespace="main", client=cache.client) +@multi_cached(cache, keys_from_attr="keys") async def multi_cached_keys(keys=None): return {id_: DICT[id_] for id_ in keys} diff --git a/examples/optimistic_lock.py b/examples/optimistic_lock.py index 422973e4..8b62f917 100644 --- a/examples/optimistic_lock.py +++ b/examples/optimistic_lock.py @@ -3,12 +3,11 @@ import random import redis.asyncio as redis - -from aiocache import Cache +from aiocache import RedisCache from aiocache.lock import OptimisticLock, OptimisticLockError logger = logging.getLogger(__name__) -cache = Cache(Cache.REDIS, namespace="main", client=redis.Redis()) +cache = RedisCache(namespace="main", client=redis.Redis()) async def expensive_function(): diff --git a/examples/plugins.py b/examples/plugins.py index b870d82b..430d364f 100644 --- a/examples/plugins.py +++ b/examples/plugins.py @@ -2,7 +2,7 @@ import random import logging -from aiocache import Cache +from aiocache import SimpleMemoryCache from aiocache.plugins import HitMissRatioPlugin, 
TimingPlugin, BasePlugin @@ -18,7 +18,7 @@ async def post_set(self, *args, **kwargs): logger.info("I'm the post_set hook being called with %s %s" % (args, kwargs)) -cache = Cache( +cache = SimpleMemoryCache( plugins=[HitMissRatioPlugin(), TimingPlugin(), MyCustomPlugin()], namespace="main") diff --git a/examples/python_object.py b/examples/python_object.py index 984fad4c..881b69c4 100644 --- a/examples/python_object.py +++ b/examples/python_object.py @@ -4,11 +4,11 @@ import redis.asyncio as redis -from aiocache import Cache +from aiocache import RedisCache from aiocache.serializers import PickleSerializer MyObject = namedtuple("MyObject", ["x", "y"]) -cache = Cache(Cache.REDIS, serializer=PickleSerializer(), namespace="main", client=redis.Redis()) +cache = RedisCache(serializer=PickleSerializer(), namespace="main", client=redis.Redis()) async def complex_object(): diff --git a/examples/redlock.py b/examples/redlock.py index 38d703d7..51cbbd73 100644 --- a/examples/redlock.py +++ b/examples/redlock.py @@ -3,11 +3,11 @@ import redis.asyncio as redis -from aiocache import Cache +from aiocache import RedisCache from aiocache.lock import RedLock logger = logging.getLogger(__name__) -cache = Cache(Cache.REDIS, namespace="main", client=redis.Redis()) +cache = RedisCache(namespace="main", client=redis.Redis()) async def expensive_function(): diff --git a/examples/serializer_class.py b/examples/serializer_class.py index a9154843..2c25ff60 100644 --- a/examples/serializer_class.py +++ b/examples/serializer_class.py @@ -3,7 +3,7 @@ import redis.asyncio as redis -from aiocache import Cache +from aiocache import RedisCache from aiocache.serializers import BaseSerializer @@ -27,7 +27,7 @@ def loads(self, value): return decompressed -cache = Cache(Cache.REDIS, serializer=CompressionSerializer(), namespace="main", client=redis.Redis()) +cache = RedisCache(serializer=CompressionSerializer(), namespace="main", client=redis.Redis()) async def serializer(): diff --git 
a/examples/serializer_function.py b/examples/serializer_function.py index 05c5ba04..d85b3eb9 100644 --- a/examples/serializer_function.py +++ b/examples/serializer_function.py @@ -4,7 +4,8 @@ import redis.asyncio as redis from marshmallow import Schema, fields, post_load -from aiocache import Cache + +from aiocache import RedisCache class MyType: @@ -30,7 +31,7 @@ def loads(value): return MyTypeSchema().loads(value) -cache = Cache(Cache.REDIS, namespace="main", client=redis.Redis()) +cache = RedisCache(namespace="main", client=redis.Redis()) async def serializer_function(): diff --git a/examples/simple_redis.py b/examples/simple_redis.py index 1f429623..6d5553d8 100644 --- a/examples/simple_redis.py +++ b/examples/simple_redis.py @@ -1,10 +1,11 @@ import asyncio -from aiocache import Cache import redis.asyncio as redis -cache = Cache(Cache.REDIS, namespace="main", client=redis.Redis()) +from aiocache import RedisCache + +cache = RedisCache(namespace="main", client=redis.Redis()) async def redis(): diff --git a/tests/acceptance/conftest.py b/tests/acceptance/conftest.py index 0d5a306f..28224293 100644 --- a/tests/acceptance/conftest.py +++ b/tests/acceptance/conftest.py @@ -2,40 +2,29 @@ import pytest -from aiocache import Cache, caches from ..utils import KEY_LOCK, Keys -@pytest.fixture(autouse=True) -def reset_caches(): - caches._caches = {} - caches.set_config( - { - "default": { - "cache": "aiocache.SimpleMemoryCache", - "serializer": {"class": "aiocache.serializers.NullSerializer"}, - } - } - ) - - @pytest.fixture async def redis_cache(redis_client): - async with Cache(Cache.REDIS, namespace="test", client=redis_client) as cache: + from aiocache.backends.redis import RedisCache + async with RedisCache(namespace="test", client=redis_client) as cache: yield cache await asyncio.gather(*(cache.delete(k) for k in (*Keys, KEY_LOCK))) @pytest.fixture async def memory_cache(): - async with Cache(namespace="test") as cache: + from aiocache.backends.memory import 
SimpleMemoryCache + async with SimpleMemoryCache(namespace="test") as cache: yield cache await asyncio.gather(*(cache.delete(k) for k in (*Keys, KEY_LOCK))) @pytest.fixture async def memcached_cache(): - async with Cache(Cache.MEMCACHED, namespace="test") as cache: + from aiocache.backends.memcached import MemcachedCache + async with MemcachedCache(namespace="test") as cache: yield cache await asyncio.gather(*(cache.delete(k) for k in (*Keys, KEY_LOCK))) diff --git a/tests/acceptance/test_decorators.py b/tests/acceptance/test_decorators.py index ad99aca7..23b4a92a 100644 --- a/tests/acceptance/test_decorators.py +++ b/tests/acceptance/test_decorators.py @@ -21,12 +21,8 @@ async def stub(arg: float, seconds: int = 0) -> str: class TestCached: - @pytest.fixture(autouse=True) - def default_cache(self, mocker, cache): - mocker.patch("aiocache.decorators._get_cache", autospec=True, return_value=cache) - async def test_cached_ttl(self, cache): - @cached(ttl=2, key_builder=lambda *args, **kw: Keys.KEY) + @cached(cache=cache, ttl=2, key_builder=lambda *args, **kw: Keys.KEY) async def fn(): return str(random.randint(1, 50)) @@ -41,7 +37,7 @@ async def test_cached_key_builder(self, cache): def build_key(f, self, a, b): return "{}_{}_{}_{}".format(self, f.__name__, a, b) - @cached(key_builder=build_key) + @cached(cache=cache, key_builder=build_key) async def fn(self, a, b=2): return "1" @@ -50,7 +46,7 @@ async def fn(self, a, b=2): @pytest.mark.parametrize("decorator", (cached, cached_stampede)) async def test_cached_skip_cache_func(self, cache, decorator): - @decorator(skip_cache_func=lambda r: r is None) + @decorator(cache=cache, skip_cache_func=lambda r: r is None) async def sk_func(x): return x if x > 0 else None @@ -58,7 +54,7 @@ async def sk_func(x): res = await sk_func(arg) assert res - key = decorator().get_cache_key(sk_func, args=(1,), kwargs={}) + key = decorator(cache=cache).get_cache_key(sk_func, args=(1,), kwargs={}) assert key assert await cache.exists(key) @@ 
-68,45 +64,45 @@ async def sk_func(x): await sk_func(arg) - key = decorator().get_cache_key(sk_func, args=(-1,), kwargs={}) + key = decorator(cache=cache).get_cache_key(sk_func, args=(-1,), kwargs={}) assert key assert not await cache.exists(key) async def test_cached_without_namespace(self, cache): """Default cache key is created when no namespace is provided""" - @cached(namespace=None) + cache.namespace = None + + @cached(cache=cache) async def fn(): return "1" await fn() - decorator = cached(namespace=None) + decorator = cached(cache=cache) key = decorator.get_cache_key(fn, args=(), kwargs={}) assert await cache.exists(key, namespace=None) is True async def test_cached_with_namespace(self, cache): """Cache key is prefixed with provided namespace""" key_prefix = "test" + cache.namespace = key_prefix - @cached(namespace=key_prefix) + @cached(cache=cache) async def ns_fn(): return "1" await ns_fn() - decorator = cached(namespace=key_prefix) + decorator = cached(cache=cache) key = decorator.get_cache_key(ns_fn, args=(), kwargs={}) assert await cache.exists(key, namespace=key_prefix) is True class TestCachedStampede: - @pytest.fixture(autouse=True) - def default_cache(self, mocker, cache): - mocker.patch("aiocache.decorators._get_cache", autospec=True, return_value=cache) async def test_cached_stampede(self, mocker, cache): mocker.spy(cache, "get") mocker.spy(cache, "set") - decorator = cached_stampede(ttl=10, lease=3) + decorator = cached_stampede(cache=cache, ttl=10, lease=3) await asyncio.gather(decorator(stub)(0.5), decorator(stub)(0.5)) @@ -119,7 +115,7 @@ async def test_cached_stampede(self, mocker, cache): async def test_locking_dogpile_lease_expiration(self, mocker, cache): mocker.spy(cache, "get") mocker.spy(cache, "set") - decorator = cached_stampede(ttl=10, lease=3) + decorator = cached_stampede(cache=cache, ttl=10, lease=3) await asyncio.gather( decorator(stub)(1, seconds=1), @@ -131,7 +127,7 @@ async def test_locking_dogpile_lease_expiration(self, 
mocker, cache): assert cache.set.call_count == 3 async def test_locking_dogpile_task_cancellation(self, cache): - @cached_stampede() + @cached_stampede(cache=cache) async def cancel_task(): raise asyncio.CancelledError() @@ -140,12 +136,8 @@ async def cancel_task(): class TestMultiCachedDecorator: - @pytest.fixture(autouse=True) - def default_cache(self, mocker, cache): - mocker.patch("aiocache.decorators._get_cache", autospec=True, return_value=cache) - async def test_multi_cached(self, cache): - multi_cached_decorator = multi_cached("keys") + multi_cached_decorator = multi_cached(cache, keys_from_attr="keys") default_keys = {Keys.KEY, Keys.KEY_1} await multi_cached_decorator(return_dict)(keys=default_keys) @@ -154,7 +146,7 @@ async def test_multi_cached(self, cache): assert await cache.get(key) is not None async def test_keys_without_kwarg(self, cache): - @multi_cached("keys") + @multi_cached(cache, keys_from_attr="keys") async def fn(keys): return {Keys.KEY: 1} @@ -165,7 +157,7 @@ async def test_multi_cached_key_builder(self, cache): def build_key(key, f, self, keys, market="ES"): return "{}_{}_{}".format(f.__name__, ensure_key(key), market) - @multi_cached(keys_from_attr="keys", key_builder=build_key) + @multi_cached(keys_from_attr="keys", key_builder=build_key, cache=cache) async def fn(self, keys, market="ES"): return {Keys.KEY: 1, Keys.KEY_1: 2} @@ -174,7 +166,7 @@ async def fn(self, keys, market="ES"): assert await cache.exists("fn_" + ensure_key(Keys.KEY_1) + "_ES") is True async def test_multi_cached_skip_keys(self, cache): - @multi_cached(keys_from_attr="keys", skip_cache_func=lambda _, v: v is None) + @multi_cached(cache, keys_from_attr="keys", skip_cache_func=lambda _, v: v is None) async def multi_sk_fn(keys, values): return {k: v for k, v in zip(keys, values)} @@ -187,7 +179,7 @@ async def multi_sk_fn(keys, values): assert not await cache.exists(Keys.KEY_1) async def test_fn_with_args(self, cache): - @multi_cached("keys") + @multi_cached(cache, 
keys_from_attr="keys") async def fn(keys, *args): assert len(args) == 1 return {Keys.KEY: 1} @@ -203,7 +195,7 @@ async def wrapper(*args, **kwargs): return wrapper @dummy_d - @multi_cached("keys") + @multi_cached(cache, keys_from_attr="keys") async def fn(keys): return {Keys.KEY: 1} diff --git a/tests/acceptance/test_factory.py b/tests/acceptance/test_factory.py deleted file mode 100644 index 4a3bb4f8..00000000 --- a/tests/acceptance/test_factory.py +++ /dev/null @@ -1,52 +0,0 @@ -import pytest - -from aiocache import Cache -from aiocache.backends.memory import SimpleMemoryCache - - -class TestCache: - async def test_from_url_memory(self): - async with Cache.from_url("memory://") as cache: - assert isinstance(cache, SimpleMemoryCache) - - def test_from_url_memory_no_endpoint(self): - with pytest.raises(TypeError): - Cache.from_url("memory://host:10") - - @pytest.mark.redis - async def test_from_url_redis(self): - from aiocache.backends.redis import RedisCache - - url = ("redis://endpoint:1000/0/?password=pass" - + "&max_connections=50&socket_connect_timeout=20") - - async with Cache.from_url(url) as cache: - assert isinstance(cache, RedisCache) - connection_args = cache.client.connection_pool.connection_kwargs - assert connection_args["host"] == "endpoint" - assert connection_args["port"] == 1000 - assert connection_args["password"] == "pass" - assert cache.client.connection_pool.max_connections == 50 - assert connection_args["socket_connect_timeout"] == 20 - - @pytest.mark.memcached - async def test_from_url_memcached(self): - from aiocache.backends.memcached import MemcachedCache - - url = "memcached://endpoint:1000?pool_size=10" - - async with Cache.from_url(url) as cache: - assert isinstance(cache, MemcachedCache) - assert cache.host == "endpoint" - assert cache.port == 1000 - assert cache.pool_size == 10 - - @pytest.mark.parametrize( - "scheme", - (pytest.param("redis", marks=pytest.mark.redis), - "memory", - pytest.param("memcached", 
marks=pytest.mark.memcached), - )) - def test_from_url_unexpected_param(self, scheme): - with pytest.raises(TypeError): - Cache.from_url("{}://?arg1=arg1".format(scheme)) diff --git a/tests/performance/conftest.py b/tests/performance/conftest.py index e64ed796..03066cbb 100644 --- a/tests/performance/conftest.py +++ b/tests/performance/conftest.py @@ -1,17 +1,17 @@ import pytest -from aiocache import Cache - @pytest.fixture async def redis_cache(redis_client): # redis connection pool raises ConnectionError but doesn't wait for conn reuse # when exceeding max pool size. - async with Cache(Cache.REDIS, namespace="test", client=redis_client) as cache: + from aiocache.backends.redis import RedisCache + async with RedisCache(namespace="test", client=redis_client) as cache: yield cache @pytest.fixture async def memcached_cache(): - async with Cache(Cache.MEMCACHED, namespace="test", pool_size=1) as cache: + from aiocache.backends.memcached import MemcachedCache + async with MemcachedCache(namespace="test", pool_size=1) as cache: yield cache diff --git a/tests/performance/server.py b/tests/performance/server.py index eab648a1..c5d53670 100644 --- a/tests/performance/server.py +++ b/tests/performance/server.py @@ -5,29 +5,31 @@ import redis.asyncio as redis from aiohttp import web -from aiocache import Cache - logging.getLogger("aiohttp.access").propagate = False class CacheManager: def __init__(self, backend: str): - backends = { - "memory": Cache.MEMORY, - "redis": Cache.REDIS, - "memcached": Cache.MEMCACHED, - } if backend == "redis": - cache_kwargs = {"client": redis.Redis( - host="127.0.0.1", - port=6379, - db=0, - password=None, - decode_responses=False, - )} + from aiocache.backends.redis import RedisCache + cache = RedisCache( + client=redis.Redis( + host="127.0.0.1", + port=6379, + db=0, + password=None, + decode_responses=False, + ) + ) + elif backend == "memcached": + from aiocache.backends.memcached import MemcachedCache + cache = MemcachedCache() + elif 
backend == "memory": + from aiocache.backends.memory import SimpleMemoryCache + cache = SimpleMemoryCache() else: - cache_kwargs = dict() - self.cache = Cache(backends[backend], **cache_kwargs) + raise ValueError("Invalid backend") + self.cache = cache async def get(self, key): return await self.cache.get(key, timeout=0.1) diff --git a/tests/ut/conftest.py b/tests/ut/conftest.py index 591f1c44..38dae44a 100644 --- a/tests/ut/conftest.py +++ b/tests/ut/conftest.py @@ -3,23 +3,10 @@ import pytest -from aiocache import caches from aiocache.plugins import BasePlugin from ..utils import AbstractBaseCache, ConcreteBaseCache -@pytest.fixture(autouse=True) -def reset_caches(): - caches.set_config( - { - "default": { - "cache": "aiocache.SimpleMemoryCache", - "serializer": {"class": "aiocache.serializers.NullSerializer"}, - } - } - ) - - @pytest.fixture def mock_cache(mocker): return create_autospec(ConcreteBaseCache()) diff --git a/tests/ut/test_decorators.py b/tests/ut/test_decorators.py index 0b7a9421..7fe4c68e 100644 --- a/tests/ut/test_decorators.py +++ b/tests/ut/test_decorators.py @@ -11,7 +11,6 @@ from aiocache.base import SENTINEL from aiocache.decorators import _get_args_dict from aiocache.lock import RedLock -from ..utils import AbstractBaseCache async def stub(*args, value=None, seconds=0, **kwargs): @@ -24,8 +23,7 @@ async def stub(*args, value=None, seconds=0, **kwargs): class TestCached: @pytest.fixture def decorator(self, mock_cache): - with patch("aiocache.decorators._get_cache", autospec=True, return_value=mock_cache): - yield cached() + yield cached(cache=mock_cache) @pytest.fixture def decorator_call(self, decorator): @@ -38,41 +36,17 @@ def spy_stub(self, mocker): mocker.spy(module, "stub") def test_init(self): + cache = SimpleMemoryCache() c = cached( ttl=1, key_builder=lambda *args, **kw: "key", - cache=SimpleMemoryCache, - plugins=None, - alias=None, + cache=cache, noself=False, - namespace="test", - unused_kwarg="unused", ) assert c.ttl == 1 assert 
c.key_builder() == "key" - assert c.cache is None - assert c._cache == SimpleMemoryCache - assert c._serializer is None - assert c._namespace == "test" - assert c._kwargs == {"unused_kwarg": "unused"} - - def test_fails_at_instantiation(self): - with pytest.raises(TypeError): - - @cached(wrong_param=1) - async def fn() -> None: - """Dummy function.""" - - def test_alias_takes_precedence(self, mock_cache): - with patch( - "aiocache.decorators.caches.get", autospec=True, return_value=mock_cache - ) as mock_get: - c = cached(alias="default", cache=SimpleMemoryCache, namespace="test") - c(stub) - - mock_get.assert_called_with("default") - assert c.cache is mock_cache + assert c.cache is cache def test_get_cache_key_with_key(self, decorator): decorator.key_builder = lambda *args, **kw: "key" @@ -185,59 +159,39 @@ async def test_set_catches_exception(self, decorator, decorator_call): async def test_decorate(self, mock_cache): mock_cache.get.return_value = None - with patch("aiocache.decorators._get_cache", autospec=True, return_value=mock_cache): - @cached() - async def fn(n): - return n + @cached(cache=mock_cache) + async def fn(n): + return n - assert await fn(1) == 1 - assert await fn(2) == 2 - assert fn.cache == mock_cache + assert await fn(1) == 1 + assert await fn(2) == 2 + assert fn.cache is mock_cache async def test_keeps_signature(self, mock_cache): - with patch("aiocache.decorators._get_cache", autospec=True, return_value=mock_cache): - - @cached() - async def what(self, a, b): - """Dummy function.""" - - assert what.__name__ == "what" - assert str(inspect.signature(what)) == "(self, a, b)" - assert inspect.getfullargspec(what.__wrapped__).args == ["self", "a", "b"] - - async def test_reuses_cache_instance(self): - with patch("aiocache.decorators._get_cache", autospec=True) as get_c: - cache = create_autospec(AbstractBaseCache, instance=True) - get_c.side_effect = [cache, None] - - @cached() - async def what(): - """Dummy function.""" - - await what() - await 
what() + @cached(cache=mock_cache) + async def what(self, a, b): + """Dummy function.""" - assert get_c.call_count == 1 - assert cache.get.call_count == 2 + assert what.__name__ == "what" + assert str(inspect.signature(what)) == "(self, a, b)" + assert inspect.getfullargspec(what.__wrapped__).args == ["self", "a", "b"] - async def test_cache_per_function(self): - @cached() - async def foo(): - """First function.""" + async def test_reuses_cache_instance(self, mock_cache): + @cached(cache=mock_cache) + async def what(): + """Dummy function.""" - @cached() - async def bar(): - """Second function.""" + await what() + await what() - assert foo.cache != bar.cache + assert mock_cache.get.call_count == 2 class TestCachedStampede: @pytest.fixture def decorator(self, mock_cache): - with patch("aiocache.decorators._get_cache", autospec=True, return_value=mock_cache): - yield cached_stampede() + yield cached_stampede(cache=mock_cache) @pytest.fixture def decorator_call(self, decorator): @@ -248,30 +202,22 @@ def spy_stub(self, mocker): module = sys.modules[globals()["__name__"]] mocker.spy(module, "stub") - def test_inheritance(self): - assert isinstance(cached_stampede(), cached) + def test_inheritance(self, mock_cache): + assert isinstance(cached_stampede(mock_cache), cached) def test_init(self): + cache = SimpleMemoryCache() c = cached_stampede( lease=3, ttl=1, key_builder=lambda *args, **kw: "key", - cache=SimpleMemoryCache, - plugins=None, - alias=None, - noself=False, - namespace="test", - unused_kwarg="unused", + cache=cache, ) assert c.ttl == 1 assert c.key_builder() == "key" - assert c.cache is None - assert c._cache == SimpleMemoryCache - assert c._serializer is None + assert c.cache is cache assert c.lease == 3 - assert c._namespace == "test" - assert c._kwargs == {"unused_kwarg": "unused"} async def test_calls_get_and_returns(self, decorator, decorator_call): decorator.cache.get.return_value = 1 @@ -331,8 +277,7 @@ async def stub_dict(*args, keys=None, **kwargs): 
class TestMultiCached: @pytest.fixture def decorator(self, mock_cache): - with patch("aiocache.decorators._get_cache", autospec=True, return_value=mock_cache): - yield multi_cached(keys_from_attr="keys") + yield multi_cached(cache=mock_cache, keys_from_attr="keys") @pytest.fixture def decorator_call(self, decorator): @@ -346,15 +291,12 @@ def spy_stub_dict(self, mocker): mocker.spy(module, "stub_dict") def test_init(self): + cache = SimpleMemoryCache() mc = multi_cached( keys_from_attr="keys", key_builder=None, ttl=1, - cache=SimpleMemoryCache, - plugins=None, - alias=None, - namespace="test", - unused_kwarg="unused", + cache=cache, ) def f(): @@ -363,30 +305,7 @@ def f(): assert mc.ttl == 1 assert mc.key_builder("key", f) == "key" assert mc.keys_from_attr == "keys" - assert mc.cache is None - assert mc._cache == SimpleMemoryCache - assert mc._serializer is None - assert mc._namespace == "test" - assert mc._kwargs == {"unused_kwarg": "unused"} - - def test_fails_at_instantiation(self): - with pytest.raises(TypeError): - - @multi_cached(wrong_param=1) - async def fn() -> None: - """Dummy function.""" - - def test_alias_takes_precedence(self, mock_cache): - with patch( - "aiocache.decorators.caches.get", autospec=True, return_value=mock_cache - ) as mock_get: - mc = multi_cached( - keys_from_attr="keys", alias="default", cache=SimpleMemoryCache, namespace="test" - ) - mc(stub_dict) - - mock_get.assert_called_with("default") - assert mc.cache is mock_cache + assert mc.cache is cache def test_get_cache_keys(self, decorator): keys = decorator.get_cache_keys(stub_dict, (), {"keys": ["a", "b"]}) @@ -540,15 +459,14 @@ async def test_set_in_cache_exception(self, decorator, decorator_call): async def test_decorate(self, mock_cache): mock_cache.multi_get.return_value = [None] - with patch("aiocache.decorators._get_cache", autospec=True, return_value=mock_cache): - @multi_cached(keys_from_attr="keys") - async def fn(keys=None): - return {"test": 1} + 
@multi_cached(cache=mock_cache, keys_from_attr="keys") + async def fn(keys=None): + return {"test": 1} - assert await fn(keys=["test"]) == {"test": 1} - assert await fn(["test"]) == {"test": 1} - assert fn.cache == mock_cache + assert await fn(keys=["test"]) == {"test": 1} + assert await fn(["test"]) == {"test": 1} + assert fn.cache == mock_cache async def test_keeps_signature(self): @multi_cached(keys_from_attr="keys") @@ -559,35 +477,9 @@ async def what(self, keys=None, what=1): assert str(inspect.signature(what)) == "(self, keys=None, what=1)" assert inspect.getfullargspec(what.__wrapped__).args == ["self", "keys", "what"] - async def test_reuses_cache_instance(self): - with patch("aiocache.decorators._get_cache", autospec=True) as get_c: - cache = create_autospec(AbstractBaseCache, instance=True) - cache.multi_get.return_value = [None] - get_c.side_effect = [cache, None] - - @multi_cached("keys") - async def what(keys=None): - return {} - - await what(keys=["a"]) - await what(keys=["a"]) - - assert get_c.call_count == 1 - assert cache.multi_get.call_count == 2 - - async def test_cache_per_function(self): - @multi_cached("keys") - async def foo(): - """First function.""" - - @multi_cached("keys") - async def bar(): - """Second function.""" - - assert foo.cache != bar.cache - async def test_key_builder(self): - @multi_cached("keys", key_builder=lambda key, _, keys: key + 1) + @multi_cached(cache=SimpleMemoryCache(), keys_from_attr="keys", + key_builder=lambda key, _, keys: key + 1) async def f(keys=None): return {k: k * 3 for k in keys} diff --git a/tests/ut/test_factory.py b/tests/ut/test_factory.py deleted file mode 100644 index 7b33b8b3..00000000 --- a/tests/ut/test_factory.py +++ /dev/null @@ -1,361 +0,0 @@ -from unittest.mock import Mock, patch - -import pytest - -from aiocache import AIOCACHE_CACHES, Cache, caches -from aiocache.backends.memory import SimpleMemoryCache -from aiocache.exceptions import InvalidCacheType -from aiocache.factory import 
_class_from_string, _create_cache -from aiocache.plugins import HitMissRatioPlugin, TimingPlugin -from aiocache.serializers import JsonSerializer, PickleSerializer - - -CACHE_NAMES = [Cache.MEMORY.NAME] - -try: - from aiocache.backends.memcached import MemcachedCache -except ImportError: - MemcachedCache = None -else: - assert Cache.MEMCACHED is not None - CACHE_NAMES.append(Cache.MEMCACHED.NAME) - -try: - from aiocache.backends.redis import RedisCache -except ImportError: - RedisCache = None -else: - assert Cache.REDIS is not None - CACHE_NAMES.append(Cache.REDIS.NAME) - - -@pytest.mark.redis -def test_class_from_string(): - assert _class_from_string("aiocache.RedisCache") == RedisCache - - -def test_create_cache_with_everything(): - cache = _create_cache( - SimpleMemoryCache, - serializer={"class": PickleSerializer, "encoding": "encoding"}, - plugins=[{"class": "aiocache.plugins.TimingPlugin"}], - ) - - assert isinstance(cache.serializer, PickleSerializer) - assert cache.serializer.encoding == "encoding" - assert isinstance(cache.plugins[0], TimingPlugin) - - -class TestCache: - def test_cache_types(self): - assert Cache.MEMORY == SimpleMemoryCache - assert Cache.REDIS == RedisCache - assert Cache.MEMCACHED == MemcachedCache - - @pytest.mark.parametrize("cache_type", CACHE_NAMES) - async def test_new(self, cache_type): - kwargs = {"a": 1, "b": 2} - cache_class = Cache.get_scheme_class(cache_type) - - with patch("aiocache.{}.__init__".format(cache_class.__name__)) as init: - cache = Cache(cache_class, **kwargs) - assert isinstance(cache, cache_class) - init.assert_called_once_with(**kwargs) - - def test_new_defaults_to_memory(self): - assert isinstance(Cache(), Cache.MEMORY) - - def test_new_invalid_cache_raises(self): - with pytest.raises(InvalidCacheType) as e: - Cache(object) - assert str(e.value) == "Invalid cache type, you can only use {}".format( - list(AIOCACHE_CACHES.keys()) - ) - - @pytest.mark.parametrize("scheme", CACHE_NAMES) - def 
test_get_scheme_class(self, scheme): - assert Cache.get_scheme_class(scheme) == AIOCACHE_CACHES[scheme] - - def test_get_scheme_class_invalid(self): - with pytest.raises(InvalidCacheType): - Cache.get_scheme_class("http") - - @pytest.mark.parametrize("scheme", CACHE_NAMES) - def test_from_url_returns_cache_from_scheme(self, scheme): - assert isinstance(Cache.from_url("{}://".format(scheme)), Cache.get_scheme_class(scheme)) - - @pytest.mark.parametrize( - "url,expected_args", - [ - ("redis://", {}), - ("redis://localhost", {"host": "localhost"}), - ("redis://localhost/", {"host": "localhost"}), - ("redis://localhost:6379", {"host": "localhost", "port": 6379}), - ( - "redis://localhost/?arg1=arg1&arg2=arg2", - {"host": "localhost", "arg1": "arg1", "arg2": "arg2"}, - ), - ( - "redis://localhost:6379/?arg1=arg1&arg2=arg2", - {"host": "localhost", "port": 6379, "arg1": "arg1", "arg2": "arg2"}, - ), - ("redis:///?arg1=arg1", {"arg1": "arg1"}), - ("redis:///?arg2=arg2", {"arg2": "arg2"}), - ( - "redis://:password@localhost:6379", - {"host": "localhost", "password": "password", "port": 6379}, - ), - ( - "redis://:password@localhost:6379?password=pass", - {"host": "localhost", "password": "password", "port": 6379}, - ), - ], - ) - def test_from_url_calls_cache_with_args(self, url, expected_args): - with patch("aiocache.factory.Cache", autospec=True) as mock: - Cache.from_url(url) - - mock.assert_called_once_with(mock.get_scheme_class.return_value, **expected_args) - - def test_calls_parse_uri_path_from_cache(self): - p_mock = Mock(spec_set=(), return_value={"arg1": "arg1"}) - with patch("aiocache.factory.Cache", autospec=True) as mock: - mock.get_scheme_class.return_value.parse_uri_path = p_mock - Cache.from_url("redis:///") - - mock.get_scheme_class.return_value.parse_uri_path.assert_called_once_with("/") - mock.assert_called_once_with(mock.get_scheme_class.return_value, arg1="arg1") - - def test_from_url_invalid_protocol(self): - with pytest.raises(InvalidCacheType): - 
Cache.from_url("http://") - - -class TestCacheHandler: - @pytest.fixture(autouse=True) - def remove_caches(self): - caches._caches = {} - - def test_add_new_entry(self): - alias = "memory" - config = { - "cache": "aiocache.SimpleMemoryCache", - "serializer": {"class": "aiocache.serializers.StringSerializer"}, - } - caches.add(alias, config) - - assert caches.get_config()[alias] == config - - def test_add_updates_existing_entry(self): - alias = "memory" - config = { - "cache": "aiocache.SimpleMemoryCache", - "serializer": {"class": "aiocache.serializers.StringSerializer"}, - } - caches.add(alias, {}) - caches.add(alias, config) - - assert caches.get_config()[alias] == config - - def test_get_wrong_alias(self): - with pytest.raises(KeyError): - caches.get("wrong_cache") - - with pytest.raises(KeyError): - caches.create("wrong_cache") - - def test_reuse_instance(self): - assert caches.get("default") is caches.get("default") - - def test_create_not_reuse(self): - assert caches.create("default") is not caches.create("default") - - @pytest.mark.redis - def test_create_extra_args(self): - caches.set_config( - { - "default": { - "cache": "aiocache.RedisCache", - "host": "127.0.0.9", - "db": 10, - "port": 6378, - } - } - ) - cache = caches.create("default", namespace="whatever", host="127.0.0.10", db=10) - assert cache.namespace == "whatever" - assert cache.client.connection_pool.connection_kwargs["host"] == "127.0.0.10" - assert cache.client.connection_pool.connection_kwargs["db"] == 10 - - @pytest.mark.redis - def test_retrieve_cache(self): - caches.set_config( - { - "default": { - "cache": "aiocache.RedisCache", - "host": "127.0.0.10", - "port": 6378, - "ttl": 10, - "serializer": { - "class": "aiocache.serializers.PickleSerializer", - "encoding": "encoding", - }, - "plugins": [ - {"class": "aiocache.plugins.HitMissRatioPlugin"}, - {"class": "aiocache.plugins.TimingPlugin"}, - ], - } - } - ) - - cache = caches.get("default") - assert isinstance(cache, RedisCache) - assert 
cache.client.connection_pool.connection_kwargs["host"] == "127.0.0.10" - assert cache.client.connection_pool.connection_kwargs["port"] == 6378 - assert cache.ttl == 10 - assert isinstance(cache.serializer, PickleSerializer) - assert cache.serializer.encoding == "encoding" - assert len(cache.plugins) == 2 - - @pytest.mark.redis - def test_retrieve_cache_new_instance(self): - caches.set_config( - { - "default": { - "cache": "aiocache.RedisCache", - "host": "127.0.0.10", - "port": 6378, - "serializer": { - "class": "aiocache.serializers.PickleSerializer", - "encoding": "encoding", - }, - "plugins": [ - {"class": "aiocache.plugins.HitMissRatioPlugin"}, - {"class": "aiocache.plugins.TimingPlugin"}, - ], - } - } - ) - - cache = caches.create("default") - assert isinstance(cache, RedisCache) - assert cache.client.connection_pool.connection_kwargs["host"] == "127.0.0.10" - assert cache.client.connection_pool.connection_kwargs["port"] == 6378 - assert isinstance(cache.serializer, PickleSerializer) - assert cache.serializer.encoding == "encoding" - assert len(cache.plugins) == 2 - - @pytest.mark.redis - def test_multiple_caches(self): - caches.set_config( - { - "default": { - "cache": "aiocache.RedisCache", - "host": "127.0.0.10", - "port": 6378, - "serializer": {"class": "aiocache.serializers.PickleSerializer"}, - "plugins": [ - {"class": "aiocache.plugins.HitMissRatioPlugin"}, - {"class": "aiocache.plugins.TimingPlugin"}, - ], - }, - "alt": {"cache": "aiocache.SimpleMemoryCache"}, - } - ) - - default = caches.get("default") - alt = caches.get("alt") - - assert isinstance(default, RedisCache) - assert default.client.connection_pool.connection_kwargs["host"] == "127.0.0.10" - assert default.client.connection_pool.connection_kwargs["port"] == 6378 - assert isinstance(default.serializer, PickleSerializer) - assert len(default.plugins) == 2 - - assert isinstance(alt, SimpleMemoryCache) - - def test_default_caches(self): - assert caches.get_config() == { - "default": { - 
"cache": "aiocache.SimpleMemoryCache", - "serializer": {"class": "aiocache.serializers.NullSerializer"}, - } - } - - def test_get_alias_config(self): - assert caches.get_alias_config("default") == { - "cache": "aiocache.SimpleMemoryCache", - "serializer": {"class": "aiocache.serializers.NullSerializer"}, - } - - def test_set_empty_config(self): - with pytest.raises(ValueError): - caches.set_config({}) - - def test_set_config_updates_existing_values(self): - assert not isinstance(caches.get("default").serializer, JsonSerializer) - caches.set_config( - { - "default": { - "cache": "aiocache.SimpleMemoryCache", - "serializer": {"class": "aiocache.serializers.JsonSerializer"}, - } - } - ) - assert isinstance(caches.get("default").serializer, JsonSerializer) - - def test_set_config_removes_existing_caches(self): - caches.set_config( - { - "default": {"cache": "aiocache.SimpleMemoryCache"}, - "alt": {"cache": "aiocache.SimpleMemoryCache"}, - } - ) - caches.get("default") - caches.get("alt") - assert len(caches._caches) == 2 - - caches.set_config( - { - "default": {"cache": "aiocache.SimpleMemoryCache"}, - "alt": {"cache": "aiocache.SimpleMemoryCache"}, - } - ) - assert caches._caches == {} - - def test_set_config_no_default(self): - with pytest.raises(ValueError): - caches.set_config( - { - "no_default": { - "cache": "aiocache.RedisCache", - "host": "127.0.0.10", - "port": 6378, - "serializer": {"class": "aiocache.serializers.PickleSerializer"}, - "plugins": [ - {"class": "aiocache.plugins.HitMissRatioPlugin"}, - {"class": "aiocache.plugins.TimingPlugin"}, - ], - } - } - ) - - @pytest.mark.redis - def test_ensure_plugins_order(self): - caches.set_config( - { - "default": { - "cache": "aiocache.RedisCache", - "plugins": [ - {"class": "aiocache.plugins.HitMissRatioPlugin"}, - {"class": "aiocache.plugins.TimingPlugin"}, - ], - } - } - ) - - cache = caches.get("default") - assert isinstance(cache.plugins[0], HitMissRatioPlugin) - - cache = caches.create("default") - assert 
isinstance(cache.plugins[0], HitMissRatioPlugin)