Skip to content

Commit

Permalink
Fix formatting + tests
Browse files Browse the repository at this point in the history
  • Loading branch information
mikeoconnor0308 committed Jan 17, 2025
1 parent 4564137 commit 420e151
Show file tree
Hide file tree
Showing 3 changed files with 9 additions and 8 deletions.
2 changes: 1 addition & 1 deletion aiocache/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict, Type
from typing import Any, Type

from .backends.memory import SimpleMemoryCache
from .base import BaseCache
Expand Down
10 changes: 5 additions & 5 deletions aiocache/decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ class cached:
Caches the function's return value into a key generated with module_name, function_name
and args. The cache is available in the function object as ``<function_name>.cache``.
:param cache: cache instance to use when calling the ``set``/``get`` operations.
:param ttl: int seconds to store the function call. Default is None which means no expiration.
:param key_builder: Callable that allows to build the function dynamically. It receives
the function plus same args and kwargs passed to the function.
Expand All @@ -22,7 +23,6 @@ class cached:
wrapped function and should return `True` if the value should skip the
cache (or `False` to store in the cache).
e.g. to avoid caching `None` results: `lambda r: r is None`
:param cache: cache instance to use when calling the ``set``/``get`` operations.
:param noself: bool if you are decorating a class function, by default self is also used to
generate the key. This will result in same function calls done by different class instances
to use different cache keys. Use noself=True if you want to ignore it.
Expand Down Expand Up @@ -109,6 +109,8 @@ class cached_stampede(cached):
Caches the function's return value into a key generated with module_name, function_name and args
while avoiding cache stampede effects.
:param cache: cache instance to use when calling the ``set``/``get`` operations.
Default is :class:`aiocache.SimpleMemoryCache`.
:param lease: int seconds to lock function call to avoid cache stampede effects.
If 0 or None, no locking happens (default is 2). redis and memory backends support
float ttls
Expand All @@ -121,15 +123,13 @@ class cached_stampede(cached):
wrapped function and should return `True` if the value should skip the
cache (or `False` to store in the cache).
e.g. to avoid caching `None` results: `lambda r: r is None`
:param cache: cache instance to use when calling the ``set``/``get`` operations.
Default is :class:`aiocache.SimpleMemoryCache`.
:param noself: bool if you are decorating a class function, by default self is also used to
generate the key. This will result in same function calls done by different class instances
to use different cache keys. Use noself=True if you want to ignore it.
"""

def __init__(self, lease=2, **kwargs):
super().__init__(**kwargs)
def __init__(self, cache, lease=2, **kwargs):
super().__init__(cache, **kwargs)
self.lease = lease

async def decorator(self, f, *args, **kwargs):
Expand Down
5 changes: 3 additions & 2 deletions tests/ut/test_decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -477,8 +477,9 @@ async def what(self, keys=None, what=1):
assert str(inspect.signature(what)) == "(self, keys=None, what=1)"
assert inspect.getfullargspec(what.__wrapped__).args == ["self", "keys", "what"]

async def test_key_builder(self, mock_cache):
@multi_cached(mock_cache, keys_from_attr="keys", key_builder=lambda key, _, keys: key + 1)
async def test_key_builder(self):
@multi_cached(cache=SimpleMemoryCache(), keys_from_attr="keys",
key_builder=lambda key, _, keys: key + 1)
async def f(keys=None):
return {k: k * 3 for k in keys}

Expand Down

0 comments on commit 420e151

Please sign in to comment.