diff --git a/aiocache/backends/redis.py b/aiocache/backends/redis.py
index 350da968..b150fbdd 100644
--- a/aiocache/backends/redis.py
+++ b/aiocache/backends/redis.py
@@ -37,7 +37,7 @@ class RedisBackend(BaseCache[str]):

     def __init__(
         self,
-        client,
+        client: redis.Redis,
         **kwargs,
     ):
         super().__init__(**kwargs)
diff --git a/aiocache/factory.py b/aiocache/factory.py
index bb14f1d0..dbe9d8bd 100644
--- a/aiocache/factory.py
+++ b/aiocache/factory.py
@@ -3,7 +3,7 @@
 from copy import deepcopy
 from typing import Dict

-import redis
+import redis.asyncio as redis

 from aiocache import AIOCACHE_CACHES
 from aiocache.base import BaseCache
@@ -230,7 +230,7 @@ def set_config(self, config):
             },
             'redis_alt': {
                 'cache': "aiocache.RedisCache",
-                'endpoint': "127.0.0.10",
+                'host': "127.0.0.10",
                 'port': 6378,
                 'serializer': {
                     'class': "aiocache.serializers.PickleSerializer"
diff --git a/examples/cached_alias_config.py b/examples/cached_alias_config.py
index a22678ad..b760dec0 100644
--- a/examples/cached_alias_config.py
+++ b/examples/cached_alias_config.py
@@ -1,7 +1,10 @@
 import asyncio

+import redis.asyncio as redis
+
 from aiocache import caches, Cache
 from aiocache.serializers import StringSerializer, PickleSerializer
+from examples.conftest import redis_kwargs_for_test

 caches.set_config({
     'default': {
@@ -12,9 +15,9 @@
     },
     'redis_alt': {
         'cache': "aiocache.RedisCache",
-        'endpoint': "127.0.0.1",
+        'host': "127.0.0.1",
         'port': 6379,
-        'timeout': 1,
+        'socket_connect_timeout': 1,
         'serializer': {
             'class': "aiocache.serializers.PickleSerializer"
         },
@@ -45,9 +48,9 @@ async def alt_cache():
     assert isinstance(cache, Cache.REDIS)
     assert isinstance(cache.serializer, PickleSerializer)
     assert len(cache.plugins) == 2
-    assert cache.endpoint == "127.0.0.1"
-    assert cache.timeout == 1
-    assert cache.port == 6379
+    assert cache.client.connection_pool.connection_kwargs['host'] == "127.0.0.1"
+    assert cache.client.connection_pool.connection_kwargs['socket_connect_timeout'] == 1
+    assert cache.client.connection_pool.connection_kwargs['port'] == 6379
     await cache.close()


@@ -55,7 +58,7 @@ async def test_alias():
     await default_cache()
     await alt_cache()

-    cache = Cache(Cache.REDIS)
+    cache = Cache(Cache.REDIS, client=redis.Redis(**redis_kwargs_for_test()))
     await cache.delete("key")
     await cache.close()

diff --git a/examples/cached_decorator.py b/examples/cached_decorator.py
index 01c5a46a..27cfb14d 100644
--- a/examples/cached_decorator.py
+++ b/examples/cached_decorator.py
@@ -1,22 +1,29 @@
 import asyncio

 from collections import namedtuple

+import redis.asyncio as redis
 from aiocache import cached, Cache
 from aiocache.serializers import PickleSerializer
+from examples.conftest import redis_kwargs_for_test

 Result = namedtuple('Result', "content, status")


 @cached(
     ttl=10, cache=Cache.REDIS, key_builder=lambda *args, **kw: "key",
-    serializer=PickleSerializer(), port=6379, namespace="main")
+    serializer=PickleSerializer(), namespace="main", client=redis.Redis(
+        host="127.0.0.1",
+        port=6379,
+        db=0,
+        decode_responses=False,
+    ))
 async def cached_call():
     return Result("content", 200)


 async def test_cached():
-    async with Cache(Cache.REDIS, endpoint="127.0.0.1", port=6379, namespace="main") as cache:
+    async with Cache(Cache.REDIS, namespace="main", client=redis.Redis(**redis_kwargs_for_test())) as cache:
         await cached_call()
         exists = await cache.exists("key")
         assert exists is True
diff --git a/examples/conftest.py b/examples/conftest.py
new file mode 100644
index 00000000..63eb893c
--- /dev/null
+++ b/examples/conftest.py
@@ -0,0 +1,9 @@
+def redis_kwargs_for_test():
+    return dict(
+        host="127.0.0.1",
+        port=6379,
+        db=0,
+        password=None,
+        decode_responses=False,
+        socket_connect_timeout=None,
+    )
diff --git a/examples/multicached_decorator.py b/examples/multicached_decorator.py
index d05d5f4a..05f3a6e0 100644
--- a/examples/multicached_decorator.py
+++ b/examples/multicached_decorator.py
@@ -1,6 +1,9 @@
 import asyncio

+import redis.asyncio as redis
+
 from aiocache import multi_cached, Cache
+from examples.conftest import redis_kwargs_for_test

 DICT = {
     'a': "Z",
@@ -9,18 +12,19 @@
     'd': "W"
 }


+cache = Cache(Cache.REDIS, namespace="main", client=redis.Redis(**redis_kwargs_for_test()))
+
-@multi_cached("ids", cache=Cache.REDIS, namespace="main")
+@multi_cached("ids", cache=Cache.REDIS, namespace="main", client=cache.client)
 async def multi_cached_ids(ids=None):
     return {id_: DICT[id_] for id_ in ids}


-@multi_cached("keys", cache=Cache.REDIS, namespace="main")
+@multi_cached("keys", cache=Cache.REDIS, namespace="main", client=cache.client)
 async def multi_cached_keys(keys=None):
     return {id_: DICT[id_] for id_ in keys}


-cache = Cache(Cache.REDIS, endpoint="127.0.0.1", port=6379, namespace="main")


 async def test_multi_cached():
diff --git a/examples/optimistic_lock.py b/examples/optimistic_lock.py
index 20624907..9abdfe1a 100644
--- a/examples/optimistic_lock.py
+++ b/examples/optimistic_lock.py
@@ -2,12 +2,14 @@
 import logging
 import random

+import redis.asyncio as redis
+
 from aiocache import Cache
 from aiocache.lock import OptimisticLock, OptimisticLockError
-
+from examples.conftest import redis_kwargs_for_test

 logger = logging.getLogger(__name__)
-cache = Cache(Cache.REDIS, endpoint='127.0.0.1', port=6379, namespace='main')
+cache = Cache(Cache.REDIS, namespace="main", client=redis.Redis(**redis_kwargs_for_test()))


 async def expensive_function():
diff --git a/examples/python_object.py b/examples/python_object.py
index 8eea8b3b..669c2a82 100644
--- a/examples/python_object.py
+++ b/examples/python_object.py
@@ -1,12 +1,15 @@
 import asyncio
 from collections import namedtuple

+import redis.asyncio as redis
+
+
 from aiocache import Cache
 from aiocache.serializers import PickleSerializer
-
+from examples.conftest import redis_kwargs_for_test

 MyObject = namedtuple("MyObject", ["x", "y"])
-cache = Cache(Cache.REDIS, serializer=PickleSerializer(), namespace="main")
+cache = Cache(Cache.REDIS, serializer=PickleSerializer(), namespace="main", client=redis.Redis(**redis_kwargs_for_test()))


 async def complex_object():
diff --git a/examples/redlock.py b/examples/redlock.py
index e763ddb3..396344da 100644
--- a/examples/redlock.py
+++ b/examples/redlock.py
@@ -1,12 +1,14 @@
 import asyncio
 import logging

+import redis.asyncio as redis
+
 from aiocache import Cache
 from aiocache.lock import RedLock
-
+from examples.conftest import redis_kwargs_for_test

 logger = logging.getLogger(__name__)
-cache = Cache(Cache.REDIS, endpoint='127.0.0.1', port=6379, namespace='main')
+cache = Cache(Cache.REDIS, namespace='main', client=redis.Redis(**redis_kwargs_for_test()))


 async def expensive_function():
diff --git a/examples/serializer_class.py b/examples/serializer_class.py
index 50562b12..e6055776 100644
--- a/examples/serializer_class.py
+++ b/examples/serializer_class.py
@@ -1,8 +1,11 @@
 import asyncio
 import zlib

+import redis.asyncio as redis
+
 from aiocache import Cache
 from aiocache.serializers import BaseSerializer
+from examples.conftest import redis_kwargs_for_test


 class CompressionSerializer(BaseSerializer):
@@ -25,7 +28,7 @@ def loads(self, value):
         return decompressed


-cache = Cache(Cache.REDIS, serializer=CompressionSerializer(), namespace="main")
+cache = Cache(Cache.REDIS, serializer=CompressionSerializer(), namespace="main", client=redis.Redis(**redis_kwargs_for_test()))


 async def serializer():
diff --git a/examples/serializer_function.py b/examples/serializer_function.py
index affa0b3b..11cc815c 100644
--- a/examples/serializer_function.py
+++ b/examples/serializer_function.py
@@ -1,8 +1,11 @@
 import asyncio
 import json

+import redis.asyncio as redis
+
 from marshmallow import Schema, fields, post_load
 from aiocache import Cache
+from examples.conftest import redis_kwargs_for_test


 class MyType:
@@ -28,7 +31,7 @@ def loads(value):
     return MyTypeSchema().loads(value)


-cache = Cache(Cache.REDIS, namespace="main")
+cache = Cache(Cache.REDIS, namespace="main", client=redis.Redis(**redis_kwargs_for_test()))


 async def serializer_function():
diff --git a/examples/simple_redis.py b/examples/simple_redis.py
index 2ff6278d..53e142f6 100644
--- a/examples/simple_redis.py
+++ b/examples/simple_redis.py
@@ -2,8 +2,11 @@

 from aiocache import Cache

+import redis.asyncio as redis

-cache = Cache(Cache.REDIS, endpoint="127.0.0.1", port=6379, namespace="main")
+from examples.conftest import redis_kwargs_for_test
+
+cache = Cache(Cache.REDIS, namespace="main", client=redis.Redis(**redis_kwargs_for_test()))


 async def redis():
diff --git a/tests/acceptance/test_factory.py b/tests/acceptance/test_factory.py
index 87ec7896..920ac3d4 100644
--- a/tests/acceptance/test_factory.py
+++ b/tests/acceptance/test_factory.py
@@ -11,7 +11,7 @@ async def test_from_url_memory(self):

     def test_from_url_memory_no_endpoint(self):
         with pytest.raises(TypeError):
-            Cache.from_url("memory://endpoint:10")
+            Cache.from_url("memory://host:10")

     @pytest.mark.redis
     async def test_from_url_redis(self):
diff --git a/tests/performance/test_concurrency.py b/tests/performance/test_concurrency.py
index 5112e0b9..8fcfaad3 100644
--- a/tests/performance/test_concurrency.py
+++ b/tests/performance/test_concurrency.py
@@ -27,7 +27,7 @@ def test_concurrency_error_rates(server):
     total_requests = 1500
     # On some platforms, it's required to enlarge number of "open file descriptors"
     # with "ulimit -n number" before doing the benchmark.
-    cmd = ("ab", "-n", str(total_requests), "-c", "500", "http://127.0.0.1:8080/")
+    cmd = ("ab", "-n", str(total_requests), "-c", "500", "http://0.0.0.0:8080/")
     result = subprocess.run(cmd, capture_output=True, check=True, encoding="utf-8")

     m = re.search(r"Failed requests:\s+([0-9]+)", result.stdout)