Skip to content

Commit f83f43b

Browse files
Change RedisBackend to accept Redis client directly (#755)
Co-authored-by: Sam Bull <[email protected]>
1 parent 5d681de commit f83f43b

22 files changed: +171 −188 lines changed

CHANGES.rst

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ Migration instructions
1212

1313
There are a number of backwards-incompatible changes. These points should help with migrating from an older release:
1414

15+
* ``RedisBackend`` now expects a ``redis.Redis`` instance as an argument, instead of creating one internally from keyword arguments.
1516
* The ``key_builder`` parameter for caches now expects a callback which accepts 2 strings and returns a string in all cache implementations, making the builders simpler and interchangeable.
1617
* The ``key`` parameter has been removed from the ``cached`` decorator. The behaviour can be easily reimplemented with ``key_builder=lambda *a, **kw: "foo"``
1718
* When using the ``key_builder`` parameter in ``@multicached``, the function will now return the original, unmodified keys, only using the transformed keys in the cache (this has always been the documented behaviour, but not the implemented behaviour).

aiocache/backends/memcached.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -8,13 +8,13 @@
88

99

1010
class MemcachedBackend(BaseCache[bytes]):
11-
def __init__(self, endpoint="127.0.0.1", port=11211, pool_size=2, **kwargs):
11+
def __init__(self, host="127.0.0.1", port=11211, pool_size=2, **kwargs):
1212
super().__init__(**kwargs)
13-
self.endpoint = endpoint
13+
self.host = host
1414
self.port = port
1515
self.pool_size = int(pool_size)
1616
self.client = aiomcache.Client(
17-
self.endpoint, self.port, pool_size=self.pool_size
17+
self.host, self.port, pool_size=self.pool_size
1818
)
1919

2020
async def _get(self, key, encoding="utf-8", _conn=None):
@@ -153,4 +153,4 @@ def parse_uri_path(cls, path):
153153
return {}
154154

155155
def __repr__(self): # pragma: no cover
156-
return "MemcachedCache ({}:{})".format(self.endpoint, self.port)
156+
return "MemcachedCache ({}:{})".format(self.host, self.port)

aiocache/backends/redis.py

Lines changed: 9 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
import itertools
2-
import warnings
32
from typing import Any, Callable, Optional, TYPE_CHECKING
43

54
import redis.asyncio as redis
@@ -38,41 +37,19 @@ class RedisBackend(BaseCache[str]):
3837

3938
def __init__(
4039
self,
41-
endpoint="127.0.0.1",
42-
port=6379,
43-
db=0,
44-
password=None,
45-
pool_min_size=_NOT_SET,
46-
pool_max_size=None,
47-
create_connection_timeout=None,
40+
client: redis.Redis,
4841
**kwargs,
4942
):
5043
super().__init__(**kwargs)
51-
if pool_min_size is not _NOT_SET:
52-
warnings.warn(
53-
"Parameter 'pool_min_size' is deprecated since aiocache 0.12",
54-
DeprecationWarning, stacklevel=2
55-
)
56-
57-
self.endpoint = endpoint
58-
self.port = int(port)
59-
self.db = int(db)
60-
self.password = password
61-
# TODO: Remove int() call some time after adding type annotations.
62-
self.pool_max_size = None if pool_max_size is None else int(pool_max_size)
63-
self.create_connection_timeout = (
64-
float(create_connection_timeout) if create_connection_timeout else None
65-
)
6644

6745
# NOTE: decoding can't be controlled on API level after switching to
6846
# redis, we need to disable decoding on global/connection level
6947
# (decode_responses=False), because some of the values are saved as
7048
# bytes directly, like pickle serialized values, which may raise an
7149
# exception when decoded with 'utf-8'.
72-
self.client = redis.Redis(host=self.endpoint, port=self.port, db=self.db,
73-
password=self.password, decode_responses=False,
74-
socket_connect_timeout=self.create_connection_timeout,
75-
max_connections=self.pool_max_size)
50+
if client.connection_pool.connection_kwargs['decode_responses']:
51+
raise ValueError("redis client must be constructed with decode_responses set to False")
52+
self.client = client
7653

7754
async def _get(self, key, encoding="utf-8", _conn=None):
7855
value = await self.client.get(key)
@@ -175,9 +152,6 @@ async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs):
175152
async def _redlock_release(self, key, value):
176153
return await self._raw("eval", self.RELEASE_SCRIPT, 1, key, value)
177154

178-
async def _close(self, *args, _conn=None, **kwargs):
179-
await self.client.close()
180-
181155
def build_key(self, key: str, namespace: Optional[str] = None) -> str:
182156
return self._str_build_key(key, namespace)
183157

@@ -196,24 +170,21 @@ class RedisCache(RedisBackend):
196170
the backend. Default is an empty string, "".
197171
:param timeout: int or float in seconds specifying maximum timeout for the operations to last.
198172
By default its 5.
199-
:param endpoint: str with the endpoint to connect to. Default is "127.0.0.1".
200-
:param port: int with the port to connect to. Default is 6379.
201-
:param db: int indicating database to use. Default is 0.
202-
:param password: str indicating password to use. Default is None.
203-
:param pool_max_size: int maximum pool size for the redis connections pool. Default is None.
204-
:param create_connection_timeout: int timeout for the creation of connection. Default is None
173+
:param client: redis.Redis which is an active client for working with redis
205174
"""
206175

207176
NAME = "redis"
208177

209178
def __init__(
210179
self,
180+
client: redis.Redis,
211181
serializer: Optional["BaseSerializer"] = None,
212182
namespace: str = "",
213183
key_builder: Callable[[str, str], str] = lambda k, ns: f"{ns}:{k}" if ns else k,
214184
**kwargs: Any,
215185
):
216186
super().__init__(
187+
client=client,
217188
serializer=serializer or JsonSerializer(),
218189
namespace=namespace,
219190
key_builder=key_builder,
@@ -237,4 +208,5 @@ def parse_uri_path(cls, path):
237208
return options
238209

239210
def __repr__(self): # pragma: no cover
240-
return "RedisCache ({}:{})".format(self.endpoint, self.port)
211+
connection_kwargs = self.client.connection_pool.connection_kwargs
212+
return "RedisCache ({}:{})".format(connection_kwargs['host'], connection_kwargs['port'])

aiocache/factory.py

Lines changed: 24 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,16 @@
11
import logging
22
import urllib
3+
from contextlib import suppress
34
from copy import deepcopy
45
from typing import Dict
56

67
from aiocache import AIOCACHE_CACHES
78
from aiocache.base import BaseCache
89
from aiocache.exceptions import InvalidCacheType
910

11+
with suppress(ImportError):
12+
import redis.asyncio as redis
13+
1014

1115
logger = logging.getLogger(__name__)
1216

@@ -18,6 +22,7 @@ def _class_from_string(class_path):
1822

1923

2024
def _create_cache(cache, serializer=None, plugins=None, **kwargs):
25+
kwargs = deepcopy(kwargs)
2126
if serializer is not None:
2227
cls = serializer.pop("class")
2328
cls = _class_from_string(cls) if isinstance(cls, str) else cls
@@ -29,10 +34,17 @@ def _create_cache(cache, serializer=None, plugins=None, **kwargs):
2934
cls = plugin.pop("class")
3035
cls = _class_from_string(cls) if isinstance(cls, str) else cls
3136
plugins_instances.append(cls(**plugin))
32-
3337
cache = _class_from_string(cache) if isinstance(cache, str) else cache
34-
instance = cache(serializer=serializer, plugins=plugins_instances, **kwargs)
35-
return instance
38+
if cache == AIOCACHE_CACHES.get("redis"):
39+
return cache(
40+
serializer=serializer,
41+
plugins=plugins_instances,
42+
namespace=kwargs.pop('namespace', ''),
43+
ttl=kwargs.pop('ttl', None),
44+
client=redis.Redis(**kwargs)
45+
)
46+
else:
47+
return cache(serializer=serializer, plugins=plugins_instances, **kwargs)
3648

3749

3850
class Cache:
@@ -112,15 +124,21 @@ def from_url(cls, url):
112124
kwargs.update(cache_class.parse_uri_path(parsed_url.path))
113125

114126
if parsed_url.hostname:
115-
kwargs["endpoint"] = parsed_url.hostname
127+
kwargs["host"] = parsed_url.hostname
116128

117129
if parsed_url.port:
118130
kwargs["port"] = parsed_url.port
119131

120132
if parsed_url.password:
121133
kwargs["password"] = parsed_url.password
122134

123-
return Cache(cache_class, **kwargs)
135+
for arg in ['max_connections', 'socket_connect_timeout']:
136+
if arg in kwargs:
137+
kwargs[arg] = int(kwargs[arg])
138+
if cache_class == cls.REDIS:
139+
return Cache(cache_class, client=redis.Redis(**kwargs))
140+
else:
141+
return Cache(cache_class, **kwargs)
124142

125143

126144
class CacheHandler:
@@ -214,7 +232,7 @@ def set_config(self, config):
214232
},
215233
'redis_alt': {
216234
'cache': "aiocache.RedisCache",
217-
'endpoint': "127.0.0.10",
235+
'host': "127.0.0.10",
218236
'port': 6378,
219237
'serializer': {
220238
'class': "aiocache.serializers.PickleSerializer"

examples/cached_alias_config.py

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
import asyncio
22

3+
import redis.asyncio as redis
4+
35
from aiocache import caches, Cache
46
from aiocache.serializers import StringSerializer, PickleSerializer
57

@@ -12,9 +14,9 @@
1214
},
1315
'redis_alt': {
1416
'cache': "aiocache.RedisCache",
15-
'endpoint': "127.0.0.1",
17+
"host": "127.0.0.1",
1618
'port': 6379,
17-
'timeout': 1,
19+
"socket_connect_timeout": 1,
1820
'serializer': {
1921
'class': "aiocache.serializers.PickleSerializer"
2022
},
@@ -45,17 +47,18 @@ async def alt_cache():
4547
assert isinstance(cache, Cache.REDIS)
4648
assert isinstance(cache.serializer, PickleSerializer)
4749
assert len(cache.plugins) == 2
48-
assert cache.endpoint == "127.0.0.1"
49-
assert cache.timeout == 1
50-
assert cache.port == 6379
50+
connection_args = cache.client.connection_pool.connection_kwargs
51+
assert connection_args["host"] == "127.0.0.1"
52+
assert connection_args["socket_connect_timeout"] == 1
53+
assert connection_args["port"] == 6379
5154
await cache.close()
5255

5356

5457
async def test_alias():
5558
await default_cache()
5659
await alt_cache()
5760

58-
cache = Cache(Cache.REDIS)
61+
cache = Cache(Cache.REDIS, client=redis.Redis())
5962
await cache.delete("key")
6063
await cache.close()
6164

examples/cached_decorator.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import asyncio
22

33
from collections import namedtuple
4+
import redis.asyncio as redis
45

56
from aiocache import cached, Cache
67
from aiocache.serializers import PickleSerializer
@@ -10,13 +11,13 @@
1011

1112
@cached(
1213
ttl=10, cache=Cache.REDIS, key_builder=lambda *args, **kw: "key",
13-
serializer=PickleSerializer(), port=6379, namespace="main")
14+
serializer=PickleSerializer(), namespace="main", client=redis.Redis())
1415
async def cached_call():
1516
return Result("content", 200)
1617

1718

1819
async def test_cached():
19-
async with Cache(Cache.REDIS, endpoint="127.0.0.1", port=6379, namespace="main") as cache:
20+
async with Cache(Cache.REDIS, namespace="main", client=redis.Redis()) as cache:
2021
await cached_call()
2122
exists = await cache.exists("key")
2223
assert exists is True

examples/multicached_decorator.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
import asyncio
22

3+
import redis.asyncio as redis
4+
35
from aiocache import multi_cached, Cache
46

57
DICT = {
@@ -9,20 +11,19 @@
911
'd': "W"
1012
}
1113

14+
cache = Cache(Cache.REDIS, namespace="main", client=redis.Redis())
15+
1216

13-
@multi_cached("ids", cache=Cache.REDIS, namespace="main")
17+
@multi_cached("ids", cache=Cache.REDIS, namespace="main", client=cache.client)
1418
async def multi_cached_ids(ids=None):
1519
return {id_: DICT[id_] for id_ in ids}
1620

1721

18-
@multi_cached("keys", cache=Cache.REDIS, namespace="main")
22+
@multi_cached("keys", cache=Cache.REDIS, namespace="main", client=cache.client)
1923
async def multi_cached_keys(keys=None):
2024
return {id_: DICT[id_] for id_ in keys}
2125

2226

23-
cache = Cache(Cache.REDIS, endpoint="127.0.0.1", port=6379, namespace="main")
24-
25-
2627
async def test_multi_cached():
2728
await multi_cached_ids(ids=("a", "b"))
2829
await multi_cached_ids(ids=("a", "c"))

examples/optimistic_lock.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,12 +2,13 @@
22
import logging
33
import random
44

5+
import redis.asyncio as redis
6+
57
from aiocache import Cache
68
from aiocache.lock import OptimisticLock, OptimisticLockError
79

8-
910
logger = logging.getLogger(__name__)
10-
cache = Cache(Cache.REDIS, endpoint='127.0.0.1', port=6379, namespace='main')
11+
cache = Cache(Cache.REDIS, namespace="main", client=redis.Redis())
1112

1213

1314
async def expensive_function():

examples/python_object.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,14 @@
11
import asyncio
22

33
from collections import namedtuple
4+
import redis.asyncio as redis
5+
6+
47
from aiocache import Cache
58
from aiocache.serializers import PickleSerializer
69

7-
810
MyObject = namedtuple("MyObject", ["x", "y"])
9-
cache = Cache(Cache.REDIS, serializer=PickleSerializer(), namespace="main")
11+
cache = Cache(Cache.REDIS, serializer=PickleSerializer(), namespace="main", client=redis.Redis())
1012

1113

1214
async def complex_object():

examples/redlock.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,13 @@
11
import asyncio
22
import logging
33

4+
import redis.asyncio as redis
5+
46
from aiocache import Cache
57
from aiocache.lock import RedLock
68

7-
89
logger = logging.getLogger(__name__)
9-
cache = Cache(Cache.REDIS, endpoint='127.0.0.1', port=6379, namespace='main')
10+
cache = Cache(Cache.REDIS, namespace="main", client=redis.Redis())
1011

1112

1213
async def expensive_function():

0 commit comments

Comments (0)