Decorators¶
pycached comes with a couple of decorators for caching results from functions. Do not use these decorators in asynchronous functions; doing so may lead to unexpected behavior.
cached¶
from collections import namedtuple
from pycached import cached, RedisCache
from pycached.serializers import PickleSerializer
# Lightweight record for the cached call's return value: (content, status).
Result = namedtuple("Result", ["content", "status"])
@cached(
    ttl=10,
    cache=RedisCache,
    key="key",
    serializer=PickleSerializer(),
    port=6379,
    namespace="main",
)
def cached_call():
    """Return a fixed Result; the decorator stores it in Redis under "key"."""
    return Result("content", 200)
def test_cached():
    """Call the decorated function and verify its result was cached in Redis."""
    redis_cache = RedisCache(endpoint="127.0.0.1", port=6379, namespace="main")
    cached_call()
    # The decorator was given the explicit key "key", so it must exist now.
    assert redis_cache.exists("key") is True
    # Clean up so repeated runs start from an empty cache.
    redis_cache.delete("key")
    redis_cache.close()


if __name__ == "__main__":
    test_cached()
multi_cached¶
from pycached import multi_cached, RedisCache
# Static lookup table the multi_cached examples below resolve keys against.
DICT = dict(a="Z", b="Y", c="X", d="W")
@multi_cached("ids", cache=RedisCache, namespace="main")
def multi_cached_ids(ids=None):
    """Look up each entry of *ids* in DICT; the decorator caches one entry per key."""
    return {entry: DICT[entry] for entry in ids}
@multi_cached("keys", cache=RedisCache, namespace="main")
def multi_cached_keys(keys=None):
    """Same as multi_cached_ids, but the decorator reads its keys from *keys*."""
    return {entry: DICT[entry] for entry in keys}
# Module-level client used by the assertions below; it shares the "main"
# namespace with the decorated functions so it sees the keys they write.
cache = RedisCache(endpoint="127.0.0.1", port=6379, namespace="main")
def test_multi_cached():
    """Exercise both multi_cached functions and check every key landed in Redis."""
    multi_cached_ids(ids=["a", "b"])
    multi_cached_ids(ids=["a", "c"])
    multi_cached_keys(keys=["d"])
    # Every key returned by the calls above should now be cached.
    for key in ("a", "b", "c", "d"):
        assert cache.exists(key)
    # Remove them so repeated runs start from an empty namespace.
    for key in ("a", "b", "c", "d"):
        cache.delete(key)
    cache.close()


if __name__ == "__main__":
    test_multi_cached()