@JahMyst 的回答对我不起作用。Flask-Cache 不适用于 Flask-RESTful 框架：根据其文档，@cache.cached 和 @cache.memoize 无法处理可变对象。
Using mutable objects (classes, etc) as part of the cache key can become tricky. It is suggested to not pass in an object instance into a memoized function. However, the memoize does perform a repr() on the passed in arguments so that if the object has a __repr__ function that returns a uniquely identifying string for that object, that will be used as part of the cache key.
不得不提出我自己的实现。留下这个代码片段以防其他人遇到同样的问题。
cache_key
函数将用户请求转换为哈希。
cache_res_pickled
函数用于对数据进行 pickle 序列化或反序列化。
|-flask-app
|-app.py
|-resource
|--some_resource.py
import datetime
import hashlib
import json
import logging
import pickle
import time
import urllib
import urllib.parse

from flask import Response, abort, request
from redis import Redis
# Shared Redis connection used as the cache backend by every handler below.
# NOTE(review): the port is passed as the string "6379"; redis-py coerces it
# to int, but passing an int would be clearer.
redis_client = Redis("127.0.0.1", "6379")
# Time-to-live (seconds) applied to every cache key written by this module.
exp_setting_s = 1500
def json_serial(obj):
    """JSON serializer for objects not serializable by default json code.

    Intended for use as ``json.dumps(..., default=json_serial)``.

    Args:
        obj: The object the default encoder could not handle.

    Returns:
        str: The ``str()`` form of a ``datetime.datetime`` value.

    Raises:
        TypeError: For any other type. The original fell through and
            returned ``None``, which json.dumps would silently serialize
            as ``null`` — hiding the real serialization failure.
    """
    if isinstance(obj, datetime.datetime):
        return str(obj)
    raise TypeError(
        "Object of type {} is not JSON serializable".format(type(obj).__name__)
    )
def cache_key():
    """Return a SHA-256 hex digest identifying the current Flask request.

    The key is built from the request path plus the query string with both
    the parameter names and each parameter's values sorted, so logically
    identical requests hash to the same cache key regardless of the order
    the arguments were sent in.

    Returns:
        str: Hex digest suitable for use as a Redis key.
    """
    args = request.args
    # Sort names and per-name value lists so argument order can't change the key.
    pairs = [(name, value) for name in sorted(args) for value in sorted(args.getlist(name))]
    key = request.path + "?" + urllib.parse.urlencode(pairs)
    return hashlib.sha256(key.encode()).hexdigest()
def cache_res_pickled(data, encode):
    """Serialize or deserialize a cached payload with pickle.

    Args:
        data: A dict to serialize when *encode* is true, otherwise the raw
            bytes previously read back from Redis.
        encode (bool): True to pickle ``data``, False to unpickle it.

    Returns:
        ``bytes`` when encoding; the restored object when decoding.
    """
    # NOTE(review): pickle.loads on bytes from a shared Redis instance is only
    # safe if nothing untrusted can write to these keys.
    return pickle.dumps(data) if encode else pickle.loads(data)
class SomeResource(Resource):
    """REST resource backed by a Redis read-through cache.

    GET flow:
      * hash the request into a cache key,
      * cache miss: reserve the key with an empty payload, compute the
        result, store it, and stream it back,
      * cache hit with data: stream the cached result,
      * cache hit without data (another request holds the reservation):
        poll Redis until the data appears, then stream it.
    """

    @auth.login_required
    def get(self):
        """Serve the resource as streamed JSON, using Redis as a cache."""
        # SHA-256 hash of path + sorted query string (see cache_key()).
        key = cache_key()
        result = redis_client.get(key)

        def generate():
            """
            A lagging generator to stream JSON so we don't have to hold
            everything in memory. We need to omit the last comma to emit
            valid JSON, so we lag one element behind the iterator,
            similar to http://stackoverflow.com/questions/1630320/
            """
            releases = iter(res)
            try:
                prev_release = next(releases)  # first result; raises if empty
                # We have some releases. First, yield the opening json.
                yield '{"data": ['
                for release in releases:
                    yield json.dumps(prev_release, default=json_serial) + ", "
                    prev_release = release
                logging.info(f"For {key} # records returned = {len(res)}")
                # Last iteration without a comma but with the closing brackets.
                yield json.dumps(prev_release, default=json_serial) + "]}"
            except StopIteration:
                # Zero records: still emit a valid, empty document.
                logging.info(f"For {key} # records returned = {len(res)}")
                yield '{"data": []}'

        if result is None:
            # Reserve the key with an empty payload so concurrent requests for
            # the same key wait (else-branch below) instead of all hitting the
            # backend.
            redis_client.set(key, cache_res_pickled({}, True), ex=exp_setting_s)
            try:
                # Do the querying to the DB or math here to get res.
                # It should be a dict as shown below.
                res = {"A": 1, "B": 2, "C": 2}
                # Replace the placeholder with the real data.
                redis_client.set(key, cache_res_pickled(res, True), ex=exp_setting_s)
                return Response(generate(), content_type="application/json")
            except Exception as e:
                logging.exception(e)
                # Bug fix: was abort(505) — 505 is "HTTP Version Not
                # Supported". A failed backend computation is a server error.
                abort(500, description="Resource not found. error - {}".format(e))
        else:
            res = cache_res_pickled(result, False)
            if res:
                logging.info(
                    f"The data already exists! loading the data form Redis cache for Key - {key} "
                )
                return Response(generate(), content_type="application/json")
            else:
                logging.info(
                    f"There is already a request for this key. But there is no data in it. Key: {key}."
                )
                s = time.time()
                counter = 0
                # Poll until the request holding the reservation fills the key.
                # Bug fix: the original spun without sleeping (100% CPU) and
                # looped forever if the producer died; sleep between polls and
                # give up once the reservation's TTL has elapsed.
                while not res and time.time() - s < exp_setting_s:
                    time.sleep(0.05)
                    result = redis_client.get(key)
                    # The key may have expired: treat a miss as still-empty
                    # instead of calling pickle.loads(None).
                    res = cache_res_pickled(result, False) if result is not None else {}
                    counter += 1
                logging.info(
                    f"The data was available after {time.time() - s} seconds. Had to loop {counter} times."
                )
                return Response(generate(), content_type="application/json")