Add logging info to query_all_records_by_date
This commit is contained in:
8
main.py
8
main.py
@@ -2326,19 +2326,27 @@ async def fastapi_query_all_records_by_date(querydate: str) -> dict[str, list]:
|
|||||||
# 今天的不要去缓存
|
# 今天的不要去缓存
|
||||||
if not time_api.is_today_or_future(querydate):
|
if not time_api.is_today_or_future(querydate):
|
||||||
cache_key = f"queryallrecordsbydate_{querydate}"
|
cache_key = f"queryallrecordsbydate_{querydate}"
|
||||||
|
logger.info(f"cache_key: {cache_key}")
|
||||||
|
|
||||||
data = redis_client.get(cache_key)
|
data = redis_client.get(cache_key)
|
||||||
if data:
|
if data:
|
||||||
|
logger.info(f"return from cache redis")
|
||||||
# 使用自定义的反序列化函数
|
# 使用自定义的反序列化函数
|
||||||
return msgpack.unpackb(data, object_hook=decode_datetime)
|
return msgpack.unpackb(data, object_hook=decode_datetime)
|
||||||
|
|
||||||
|
logger.info(f"query from influxdb")
|
||||||
|
|
||||||
nodes_links: tuple = influxdb_api.query_all_records_by_date(query_date=querydate)
|
nodes_links: tuple = influxdb_api.query_all_records_by_date(query_date=querydate)
|
||||||
results = { "nodes": nodes_links[0],
|
results = { "nodes": nodes_links[0],
|
||||||
"links": nodes_links[1] }
|
"links": nodes_links[1] }
|
||||||
|
|
||||||
# 今天的不要去缓存
|
# 今天的不要去缓存
|
||||||
if not time_api.is_today_or_future(querydate):
|
if not time_api.is_today_or_future(querydate):
|
||||||
|
logger.info(f"save to cache redis")
|
||||||
redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime))
|
redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime))
|
||||||
|
|
||||||
|
logger.info(f"return results")
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
@app.get("/queryallrecordsbytimerange/")
|
@app.get("/queryallrecordsbytimerange/")
|
||||||
|
|||||||
Reference in New Issue
Block a user