Cache InfluxDB query results on the FastAPI side

This commit is contained in:
DingZQ
2025-03-09 00:23:27 +08:00
parent b905c0316d
commit 05ea9e7045
2 changed files with 20 additions and 20 deletions

22
main.py
View File

@@ -48,6 +48,8 @@ tmpDir = "C:/tmpfiles/"
lockedPrjs = {}
# In-process cache of InfluxDB query results, shared by several endpoints.
# NOTE(review): entries are never evicted or invalidated — unbounded growth
# and stale results for "today" queries; consider a TTL or bounded cache.
influxdb_cache = {}
# makedirs with exist_ok avoids the check-then-create race (TOCTOU) that
# `if not os.path.exists(...): os.mkdir(...)` has under concurrent startup.
os.makedirs(inpDir, exist_ok=True)
@@ -2149,8 +2151,16 @@ async def fastapi_query_all_records_by_date(querydate: str) -> dict[str, list]:
# 返回 [{'time': '2024-01-01T00:00:00Z', 'ID': '1', 'value': 1.0}, {'time': '2024-01-01T00:00:00Z', 'ID': '2', 'value': 2.0}]
# Returns e.g. [{'time': '2024-01-01T00:00:00Z', 'ID': '1', 'value': 1.0}, ...]
@app.get("/queryallrecordsbydateproperty/")
async def fastapi_query_all_records_by_date_property(querydate: str, querytype: str, property: str) -> list[dict]:
    """Return all records for the given date/type/property, cached in-process.

    `property` shadows the builtin, but it is part of the public query-string
    interface and must keep its name.
    """
    # Namespace the key: influxdb_cache is shared with other endpoints
    # (the SCADA endpoint keys on querydate alone), so a raw composed key
    # could collide with theirs.
    cache_key = f"records_by_date_property:{querydate}_{querytype}_{property}"
    # Single lookup instead of calling .get() twice; `is not None` keeps a
    # cached empty result distinguishable from a miss. No `global` needed —
    # the dict is only mutated, never rebound.
    cached = influxdb_cache.get(cache_key)
    if cached is not None:
        return cached
    result = influxdb_api.query_all_record_by_date_property(
        query_date=querydate, type=querytype, property=property, client=influx_client
    )
    # NOTE(review): cache is unbounded and never invalidated — stale data if
    # records for this date are still being written.
    influxdb_cache[cache_key] = result
    return result
# def query_curve_by_ID_property_daterange(ID: str, type: str, property: str, start_date: str, end_date: str, bucket: str="realtime_data", client: InfluxDBClient=client) -> list:
@@ -2185,8 +2195,16 @@ async def fastapi_query_scada_data_by_device_id_and_date(ids: str, querydate: st
# 返回所有SCADA设备在指定日期的所有记录
# Returns all SCADA-device records for the given date.
@app.get("/queryallscadarecordsbydate/")
async def fastapi_query_all_scada_records_by_date(querydate: str):
    """Return every SCADA record for `querydate`, cached in-process."""
    # Namespace the key: the raw querydate alone lives in the same shared
    # influxdb_cache as composed keys from other endpoints and could collide
    # with them (e.g. a querydate containing underscores).
    cache_key = f"scada_records_by_date:{querydate}"
    # Single lookup instead of .get() twice; `is not None` lets a cached
    # empty result count as a hit. No `global` needed — dict only mutated.
    cached = influxdb_cache.get(cache_key)
    if cached is not None:
        return cached
    result = influxdb_api.query_all_SCADA_records_by_date(query_date=querydate, client=influx_client)
    # NOTE(review): unbounded, never-invalidated cache — stale for dates
    # still receiving writes.
    influxdb_cache[cache_key] = result
    return result
@app.get("/queryinfluxdbbuckets/")