修复丢失的api;重新规划api结构

This commit is contained in:
2026-01-22 18:15:53 +08:00
parent 0d139f96f8
commit d21966e985
14 changed files with 710 additions and 151 deletions

View File

@@ -1,109 +1,34 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
import redis
from typing import Any, List, Dict, Optional
import logging
from datetime import datetime, timedelta, timezone, time as dt_time
import msgpack
import datetime
from app.services.tjnetwork import *
from app.infra.db.influxdb import api as influxdb_api
from app.infra.cache.redis_client import redis_client # Assuming redis_client is exposed or needs initialization
# NOTE: the Redis client should live in one shared module rather than be
# initialized ad hoc in main.py; the import above assumes
# app.infra.cache.redis_client exposes it. The fallback initialization
# below is kept only until that is confirmed.
from fastapi import APIRouter
from pydantic import BaseModel
from py_linq import Enumerable
# NOTE: the Redis client should be provided via a dependency or a singleton
# (e.g. app.infra.cache.redis_client, imported below). The inline
# initialization that follows mirrors the old main.py behavior and acts as
# a fallback only.
# Need to handle date encoding for msgpack
def decode_datetime(obj):
    """msgpack object_hook: restore datetimes serialized by encode_datetime.

    Dicts carrying the "__datetime__" marker are parsed back from their
    "as_str" payload; every other mapping is returned untouched.
    """
    if "__datetime__" not in obj:
        return obj
    return datetime.datetime.strptime(obj["as_str"], "%Y%m%dT%H:%M:%S.%f")
def encode_datetime(obj):
    """msgpack `default` hook: serialize datetimes as tagged dicts.

    Non-datetime values pass through unchanged so msgpack itself can either
    handle them or raise.
    """
    if not isinstance(obj, datetime.datetime):
        return obj
    return {"__datetime__": True, "as_str": obj.strftime("%Y%m%dT%H:%M:%S.%f")}
# Fallback module-level cache client; ideally this lives in app.infra.cache.
# NOTE(review): redis.Redis() is lazy -- it does not open a connection here,
# so this except branch will almost never fire; connection errors surface on
# first use of redis_client instead. Confirm this fallback is still wanted
# given the redis_client import below.
try:
    # Attempt to connect to default redis for cache
    redis_client = redis.Redis(host='localhost', port=6379, db=0)
except Exception:
    redis_client = None
import app.infra.db.influxdb.api as influxdb_api
import app.services.time_api as time_api
from app.infra.cache.redis_client import redis_client, encode_datetime, decode_datetime
router = APIRouter()
logger = logging.getLogger(__name__)
############################################################
# pipe_risk_probability 41
############################################################
@router.get("/getpiperiskprobabilitynow/")
async def fastapi_get_pipe_risk_probability_now(
    network: str, pipe_id: str
) -> dict[str, Any]:
    """Current risk probability for one pipe; thin wrapper over the service layer."""
    result = get_pipe_risk_probability_now(network, pipe_id)
    return result
@router.get("/getpiperiskprobability/")
async def fastapi_get_pipe_risk_probability(
    network: str, pipe_id: str
) -> dict[str, Any]:
    """Risk-probability history for one pipe; thin wrapper over the service layer."""
    result = get_pipe_risk_probability(network, pipe_id)
    return result
@router.get("/getpipesriskprobability/")
async def fastapi_get_pipes_risk_probability(
    network: str, pipe_ids: str
) -> list[dict[str, Any]]:
    """Risk probabilities for several pipes; `pipe_ids` is comma separated."""
    id_list = pipe_ids.split(",")
    return get_pipes_risk_probability(network, id_list)
@router.get("/getnetworkpiperiskprobabilitynow/")
async def fastapi_get_network_pipe_risk_probability_now(
    network: str,
) -> list[dict[str, Any]]:
    """Current risk probability for every pipe of one network."""
    probabilities = get_network_pipe_risk_probability_now(network)
    return probabilities
@router.get("/getpiperiskprobabilitygeometries/")
async def fastapi_get_pipe_risk_probability_geometries(network: str) -> dict[str, Any]:
    """Geometries annotated with pipe risk probabilities for one network."""
    geometries = get_pipe_risk_probability_geometries(network)
    return geometries
############################################################
# InfluxDB / Real-time Data
############################################################
@router.get("/getrealtimedata/")
async def fastapi_get_realtimedata():
    """Placeholder endpoint kept from main.py: 100 random ints in [0, 100]."""
    import random

    return [random.randint(0, 100) for _ in range(100)]
@router.get("/getsimulationresult/")
async def fastapi_get_simulationresult():
    """Placeholder endpoint kept from main.py: 100 random ints in [0, 100]."""
    import random

    return [random.randint(0, 100) for _ in range(100)]
# Basic Node/Link Latest Record Queries
@router.get("/querynodelatestrecordbyid/")
async def fastapi_query_node_latest_record_by_id(id: str) -> Any:
    """Latest stored record for a single node, straight from InfluxDB.

    Fix: the diff merge left two back-to-back `def` lines (invalid syntax);
    the annotated variant is kept.
    """
    return influxdb_api.query_latest_record_by_ID(id, type="node")
@router.get("/querylinklatestrecordbyid/")
async def fastapi_query_link_latest_record_by_id(id: str) -> Any:
    """Latest stored record for a single link, straight from InfluxDB.

    Fix: the diff merge left two back-to-back `def` lines (invalid syntax);
    the annotated variant is kept.
    """
    return influxdb_api.query_latest_record_by_ID(id, type="link")
@router.get("/queryscadalatestrecordbyid/")
async def fastapi_query_scada_latest_record_by_id(id: str) -> Any:
    """Latest stored record for a single SCADA device, straight from InfluxDB.

    Fix: the diff merge left two back-to-back `def` lines (invalid syntax);
    the annotated variant is kept.
    """
    return influxdb_api.query_latest_record_by_ID(id, type="scada")
# Time-based Queries
@router.get("/queryallrecordsbytime/")
async def fastapi_query_all_records_by_time(querytime: str) -> dict[str, list]:
results: tuple = influxdb_api.query_all_records_by_time(query_time=querytime)
@@ -126,6 +51,9 @@ async def fastapi_query_all_scheme_record_by_time_property(
schemename: str,
bucket: str = "scheme_simulation_result",
) -> dict[str, list]:
"""
查询指定方案某一时刻的所有记录,查询 'node''link' 的某一属性值
"""
results: list = influxdb_api.query_all_scheme_record_by_time_property(
query_time=querytime,
type=type,
@@ -157,44 +85,40 @@ async def fastapi_query_scheme_simulation_record_by_ids_time(
)
return {"results": results}
# Date-based Queries with Caching
@router.get("/queryallrecordsbydate/")
async def fastapi_query_all_records_by_date(querydate: str) -> dict:
    """All node/link records for one date, with a Redis cache for past days.

    Results for today or future dates may still change, so they are never
    cached; historical dates are served from Redis when available and written
    back after the InfluxDB query.

    Fixes: the diff merge duplicated the cache-key/condition lines; the stale
    in-function `time_api` import is dropped in favor of the module-level one;
    the `redis_client` guard is kept so a missing cache backend degrades to a
    plain query instead of crashing.
    """
    is_today_or_future = time_api.is_today_or_future(querydate)
    logger.info(f"isToday or future: {is_today_or_future}")
    cache_key = f"queryallrecordsbydate_{querydate}"
    if not is_today_or_future and redis_client:
        data = redis_client.get(cache_key)
        if data:
            results = msgpack.unpackb(data, object_hook=decode_datetime)
            logger.info("return from cache redis")
            return results
    logger.info("query from influxdb")
    nodes_links: tuple = influxdb_api.query_all_records_by_date(query_date=querydate)
    results = {"nodes": nodes_links[0], "links": nodes_links[1]}
    if not is_today_or_future and redis_client:
        logger.info("save to cache redis")
        redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime))
    logger.info("return results")
    return results
@router.get("/queryallrecordsbytimerange/")
async def fastapi_query_all_records_by_time_range(
starttime: str, endtime: str
) -> dict[str, list]:
from app.services.tjnetwork import time_api
if not time_api.is_today_or_future(starttime) and redis_client:
cache_key = f"queryallrecordsbytimerange_{starttime}_{endtime}"
cache_key = f"queryallrecordsbytimerange_{starttime}_{endtime}"
if not time_api.is_today_or_future(starttime):
data = redis_client.get(cache_key)
if data:
loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime)
@@ -204,8 +128,261 @@ async def fastapi_query_all_records_by_time_range(
starttime=starttime, endtime=endtime
)
results = {"nodes": nodes_links[0], "links": nodes_links[1]}
# Original code didn't seem to set cache for this? or I missed it.
# It's safer to not cache if not sure about logic, but pattern suggests it should.
if not time_api.is_today_or_future(starttime):
redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime))
return results
@router.get("/queryallrecordsbydatewithtype/")
async def fastapi_query_all_records_by_date_with_type(
    querydate: str, querytype: str
) -> list:
    """All records of one type for a date, Redis-cached.

    NOTE(review): unlike the by-date endpoints above, this caches even
    today's still-changing results -- confirm that is intended.
    """
    cache_key = f"queryallrecordsbydatewithtype_{querydate}_{querytype}"
    cached = redis_client.get(cache_key)
    if cached:
        return msgpack.unpackb(cached, object_hook=decode_datetime)
    results = influxdb_api.query_all_records_by_date_with_type(
        query_date=querydate, query_type=querytype
    )
    redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime))
    return results
@router.get("/queryallrecordsbyidsdatetype/")
async def fastapi_query_all_records_by_ids_date_type(
    ids: str, querydate: str, querytype: str
) -> list:
    """Records for a comma-separated set of IDs, filtered from the per-date cache.

    NOTE(review): the cache key matches /queryallrecordsbydatewithtype/ so the
    two endpoints share one cached payload -- confirm the sharing is intended.
    """
    cache_key = f"queryallrecordsbydatewithtype_{querydate}_{querytype}"
    cached = redis_client.get(cache_key)
    if cached:
        results = msgpack.unpackb(cached, object_hook=decode_datetime)
    else:
        results = influxdb_api.query_all_records_by_date_with_type(
            query_date=querydate, query_type=querytype
        )
        redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime))
    wanted = ids.split(",")
    # py_linq filtering kept for parity with the original main.py code.
    return Enumerable(results).where(lambda rec: rec["ID"] in wanted).to_list()
@router.get("/queryallrecordsbydateproperty/")
async def fastapi_query_all_records_by_date_property(
    querydate: str, querytype: str, property: str
) -> list[dict]:
    """One property's values for every element of a type on a date, Redis-cached."""
    cache_key = f"queryallrecordsbydateproperty_{querydate}_{querytype}_{property}"
    cached = redis_client.get(cache_key)
    if cached:
        return msgpack.unpackb(cached, object_hook=decode_datetime)
    result_dict = influxdb_api.query_all_record_by_date_property(
        query_date=querydate, type=querytype, property=property
    )
    redis_client.set(cache_key, msgpack.packb(result_dict, default=encode_datetime))
    return result_dict
# Curve Queries
@router.get("/querynodecurvebyidpropertydaterange/")
async def fastapi_query_node_curve_by_id_property_daterange(
    id: str, prop: str, startdate: str, enddate: str
):
    """Time-series curve of one node property over a date range."""
    curve = influxdb_api.query_curve_by_ID_property_daterange(
        id, type="node", property=prop, start_date=startdate, end_date=enddate
    )
    return curve
@router.get("/querylinkcurvebyidpropertydaterange/")
async def fastapi_query_link_curve_by_id_property_daterange(
    id: str, prop: str, startdate: str, enddate: str
):
    """Time-series curve of one link property over a date range."""
    curve = influxdb_api.query_curve_by_ID_property_daterange(
        id, type="link", property=prop, start_date=startdate, end_date=enddate
    )
    return curve
# SCADA Data Queries
@router.get("/queryscadadatabydeviceidandtime/")
async def fastapi_query_scada_data_by_device_id_and_time(ids: str, querytime: str):
    """SCADA readings for comma-separated device IDs at a single instant."""
    logger.info(querytime)
    device_ids = ids.split(",")
    return influxdb_api.query_SCADA_data_by_device_ID_and_time(
        query_ids_list=device_ids, query_time=querytime
    )
@router.get("/queryscadadatabydeviceidandtimerange/")
async def fastapi_query_scada_data_by_device_id_and_time_range(
    ids: str, starttime: str, endtime: str
):
    """SCADA readings for comma-separated device IDs within a time range."""
    # Fix: use the module logger instead of print(), matching the rest of the file.
    logger.info(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}")
    query_ids = ids.split(",")
    return influxdb_api.query_SCADA_data_by_device_ID_and_timerange(
        query_ids_list=query_ids, start_time=starttime, end_time=endtime
    )
@router.get("/queryfillingscadadatabydeviceidandtimerange/")
async def fastapi_query_filling_scada_data_by_device_id_and_time_range(
    ids: str, starttime: str, endtime: str
):
    """Gap-filled SCADA readings for comma-separated device IDs within a time range."""
    # Fix: use the module logger instead of print(), matching the rest of the file.
    logger.info(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}")
    query_ids = ids.split(",")
    return influxdb_api.query_filling_SCADA_data_by_device_ID_and_timerange(
        query_ids_list=query_ids, start_time=starttime, end_time=endtime
    )
@router.get("/querycleaningscadadatabydeviceidandtimerange/")
async def fastapi_query_cleaning_scada_data_by_device_id_and_time_range(
    ids: str, starttime: str, endtime: str
):
    """Cleaning-stage SCADA readings for comma-separated device IDs within a time range."""
    # Fix: use the module logger instead of print(), matching the rest of the file.
    logger.info(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}")
    query_ids = ids.split(",")
    return influxdb_api.query_cleaning_SCADA_data_by_device_ID_and_timerange(
        query_ids_list=query_ids, start_time=starttime, end_time=endtime
    )
@router.get("/querysimulationscadadatabydeviceidandtimerange/")
async def fastapi_query_simulation_scada_data_by_device_id_and_time_range(
    ids: str, starttime: str, endtime: str
):
    """Simulated SCADA readings for comma-separated device IDs within a time range."""
    # Fix: use the module logger instead of print(), matching the rest of the file.
    logger.info(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}")
    query_ids = ids.split(",")
    return influxdb_api.query_simulation_SCADA_data_by_device_ID_and_timerange(
        query_ids_list=query_ids, start_time=starttime, end_time=endtime
    )
@router.get("/querycleanedscadadatabydeviceidandtimerange/")
async def fastapi_query_cleaned_scada_data_by_device_id_and_time_range(
    ids: str, starttime: str, endtime: str
):
    """Cleaned SCADA readings for comma-separated device IDs within a time range."""
    # Fix: use the module logger instead of print(), matching the rest of the file.
    logger.info(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}")
    query_ids = ids.split(",")
    return influxdb_api.query_cleaned_SCADA_data_by_device_ID_and_timerange(
        query_ids_list=query_ids, start_time=starttime, end_time=endtime
    )
@router.get("/queryscadadatabydeviceidanddate/")
async def fastapi_query_scada_data_by_device_id_and_date(ids: str, querydate: str):
    """SCADA readings for comma-separated device IDs over one whole date."""
    device_ids = ids.split(",")
    return influxdb_api.query_SCADA_data_by_device_ID_and_date(
        query_ids_list=device_ids, query_date=querydate
    )
@router.get("/queryallscadarecordsbydate/")
async def fastapi_query_all_scada_records_by_date(querydate: str):
    """Every SCADA record for one date; historical dates are Redis-cached."""
    is_today_or_future = time_api.is_today_or_future(querydate)
    logger.info(f"isToday or future: {is_today_or_future}")
    cache_key = f"queryallscadarecordsbydate_{querydate}"
    # Today/future results are still changing, so only past dates hit the cache.
    if not is_today_or_future:
        cached = redis_client.get(cache_key)
        if cached:
            loaded_dict = msgpack.unpackb(cached, object_hook=decode_datetime)
            logger.info("return from cache redis")
            return loaded_dict
    logger.info("query from influxdb")
    result_dict = influxdb_api.query_all_SCADA_records_by_date(query_date=querydate)
    if not is_today_or_future:
        logger.info("save to cache redis")
        redis_client.set(cache_key, msgpack.packb(result_dict, default=encode_datetime))
    logger.info("return results")
    return result_dict
@router.get("/queryallschemeallrecords/")
async def fastapi_query_all_scheme_all_records(
    schemetype: str, schemename: str, querydate: str
) -> tuple:
    """Full result set of one simulation scheme for a date, Redis-cached.

    NOTE(review): msgpack may round-trip a tuple as a list, so cache hits can
    return a list where fresh queries return a tuple -- confirm callers cope.
    """
    cache_key = f"queryallschemeallrecords_{schemetype}_{schemename}_{querydate}"
    cached = redis_client.get(cache_key)
    if cached:
        return msgpack.unpackb(cached, object_hook=decode_datetime)
    results = influxdb_api.query_scheme_all_record(
        scheme_Type=schemetype, scheme_Name=schemename, query_date=querydate
    )
    redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime))
    return results
@router.get("/queryschemeallrecordsproperty/")
async def fastapi_query_all_scheme_all_records_property(
    schemetype: str, schemename: str, querydate: str, querytype: str, queryproperty: str
) -> Optional[List]:
    """Node or link slice of a scheme's cached full result set.

    Returns None for any `querytype` other than "node" or "link".

    NOTE(review): `queryproperty` is accepted but never used -- whole records
    are returned, not a single property. Confirm whether property filtering
    was meant to happen here.
    """
    cache_key = f"queryallschemeallrecords_{schemetype}_{schemename}_{querydate}"
    cached = redis_client.get(cache_key)
    if cached:
        all_results = msgpack.unpackb(cached, object_hook=decode_datetime)
    else:
        all_results = influxdb_api.query_scheme_all_record(
            scheme_Type=schemetype, scheme_Name=schemename, query_date=querydate
        )
        redis_client.set(cache_key, msgpack.packb(all_results, default=encode_datetime))
    if querytype == "node":
        return all_results[0]
    if querytype == "link":
        return all_results[1]
    return None
@router.get("/queryinfluxdbbuckets/")
async def fastapi_query_influxdb_buckets():
    """List the buckets visible to the configured InfluxDB client."""
    buckets = influxdb_api.query_buckets()
    return buckets
@router.get("/queryinfluxdbbucketmeasurements/")
async def fastapi_query_influxdb_bucket_measurements(bucket: str):
    """List the measurements stored in one InfluxDB bucket."""
    measurements = influxdb_api.query_measurements(bucket=bucket)
    return measurements
############################################################
# download history data
############################################################
class Download_History_Data_Manually(BaseModel):
    """Request body for /download_history_data_manually/.

    download_date example: datetime(2025, 5, 4)
    """

    # Fix: `import datetime` later in the import block shadows the
    # `from datetime import datetime` class binding, so a bare `datetime`
    # annotation here would be the *module*; use the explicit class path.
    download_date: datetime.datetime
@router.post("/download_history_data_manually/")
async def fastapi_download_history_data_manually(
    data: Download_History_Data_Manually,
) -> None:
    """Trigger a manual re-download of history data for one calendar day.

    The requested date is expanded to [00:00:00, 23:59:59] in UTC+8 and the
    ISO timestamps are handed to the InfluxDB download routine.

    Fix: `datetime` is the *module* at this point (`import datetime` shadows
    the earlier `from datetime import datetime`), so the original
    `datetime.combine(...)` would raise AttributeError; use
    `datetime.datetime.combine`. The `data.dict()` indirection is dropped in
    favor of direct attribute access.
    """
    day = data.download_date.date()
    tz = timezone(timedelta(hours=8))
    begin_dt = datetime.datetime.combine(day, dt_time.min).replace(tzinfo=tz)
    end_dt = datetime.datetime.combine(day, dt_time(23, 59, 59)).replace(tzinfo=tz)
    influxdb_api.download_history_data_manually(
        begin_time=begin_dt.isoformat(), end_time=end_dt.isoformat()
    )