修复丢失的api;重新规划api结构
This commit is contained in:
37
app/api/v1/endpoints/cache.py
Normal file
37
app/api/v1/endpoints/cache.py
Normal file
@@ -0,0 +1,37 @@
|
||||
from fastapi import APIRouter
|
||||
from app.infra.cache.redis_client import redis_client
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@router.post("/clearrediskey/")
|
||||
async def fastapi_clear_redis_key(key: str):
|
||||
redis_client.delete(key)
|
||||
return True
|
||||
|
||||
|
||||
@router.post("/clearrediskeys/")
|
||||
async def fastapi_clear_redis_keys(keys: str):
|
||||
# delete keys contains the key
|
||||
matched_keys = redis_client.keys(f"*{keys}*")
|
||||
if matched_keys:
|
||||
redis_client.delete(*matched_keys)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@router.post("/clearallredis/")
|
||||
async def fastapi_clear_all_redis():
|
||||
redis_client.flushdb()
|
||||
return True
|
||||
|
||||
|
||||
@router.get("/queryredis/")
|
||||
async def fastapi_query_redis():
|
||||
# Helper to decode bytes to str for JSON response if needed,
|
||||
# but original just returned keys (which might be bytes in redis-py unless decode_responses=True)
|
||||
# create_redis_client usually sets decode_responses=False by default.
|
||||
# We will assume user handles bytes or we should decode.
|
||||
# Original just returned redis_client.keys("*")
|
||||
keys = redis_client.keys("*")
|
||||
# Clean output for API
|
||||
return [k.decode('utf-8') if isinstance(k, bytes) else k for k in keys]
|
||||
@@ -17,6 +17,22 @@ async def fastapi_set_quality_properties(network: str, req: Request) -> ChangeSe
|
||||
props = await req.json()
|
||||
return set_quality(network, ChangeSet(props))
|
||||
|
||||
@router.get("/getemitterschema")
|
||||
async def fastapi_get_emitter_schema(network: str) -> dict[str, dict[str, Any]]:
|
||||
return get_emitter_schema(network)
|
||||
|
||||
@router.get("/getemitterproperties/")
|
||||
async def fastapi_get_emitter_properties(network: str, junction: str) -> dict[str, Any]:
|
||||
return get_emitter(network, junction)
|
||||
|
||||
@router.post("/setemitterproperties/", response_model=None)
|
||||
async def fastapi_set_emitter_properties(
|
||||
network: str, junction: str, req: Request
|
||||
) -> ChangeSet:
|
||||
props = await req.json()
|
||||
ps = {"junction": junction} | props
|
||||
return set_emitter(network, ChangeSet(ps))
|
||||
|
||||
@router.get("/getsourcechema/")
|
||||
async def fastapi_get_source_schema(network: str) -> dict[str, dict[str, Any]]:
|
||||
return get_source_schema(network)
|
||||
|
||||
@@ -1,109 +1,34 @@
|
||||
from fastapi import APIRouter, Request
|
||||
from typing import Any, List, Dict, Union
|
||||
import redis
|
||||
from typing import Any, List, Dict, Optional
|
||||
import logging
|
||||
from datetime import datetime, timedelta, timezone, time as dt_time
|
||||
import msgpack
|
||||
import datetime
|
||||
from app.services.tjnetwork import *
|
||||
from app.infra.db.influxdb import api as influxdb_api
|
||||
from app.infra.cache.redis_client import redis_client # Assuming redis_client is exposed or needs initialization
|
||||
# If redis_client isn't in app.infra.db, we might need to initialize it here or import from main if it was global.
|
||||
# Given the instructions, we'll assume it needs to be imported or initialized.
|
||||
# For now, let's copy the redis initialization logic if it's not centralized, or better,
|
||||
# if main.py had it global, we should move it to a shared module.
|
||||
# Checking imports suggests it might be ad-hoc in main.py.
|
||||
# Let's import standard stuff first.
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
from py_linq import Enumerable
|
||||
|
||||
# To properly handle the redis client, we should probably have a dependency or a singleton.
|
||||
# Since we are refactoring, let's assume there is a `app.core.cache` or similar,
|
||||
# OR we will just duplicate the init for now if it's simple, or use a placeholder.
|
||||
# Looking at main.py (from context), it used `redis_client` global.
|
||||
# I will create a simple redis wrapper in this file or use a try-except block if it's not critical,
|
||||
# but for a refactor, it's better to put it in `app/infra/db/redis_utils.py`?
|
||||
# For now, let's include the imports and logic from main.py.
|
||||
|
||||
# Need to handle date encoding for msgpack
|
||||
def decode_datetime(obj):
|
||||
if "__datetime__" in obj:
|
||||
obj = datetime.datetime.strptime(obj["as_str"], "%Y%m%dT%H:%M:%S.%f")
|
||||
return obj
|
||||
|
||||
def encode_datetime(obj):
|
||||
if isinstance(obj, datetime.datetime):
|
||||
return {"__datetime__": True, "as_str": obj.strftime("%Y%m%dT%H:%M:%S.%f")}
|
||||
return obj
|
||||
|
||||
# Mocking or getting redis client. ideally this should be in app.core.config or similar
|
||||
# For this file, I will initialize it if it's not imported.
|
||||
try:
|
||||
# Attempt to connect to default redis for cache
|
||||
redis_client = redis.Redis(host='localhost', port=6379, db=0)
|
||||
except Exception:
|
||||
redis_client = None
|
||||
import app.infra.db.influxdb.api as influxdb_api
|
||||
import app.services.time_api as time_api
|
||||
from app.infra.cache.redis_client import redis_client, encode_datetime, decode_datetime
|
||||
|
||||
router = APIRouter()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
############################################################
|
||||
# pipe_risk_probability 41
|
||||
############################################################
|
||||
|
||||
@router.get("/getpiperiskprobabilitynow/")
|
||||
async def fastapi_get_pipe_risk_probability_now(
|
||||
network: str, pipe_id: str
|
||||
) -> dict[str, Any]:
|
||||
return get_pipe_risk_probability_now(network, pipe_id)
|
||||
|
||||
@router.get("/getpiperiskprobability/")
|
||||
async def fastapi_get_pipe_risk_probability(
|
||||
network: str, pipe_id: str
|
||||
) -> dict[str, Any]:
|
||||
return get_pipe_risk_probability(network, pipe_id)
|
||||
|
||||
@router.get("/getpipesriskprobability/")
|
||||
async def fastapi_get_pipes_risk_probability(
|
||||
network: str, pipe_ids: str
|
||||
) -> list[dict[str, Any]]:
|
||||
pipeids = pipe_ids.split(",")
|
||||
return get_pipes_risk_probability(network, pipeids)
|
||||
|
||||
@router.get("/getnetworkpiperiskprobabilitynow/")
|
||||
async def fastapi_get_network_pipe_risk_probability_now(
|
||||
network: str,
|
||||
) -> list[dict[str, Any]]:
|
||||
return get_network_pipe_risk_probability_now(network)
|
||||
|
||||
@router.get("/getpiperiskprobabilitygeometries/")
|
||||
async def fastapi_get_pipe_risk_probability_geometries(network: str) -> dict[str, Any]:
|
||||
return get_pipe_risk_probability_geometries(network)
|
||||
|
||||
############################################################
|
||||
# InfluxDB / Real-time Data
|
||||
############################################################
|
||||
|
||||
@router.get("/getrealtimedata/")
|
||||
async def fastapi_get_realtimedata():
|
||||
# Placeholder random data from original main.py
|
||||
import random
|
||||
data = [random.randint(0, 100) for _ in range(100)]
|
||||
return data
|
||||
|
||||
@router.get("/getsimulationresult/")
|
||||
async def fastapi_get_simulationresult():
|
||||
import random
|
||||
data = [random.randint(0, 100) for _ in range(100)]
|
||||
return data
|
||||
# Basic Node/Link Latest Record Queries
|
||||
|
||||
@router.get("/querynodelatestrecordbyid/")
|
||||
async def fastapi_query_node_latest_record_by_id(id: str):
|
||||
async def fastapi_query_node_latest_record_by_id(id: str) -> Any:
|
||||
return influxdb_api.query_latest_record_by_ID(id, type="node")
|
||||
|
||||
@router.get("/querylinklatestrecordbyid/")
|
||||
async def fastapi_query_link_latest_record_by_id(id: str):
|
||||
async def fastapi_query_link_latest_record_by_id(id: str) -> Any:
|
||||
return influxdb_api.query_latest_record_by_ID(id, type="link")
|
||||
|
||||
@router.get("/queryscadalatestrecordbyid/")
|
||||
async def fastapi_query_scada_latest_record_by_id(id: str):
|
||||
async def fastapi_query_scada_latest_record_by_id(id: str) -> Any:
|
||||
return influxdb_api.query_latest_record_by_ID(id, type="scada")
|
||||
|
||||
# Time-based Queries
|
||||
|
||||
@router.get("/queryallrecordsbytime/")
|
||||
async def fastapi_query_all_records_by_time(querytime: str) -> dict[str, list]:
|
||||
results: tuple = influxdb_api.query_all_records_by_time(query_time=querytime)
|
||||
@@ -126,6 +51,9 @@ async def fastapi_query_all_scheme_record_by_time_property(
|
||||
schemename: str,
|
||||
bucket: str = "scheme_simulation_result",
|
||||
) -> dict[str, list]:
|
||||
"""
|
||||
查询指定方案某一时刻的所有记录,查询 'node' 或 'link' 的某一属性值
|
||||
"""
|
||||
results: list = influxdb_api.query_all_scheme_record_by_time_property(
|
||||
query_time=querytime,
|
||||
type=type,
|
||||
@@ -157,44 +85,40 @@ async def fastapi_query_scheme_simulation_record_by_ids_time(
|
||||
)
|
||||
return {"results": results}
|
||||
|
||||
# Date-based Queries with Caching
|
||||
|
||||
@router.get("/queryallrecordsbydate/")
|
||||
async def fastapi_query_all_records_by_date(querydate: str) -> dict:
|
||||
# Logic copied from main.py regarding redis caching
|
||||
# NOTE: time_api needs to be imported from services or where it resides.
|
||||
# Based on main.py analysis, it seems `app.services.tjnetwork` imports `time_api`?
|
||||
# Or it was imported in main.py. Let's assume we can get it or use standard datetime.
|
||||
# If `time_api.is_today_or_future` is custom, we need to find it.
|
||||
# For now, let's look for it or try to import it.
|
||||
|
||||
# Importing time_api assuming it's available as seen in main.py context
|
||||
from app.services.tjnetwork import time_api
|
||||
# (If this fails we might need to adjust import path)
|
||||
|
||||
is_today_or_future = time_api.is_today_or_future(querydate)
|
||||
logger.info(f"isToday or future: {is_today_or_future}")
|
||||
|
||||
cache_key = f"queryallrecordsbydate_{querydate}"
|
||||
|
||||
if not is_today_or_future and redis_client:
|
||||
cache_key = f"queryallrecordsbydate_{querydate}"
|
||||
if not is_today_or_future:
|
||||
data = redis_client.get(cache_key)
|
||||
if data:
|
||||
results = msgpack.unpackb(data, object_hook=decode_datetime)
|
||||
logger.info("return from cache redis")
|
||||
return results
|
||||
|
||||
logger.info("query from influxdb")
|
||||
nodes_links: tuple = influxdb_api.query_all_records_by_date(query_date=querydate)
|
||||
results = {"nodes": nodes_links[0], "links": nodes_links[1]}
|
||||
|
||||
if not is_today_or_future and redis_client:
|
||||
if not is_today_or_future:
|
||||
logger.info("save to cache redis")
|
||||
redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime))
|
||||
|
||||
logger.info("return results")
|
||||
return results
|
||||
|
||||
@router.get("/queryallrecordsbytimerange/")
|
||||
async def fastapi_query_all_records_by_time_range(
|
||||
starttime: str, endtime: str
|
||||
) -> dict[str, list]:
|
||||
from app.services.tjnetwork import time_api
|
||||
|
||||
if not time_api.is_today_or_future(starttime) and redis_client:
|
||||
cache_key = f"queryallrecordsbytimerange_{starttime}_{endtime}"
|
||||
cache_key = f"queryallrecordsbytimerange_{starttime}_{endtime}"
|
||||
|
||||
if not time_api.is_today_or_future(starttime):
|
||||
data = redis_client.get(cache_key)
|
||||
if data:
|
||||
loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime)
|
||||
@@ -204,8 +128,261 @@ async def fastapi_query_all_records_by_time_range(
|
||||
starttime=starttime, endtime=endtime
|
||||
)
|
||||
results = {"nodes": nodes_links[0], "links": nodes_links[1]}
|
||||
|
||||
# Original code didn't seem to set cache for this? or I missed it.
|
||||
# It's safer to not cache if not sure about logic, but pattern suggests it should.
|
||||
|
||||
|
||||
if not time_api.is_today_or_future(starttime):
|
||||
redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime))
|
||||
|
||||
return results
|
||||
|
||||
@router.get("/queryallrecordsbydatewithtype/")
|
||||
async def fastapi_query_all_records_by_date_with_type(
|
||||
querydate: str, querytype: str
|
||||
) -> list:
|
||||
cache_key = f"queryallrecordsbydatewithtype_{querydate}_{querytype}"
|
||||
data = redis_client.get(cache_key)
|
||||
if data:
|
||||
loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime)
|
||||
return loaded_dict
|
||||
|
||||
results = influxdb_api.query_all_records_by_date_with_type(
|
||||
query_date=querydate, query_type=querytype
|
||||
)
|
||||
|
||||
packed = msgpack.packb(results, default=encode_datetime)
|
||||
redis_client.set(cache_key, packed)
|
||||
|
||||
return results
|
||||
|
||||
@router.get("/queryallrecordsbyidsdatetype/")
|
||||
async def fastapi_query_all_records_by_ids_date_type(
|
||||
ids: str, querydate: str, querytype: str
|
||||
) -> list:
|
||||
cache_key = f"queryallrecordsbydatewithtype_{querydate}_{querytype}"
|
||||
data = redis_client.get(cache_key)
|
||||
|
||||
results = []
|
||||
if data:
|
||||
results = msgpack.unpackb(data, object_hook=decode_datetime)
|
||||
else:
|
||||
results = influxdb_api.query_all_records_by_date_with_type(
|
||||
query_date=querydate, query_type=querytype
|
||||
)
|
||||
packed = msgpack.packb(results, default=encode_datetime)
|
||||
redis_client.set(cache_key, packed)
|
||||
|
||||
query_ids = ids.split(",")
|
||||
# Using Enumerable from py_linq as in original code
|
||||
e_results = Enumerable(results)
|
||||
lst_results = e_results.where(lambda x: x["ID"] in query_ids).to_list()
|
||||
|
||||
return lst_results
|
||||
|
||||
@router.get("/queryallrecordsbydateproperty/")
|
||||
async def fastapi_query_all_records_by_date_property(
|
||||
querydate: str, querytype: str, property: str
|
||||
) -> list[dict]:
|
||||
cache_key = f"queryallrecordsbydateproperty_{querydate}_{querytype}_{property}"
|
||||
data = redis_client.get(cache_key)
|
||||
if data:
|
||||
loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime)
|
||||
return loaded_dict
|
||||
|
||||
result_dict = influxdb_api.query_all_record_by_date_property(
|
||||
query_date=querydate, type=querytype, property=property
|
||||
)
|
||||
packed = msgpack.packb(result_dict, default=encode_datetime)
|
||||
redis_client.set(cache_key, packed)
|
||||
|
||||
return result_dict
|
||||
|
||||
# Curve Queries
|
||||
|
||||
@router.get("/querynodecurvebyidpropertydaterange/")
|
||||
async def fastapi_query_node_curve_by_id_property_daterange(
|
||||
id: str, prop: str, startdate: str, enddate: str
|
||||
):
|
||||
return influxdb_api.query_curve_by_ID_property_daterange(
|
||||
id, type="node", property=prop, start_date=startdate, end_date=enddate
|
||||
)
|
||||
|
||||
@router.get("/querylinkcurvebyidpropertydaterange/")
|
||||
async def fastapi_query_link_curve_by_id_property_daterange(
|
||||
id: str, prop: str, startdate: str, enddate: str
|
||||
):
|
||||
return influxdb_api.query_curve_by_ID_property_daterange(
|
||||
id, type="link", property=prop, start_date=startdate, end_date=enddate
|
||||
)
|
||||
|
||||
# SCADA Data Queries
|
||||
|
||||
@router.get("/queryscadadatabydeviceidandtime/")
|
||||
async def fastapi_query_scada_data_by_device_id_and_time(ids: str, querytime: str):
|
||||
query_ids = ids.split(",")
|
||||
logger.info(querytime)
|
||||
return influxdb_api.query_SCADA_data_by_device_ID_and_time(
|
||||
query_ids_list=query_ids, query_time=querytime
|
||||
)
|
||||
|
||||
@router.get("/queryscadadatabydeviceidandtimerange/")
|
||||
async def fastapi_query_scada_data_by_device_id_and_time_range(
|
||||
ids: str, starttime: str, endtime: str
|
||||
):
|
||||
print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}")
|
||||
query_ids = ids.split(",")
|
||||
return influxdb_api.query_SCADA_data_by_device_ID_and_timerange(
|
||||
query_ids_list=query_ids, start_time=starttime, end_time=endtime
|
||||
)
|
||||
|
||||
@router.get("/queryfillingscadadatabydeviceidandtimerange/")
|
||||
async def fastapi_query_filling_scada_data_by_device_id_and_time_range(
|
||||
ids: str, starttime: str, endtime: str
|
||||
):
|
||||
print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}")
|
||||
query_ids = ids.split(",")
|
||||
return influxdb_api.query_filling_SCADA_data_by_device_ID_and_timerange(
|
||||
query_ids_list=query_ids, start_time=starttime, end_time=endtime
|
||||
)
|
||||
|
||||
@router.get("/querycleaningscadadatabydeviceidandtimerange/")
|
||||
async def fastapi_query_cleaning_scada_data_by_device_id_and_time_range(
|
||||
ids: str, starttime: str, endtime: str
|
||||
):
|
||||
print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}")
|
||||
query_ids = ids.split(",")
|
||||
return influxdb_api.query_cleaning_SCADA_data_by_device_ID_and_timerange(
|
||||
query_ids_list=query_ids, start_time=starttime, end_time=endtime
|
||||
)
|
||||
|
||||
@router.get("/querysimulationscadadatabydeviceidandtimerange/")
|
||||
async def fastapi_query_simulation_scada_data_by_device_id_and_time_range(
|
||||
ids: str, starttime: str, endtime: str
|
||||
):
|
||||
print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}")
|
||||
query_ids = ids.split(",")
|
||||
return influxdb_api.query_simulation_SCADA_data_by_device_ID_and_timerange(
|
||||
query_ids_list=query_ids, start_time=starttime, end_time=endtime
|
||||
)
|
||||
|
||||
@router.get("/querycleanedscadadatabydeviceidandtimerange/")
|
||||
async def fastapi_query_cleaned_scada_data_by_device_id_and_time_range(
|
||||
ids: str, starttime: str, endtime: str
|
||||
):
|
||||
print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}")
|
||||
query_ids = ids.split(",")
|
||||
return influxdb_api.query_cleaned_SCADA_data_by_device_ID_and_timerange(
|
||||
query_ids_list=query_ids, start_time=starttime, end_time=endtime
|
||||
)
|
||||
|
||||
@router.get("/queryscadadatabydeviceidanddate/")
|
||||
async def fastapi_query_scada_data_by_device_id_and_date(ids: str, querydate: str):
|
||||
query_ids = ids.split(",")
|
||||
return influxdb_api.query_SCADA_data_by_device_ID_and_date(
|
||||
query_ids_list=query_ids, query_date=querydate
|
||||
)
|
||||
|
||||
@router.get("/queryallscadarecordsbydate/")
|
||||
async def fastapi_query_all_scada_records_by_date(querydate: str):
|
||||
is_today_or_future = time_api.is_today_or_future(querydate)
|
||||
logger.info(f"isToday or future: {is_today_or_future}")
|
||||
|
||||
cache_key = f"queryallscadarecordsbydate_{querydate}"
|
||||
|
||||
if not is_today_or_future:
|
||||
data = redis_client.get(cache_key)
|
||||
if data:
|
||||
loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime)
|
||||
logger.info("return from cache redis")
|
||||
return loaded_dict
|
||||
|
||||
logger.info("query from influxdb")
|
||||
result_dict = influxdb_api.query_all_SCADA_records_by_date(query_date=querydate)
|
||||
|
||||
if not is_today_or_future:
|
||||
logger.info("save to cache redis")
|
||||
packed = msgpack.packb(result_dict, default=encode_datetime)
|
||||
redis_client.set(cache_key, packed)
|
||||
|
||||
logger.info("return results")
|
||||
return result_dict
|
||||
|
||||
@router.get("/queryallschemeallrecords/")
|
||||
async def fastapi_query_all_scheme_all_records(
|
||||
schemetype: str, schemename: str, querydate: str
|
||||
) -> tuple:
|
||||
cache_key = f"queryallschemeallrecords_{schemetype}_{schemename}_{querydate}"
|
||||
data = redis_client.get(cache_key)
|
||||
if data:
|
||||
loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime)
|
||||
return loaded_dict
|
||||
|
||||
results = influxdb_api.query_scheme_all_record(
|
||||
scheme_Type=schemetype, scheme_Name=schemename, query_date=querydate
|
||||
)
|
||||
packed = msgpack.packb(results, default=encode_datetime)
|
||||
redis_client.set(cache_key, packed)
|
||||
|
||||
return results
|
||||
|
||||
@router.get("/queryschemeallrecordsproperty/")
|
||||
async def fastapi_query_all_scheme_all_records_property(
|
||||
schemetype: str, schemename: str, querydate: str, querytype: str, queryproperty: str
|
||||
) -> Optional[List]:
|
||||
cache_key = f"queryallschemeallrecords_{schemetype}_{schemename}_{querydate}"
|
||||
data = redis_client.get(cache_key)
|
||||
all_results = None
|
||||
if data:
|
||||
all_results = msgpack.unpackb(data, object_hook=decode_datetime)
|
||||
else:
|
||||
all_results = influxdb_api.query_scheme_all_record(
|
||||
scheme_Type=schemetype, scheme_Name=schemename, query_date=querydate
|
||||
)
|
||||
packed = msgpack.packb(all_results, default=encode_datetime)
|
||||
redis_client.set(cache_key, packed)
|
||||
|
||||
results = None
|
||||
if querytype == "node":
|
||||
results = all_results[0]
|
||||
elif querytype == "link":
|
||||
results = all_results[1]
|
||||
|
||||
return results
|
||||
|
||||
@router.get("/queryinfluxdbbuckets/")
|
||||
async def fastapi_query_influxdb_buckets():
|
||||
return influxdb_api.query_buckets()
|
||||
|
||||
@router.get("/queryinfluxdbbucketmeasurements/")
|
||||
async def fastapi_query_influxdb_bucket_measurements(bucket: str):
|
||||
return influxdb_api.query_measurements(bucket=bucket)
|
||||
|
||||
############################################################
|
||||
# download history data
|
||||
############################################################
|
||||
|
||||
class Download_History_Data_Manually(BaseModel):
|
||||
"""
|
||||
download_date:样式如 datetime(2025, 5, 4)
|
||||
"""
|
||||
|
||||
download_date: datetime
|
||||
|
||||
|
||||
@router.post("/download_history_data_manually/")
|
||||
async def fastapi_download_history_data_manually(
|
||||
data: Download_History_Data_Manually,
|
||||
) -> None:
|
||||
item = data.dict()
|
||||
tz = timezone(timedelta(hours=8))
|
||||
begin_dt = datetime.combine(item.get("download_date").date(), dt_time.min).replace(
|
||||
tzinfo=tz
|
||||
)
|
||||
end_dt = datetime.combine(item.get("download_date").date(), dt_time(23, 59, 59)).replace(
|
||||
tzinfo=tz
|
||||
)
|
||||
|
||||
begin_time = begin_dt.isoformat()
|
||||
end_time = end_dt.isoformat()
|
||||
|
||||
influxdb_api.download_history_data_manually(
|
||||
begin_time=begin_time, end_time=end_time
|
||||
)
|
||||
|
||||
55
app/api/v1/endpoints/misc.py
Normal file
55
app/api/v1/endpoints/misc.py
Normal file
@@ -0,0 +1,55 @@
|
||||
from typing import Any
|
||||
import random
|
||||
from fastapi import APIRouter
|
||||
from fastapi.responses import JSONResponse
|
||||
from fastapi import status
|
||||
from pydantic import BaseModel
|
||||
from app.services.tjnetwork import (
|
||||
get_all_sensor_placements,
|
||||
get_all_burst_locate_results,
|
||||
)
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/getjson/")
|
||||
async def fastapi_get_json():
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
content={
|
||||
"code": 400,
|
||||
"message": "this is message",
|
||||
"data": 123,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@router.get("/getallsensorplacements/")
|
||||
async def fastapi_get_all_sensor_placements(network: str) -> list[dict[Any, Any]]:
|
||||
return get_all_sensor_placements(network)
|
||||
|
||||
|
||||
@router.get("/getallburstlocateresults/")
|
||||
async def fastapi_get_all_burst_locate_results(network: str) -> list[dict[Any, Any]]:
|
||||
return get_all_burst_locate_results(network)
|
||||
|
||||
|
||||
class Item(BaseModel):
|
||||
str_info: str
|
||||
|
||||
|
||||
@router.post("/test_dict/")
|
||||
async def fastapi_test_dict(data: Item) -> dict[str, str]:
|
||||
item = data.dict()
|
||||
return item
|
||||
|
||||
@router.get("/getrealtimedata/")
|
||||
async def fastapi_get_realtimedata():
|
||||
data = [random.randint(0, 100) for _ in range(100)]
|
||||
return data
|
||||
|
||||
|
||||
@router.get("/getsimulationresult/")
|
||||
async def fastapi_get_simulationresult():
|
||||
data = [random.randint(0, 100) for _ in range(100)]
|
||||
return data
|
||||
@@ -30,5 +30,26 @@ async def fastapi_set_demand_properties(
|
||||
############################################################
|
||||
# water distribution 36.[Water Distribution]
|
||||
############################################################
|
||||
# This section was empty in main.py, so skipping unless found elsewhere or needed.
|
||||
# If there are specific Water Distribution endpoints they should go here.
|
||||
@router.get("/calculatedemandtonodes/")
|
||||
async def fastapi_calculate_demand_to_nodes(
|
||||
network: str, req: Request
|
||||
) -> dict[str, float]:
|
||||
props = await req.json()
|
||||
demand = props["demand"]
|
||||
nodes = props["nodes"]
|
||||
return calculate_demand_to_nodes(network, demand, nodes)
|
||||
|
||||
@router.get("/calculatedemandtoregion/")
|
||||
async def fastapi_calculate_demand_to_region(
|
||||
network: str, req: Request
|
||||
) -> dict[str, float]:
|
||||
props = await req.json()
|
||||
demand = props["demand"]
|
||||
region = props["region"]
|
||||
return calculate_demand_to_region(network, demand, region)
|
||||
|
||||
@router.get("/calculatedemandtonetwork/")
|
||||
async def fastapi_calculate_demand_to_network(
|
||||
network: str, demand: float
|
||||
) -> dict[str, float]:
|
||||
return calculate_demand_to_network(network, demand)
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi import APIRouter, Request, Depends
|
||||
from typing import Any, List, Dict, Union
|
||||
from app.services.tjnetwork import *
|
||||
from app.api.v1.endpoints.auth import verify_token
|
||||
from app.infra.cache.redis_client import redis_client, encode_datetime, decode_datetime
|
||||
import msgpack
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@@ -22,6 +25,10 @@ async def fastapi_set_coord(network: str, req: Request) -> ChangeSet:
|
||||
props = await req.json()
|
||||
return set_coord(network, ChangeSet(props))
|
||||
|
||||
@router.get("/getnodecoord/")
|
||||
async def fastapi_get_node_coord(network: str, node: str) -> dict[str, float] | None:
|
||||
return get_node_coord(network, node)
|
||||
|
||||
# Additional geometry queries found in main.py logic (implicit or explicit)
|
||||
@router.get("/getnetworkinextent/")
|
||||
async def fastapi_get_network_in_extent(
|
||||
@@ -29,9 +36,38 @@ async def fastapi_get_network_in_extent(
|
||||
) -> dict[str, Any]:
|
||||
return get_network_in_extent(network, x1, y1, x2, y2)
|
||||
|
||||
@router.get("/getnetworkgeometries/", dependencies=[Depends(verify_token)])
|
||||
async def fastapi_get_network_geometries(network: str) -> dict[str, Any] | None:
|
||||
cache_key = f"getnetworkgeometries_{network}"
|
||||
data = redis_client.get(cache_key)
|
||||
if data:
|
||||
loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime)
|
||||
return loaded_dict
|
||||
|
||||
coords = get_network_node_coords(network)
|
||||
nodes = []
|
||||
for node_id, coord in coords.items():
|
||||
nodes.append(f"{node_id}:{coord['type']}:{coord['x']}:{coord['y']}")
|
||||
links = get_network_link_nodes(network)
|
||||
scadas = get_all_scada_info(network)
|
||||
|
||||
results = {"nodes": nodes, "links": links, "scadas": scadas}
|
||||
redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime))
|
||||
return results
|
||||
|
||||
@router.get("/getmajornodecoords/")
|
||||
async def fastapi_get_majornode_coords(network: str) -> list[Any]:
|
||||
return get_majornode_coords(network)
|
||||
async def fastapi_get_majornode_coords(
|
||||
network: str, diameter: int
|
||||
) -> dict[str, dict[str, float]]:
|
||||
return get_major_node_coords(network, diameter)
|
||||
|
||||
@router.get("/getmajorpipenodes/")
|
||||
async def fastapi_get_major_pipe_nodes(network: str, diameter: int) -> list[str] | None:
|
||||
return get_major_pipe_nodes(network, diameter)
|
||||
|
||||
@router.get("/getnetworklinknodes/")
|
||||
async def fastapi_get_network_link_nodes(network: str) -> list[str] | None:
|
||||
return get_network_link_nodes(network)
|
||||
|
||||
@router.get("/getallcoords/")
|
||||
async def fastapi_get_all_coords(network: str) -> list[Any]:
|
||||
|
||||
@@ -231,3 +231,15 @@ async def fastapi_generate_virtual_district(
|
||||
) -> ChangeSet:
|
||||
props = await req.json()
|
||||
return generate_virtual_district(network, props["centers"], inflate_delta)
|
||||
|
||||
@router.get("/calculatedistrictmeteringareafornodes/")
|
||||
async def fastapi_calculate_district_metering_area_for_nodes(
|
||||
network: str, req: Request
|
||||
) -> list[list[str]]:
|
||||
props = await req.json()
|
||||
nodes = props["nodes"]
|
||||
part_count = props["part_count"]
|
||||
part_type = props["part_type"]
|
||||
return calculate_district_metering_area_for_nodes(
|
||||
network, nodes, part_count, part_type
|
||||
)
|
||||
|
||||
44
app/api/v1/endpoints/risk.py
Normal file
44
app/api/v1/endpoints/risk.py
Normal file
@@ -0,0 +1,44 @@
|
||||
from typing import Any, List, Dict
|
||||
from fastapi import APIRouter
|
||||
from app.services.tjnetwork import (
|
||||
get_pipe_risk_probability_now,
|
||||
get_pipe_risk_probability,
|
||||
get_pipes_risk_probability,
|
||||
get_network_pipe_risk_probability_now,
|
||||
get_pipe_risk_probability_geometries,
|
||||
)
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@router.get("/getpiperiskprobabilitynow/")
|
||||
async def fastapi_get_pipe_risk_probability_now(
|
||||
network: str, pipe_id: str
|
||||
) -> dict[str, Any]:
|
||||
return get_pipe_risk_probability_now(network, pipe_id)
|
||||
|
||||
|
||||
@router.get("/getpiperiskprobability/")
|
||||
async def fastapi_get_pipe_risk_probability(
|
||||
network: str, pipe_id: str
|
||||
) -> dict[str, Any]:
|
||||
return get_pipe_risk_probability(network, pipe_id)
|
||||
|
||||
|
||||
@router.get("/getpipesriskprobability/")
|
||||
async def fastapi_get_pipes_risk_probability(
|
||||
network: str, pipe_ids: str
|
||||
) -> list[dict[str, Any]]:
|
||||
pipeids = pipe_ids.split(",")
|
||||
return get_pipes_risk_probability(network, pipeids)
|
||||
|
||||
|
||||
@router.get("/getnetworkpiperiskprobabilitynow/")
|
||||
async def fastapi_get_network_pipe_risk_probability_now(
|
||||
network: str,
|
||||
) -> list[dict[str, Any]]:
|
||||
return get_network_pipe_risk_probability_now(network)
|
||||
|
||||
|
||||
@router.get("/getpiperiskprobabilitygeometries/")
|
||||
async def fastapi_get_pipe_risk_probability_geometries(network: str) -> dict[str, Any]:
|
||||
return get_pipe_risk_probability_geometries(network)
|
||||
@@ -1,9 +1,32 @@
|
||||
from typing import Any
|
||||
from fastapi import APIRouter
|
||||
from fastapi import APIRouter, Request
|
||||
from app.native.api import ChangeSet
|
||||
from app.services.tjnetwork import (
|
||||
get_scada_info,
|
||||
get_all_scada_info,
|
||||
get_scada_device_schema,
|
||||
get_scada_device,
|
||||
set_scada_device,
|
||||
add_scada_device,
|
||||
delete_scada_device,
|
||||
clean_scada_device,
|
||||
get_all_scada_device_ids,
|
||||
get_all_scada_devices,
|
||||
get_scada_device_data_schema,
|
||||
get_scada_device_data,
|
||||
set_scada_device_data,
|
||||
add_scada_device_data,
|
||||
delete_scada_device_data,
|
||||
clean_scada_device_data,
|
||||
get_scada_element_schema,
|
||||
get_scada_element,
|
||||
set_scada_element,
|
||||
add_scada_element,
|
||||
delete_scada_element,
|
||||
clean_scada_element,
|
||||
get_all_scada_elements,
|
||||
get_scada_element_schema,
|
||||
get_scada_info_schema,
|
||||
)
|
||||
|
||||
router = APIRouter()
|
||||
@@ -15,3 +38,132 @@ async def fast_get_scada_properties(network: str, scada: str) -> dict[str, Any]:
|
||||
@router.get("/getallscadaproperties/")
|
||||
async def fast_get_all_scada_properties(network: str) -> list[dict[str, Any]]:
|
||||
return get_all_scada_info(network)
|
||||
|
||||
|
||||
############################################################
|
||||
# scada_device 29
|
||||
############################################################
|
||||
|
||||
@router.get("/getscadadeviceschema/")
|
||||
async def fastapi_get_scada_device_schema(network: str) -> dict[str, dict[str, Any]]:
|
||||
return get_scada_device_schema(network)
|
||||
|
||||
@router.get("/getscadadevice/")
|
||||
async def fastapi_get_scada_device(network: str, id: str) -> dict[str, Any]:
|
||||
return get_scada_device(network, id)
|
||||
|
||||
@router.post("/setscadadevice/", response_model=None)
|
||||
async def fastapi_set_scada_device(network: str, req: Request) -> ChangeSet:
|
||||
props = await req.json()
|
||||
return set_scada_device(network, ChangeSet(props))
|
||||
|
||||
@router.post("/addscadadevice/", response_model=None)
|
||||
async def fastapi_add_scada_device(network: str, req: Request) -> ChangeSet:
|
||||
props = await req.json()
|
||||
return add_scada_device(network, ChangeSet(props))
|
||||
|
||||
@router.post("/deletescadadevice/", response_model=None)
|
||||
async def fastapi_delete_scada_device(network: str, req: Request) -> ChangeSet:
|
||||
props = await req.json()
|
||||
return delete_scada_device(network, ChangeSet(props))
|
||||
|
||||
@router.post("/cleanscadadevice/", response_model=None)
|
||||
async def fastapi_clean_scada_device(network: str) -> ChangeSet:
|
||||
return clean_scada_device(network)
|
||||
|
||||
@router.get("/getallscadadeviceids/")
|
||||
async def fastapi_get_all_scada_device_ids(network: str) -> list[str]:
|
||||
return get_all_scada_device_ids(network)
|
||||
|
||||
@router.get("/getallscadadevices/")
|
||||
async def fastapi_get_all_scada_devices(network: str) -> list[dict[str, Any]]:
|
||||
return get_all_scada_devices(network)
|
||||
|
||||
|
||||
############################################################
|
||||
# scada_device_data 30
|
||||
############################################################
|
||||
|
||||
@router.get("/getscadadevicedataschema/")
|
||||
async def fastapi_get_scada_device_data_schema(
|
||||
network: str,
|
||||
) -> dict[str, dict[str, Any]]:
|
||||
return get_scada_device_data_schema(network)
|
||||
|
||||
@router.get("/getscadadevicedata/")
|
||||
async def fastapi_get_scada_device_data(network: str, device_id: str) -> dict[str, Any]:
|
||||
return get_scada_device_data(network, device_id)
|
||||
|
||||
@router.post("/setscadadevicedata/", response_model=None)
|
||||
async def fastapi_set_scada_device_data(network: str, req: Request) -> ChangeSet:
|
||||
props = await req.json()
|
||||
return set_scada_device_data(network, ChangeSet(props))
|
||||
|
||||
@router.post("/addscadadevicedata/", response_model=None)
|
||||
async def fastapi_add_scada_device_data(network: str, req: Request) -> ChangeSet:
|
||||
props = await req.json()
|
||||
return add_scada_device_data(network, ChangeSet(props))
|
||||
|
||||
@router.post("/deletescadadevicedata/", response_model=None)
|
||||
async def fastapi_delete_scada_device_data(network: str, req: Request) -> ChangeSet:
|
||||
props = await req.json()
|
||||
return delete_scada_device_data(network, ChangeSet(props))
|
||||
|
||||
@router.post("/cleanscadadevicedata/", response_model=None)
|
||||
async def fastapi_clean_scada_device_data(network: str) -> ChangeSet:
|
||||
return clean_scada_device_data(network)
|
||||
|
||||
|
||||
############################################################
|
||||
# scada_element 31
|
||||
############################################################
|
||||
|
||||
@router.get("/getscadaelementschema/")
|
||||
async def fastapi_get_scada_element_schema(
|
||||
network: str,
|
||||
) -> dict[str, dict[str, Any]]:
|
||||
return get_scada_element_schema(network)
|
||||
|
||||
@router.get("/getscadaelements/")
|
||||
async def fastapi_get_scada_elements(network: str) -> list[dict[str, Any]]:
|
||||
return get_all_scada_elements(network)
|
||||
|
||||
@router.get("/getscadaelement/")
|
||||
async def fastapi_get_scada_element(network: str, id: str) -> dict[str, Any]:
|
||||
return get_scada_element(network, id)
|
||||
|
||||
@router.post("/setscadaelement/", response_model=None)
|
||||
async def fastapi_set_scada_element(network: str, req: Request) -> ChangeSet:
|
||||
props = await req.json()
|
||||
return set_scada_element(network, ChangeSet(props))
|
||||
|
||||
@router.post("/addscadaelement/", response_model=None)
|
||||
async def fastapi_add_scada_element(network: str, req: Request) -> ChangeSet:
|
||||
props = await req.json()
|
||||
return add_scada_element(network, ChangeSet(props))
|
||||
|
||||
@router.post("/deletescadaelement/", response_model=None)
|
||||
async def fastapi_delete_scada_element(network: str, req: Request) -> ChangeSet:
|
||||
props = await req.json()
|
||||
return delete_scada_element(network, ChangeSet(props))
|
||||
|
||||
@router.post("/cleanscadaelement/", response_model=None)
|
||||
async def fastapi_clean_scada_element(network: str) -> ChangeSet:
|
||||
return clean_scada_element(network)
|
||||
|
||||
|
||||
############################################################
|
||||
# scada_info 38
|
||||
############################################################
|
||||
|
||||
@router.get("/getscadainfoschema/")
|
||||
async def fastapi_get_scada_info_schema(network: str) -> dict[str, dict[str, Any]]:
|
||||
return get_scada_info_schema(network)
|
||||
|
||||
@router.get("/getscadainfo/")
|
||||
async def fastapi_get_scada_info(network: str, id: str) -> dict[str, Any]:
|
||||
return get_scada_info(network, id)
|
||||
|
||||
@router.get("/getallscadainfo/")
|
||||
async def fastapi_get_all_scada_info(network: str) -> list[dict[str, Any]]:
|
||||
return get_all_scada_info(network)
|
||||
|
||||
17
app/api/v1/endpoints/schemes.py
Normal file
17
app/api/v1/endpoints/schemes.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from fastapi import APIRouter
|
||||
from typing import Any, List, Dict
|
||||
from app.services.tjnetwork import get_scheme_schema, get_scheme, get_all_schemes
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@router.get("/getschemeschema/")
|
||||
async def fastapi_get_scheme_schema(network: str) -> dict[str, dict[Any, Any]]:
|
||||
return get_scheme_schema(network)
|
||||
|
||||
@router.get("/getscheme/")
|
||||
async def fastapi_get_scheme(network: str, schema_name: str) -> dict[Any, Any]:
|
||||
return get_scheme(network, schema_name)
|
||||
|
||||
@router.get("/getallschemes/")
|
||||
async def fastapi_get_all_schemes(network: str) -> list[dict[Any, Any]]:
|
||||
return get_all_schemes(network)
|
||||
@@ -60,6 +60,13 @@ async def take_snapshot_for_operation_endpoint(
|
||||
async def take_snapshot_for_current_operation_endpoint(network: str, tag: str) -> None:
    """Take a snapshot of the current operation, labelled with *tag*."""
    return take_snapshot_for_current_operation(network, tag)
|
||||
|
||||
# Backwards-compatible alias for the old misspelled route: takenapshotforcurrentoperation
@router.post("/takenapshotforcurrentoperation")
async def take_snapshot_for_current_operation_legacy_endpoint(
    network: str, tag: str
) -> None:
    """Legacy route; delegates to the same snapshot service call."""
    return take_snapshot_for_current_operation(network, tag)
|
||||
|
||||
@router.post("/takesnapshot/")
|
||||
async def take_snapshot_endpoint(network: str, tag: str) -> None:
|
||||
return take_snapshot(network, tag)
|
||||
|
||||
@@ -18,20 +18,4 @@ async def fastapi_get_user(network: str, user_name: str) -> dict[Any, Any]:
|
||||
|
||||
@router.get("/getallusers/")
|
||||
async def fastapi_get_all_users(network: str) -> list[dict[Any, Any]]:
|
||||
return get_all_users(network)
|
||||
|
||||
############################################################
|
||||
# scheme 40
|
||||
############################################################
|
||||
|
||||
@router.get("/getschemeschema/")
|
||||
async def fastapi_get_scheme_schema(network: str) -> dict[str, dict[Any, Any]]:
|
||||
return get_scheme_schema(network)
|
||||
|
||||
@router.get("/getscheme/")
|
||||
async def fastapi_get_scheme(network: str, schema_name: str) -> dict[Any, Any]:
|
||||
return get_scheme(network, schema_name)
|
||||
|
||||
@router.get("/getallschemes/")
|
||||
async def fastapi_get_all_schemes(network: str) -> list[dict[Any, Any]]:
|
||||
return get_all_schemes(network)
|
||||
return get_all_users(network)
|
||||
@@ -7,7 +7,11 @@ from app.api.v1.endpoints import (
|
||||
extension,
|
||||
snapshots,
|
||||
data_query,
|
||||
users
|
||||
users,
|
||||
schemes,
|
||||
misc,
|
||||
risk,
|
||||
cache,
|
||||
)
|
||||
from app.api.v1.endpoints.network import (
|
||||
general,
|
||||
@@ -65,7 +69,11 @@ api_router.include_router(simulation.router, tags=["Simulation Control"])
|
||||
api_router.include_router(data_query.router, tags=["Data Query & InfluxDB"])
|
||||
api_router.include_router(scada.router, tags=["SCADA"])
|
||||
api_router.include_router(snapshots.router, tags=["Snapshots"])
|
||||
api_router.include_router(users.router, tags=["Users & Schemes"])
|
||||
api_router.include_router(users.router, tags=["Users"])
|
||||
api_router.include_router(schemes.router, tags=["Schemes"])
|
||||
api_router.include_router(misc.router, tags=["Misc"])
|
||||
api_router.include_router(risk.router, tags=["Risk"])
|
||||
api_router.include_router(cache.router, tags=["Cache"])
|
||||
|
||||
# Extension
|
||||
api_router.include_router(extension.router, tags=["Extension"])
|
||||
|
||||
19
app/main.py
19
app/main.py
@@ -20,6 +20,11 @@ logger.setLevel(logging.INFO)
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
logger.info("**********************************************************")
|
||||
logger.info(str(datetime.now()))
|
||||
logger.info("TJWater CloudService is starting...")
|
||||
logger.info("**********************************************************")
|
||||
|
||||
# 初始化数据库连接池
|
||||
tsdb.init_pool()
|
||||
pgdb.init_pool()
|
||||
@@ -28,6 +33,7 @@ async def lifespan(app: FastAPI):
|
||||
await pgdb.open()
|
||||
|
||||
if project_info.name:
|
||||
print(project_info.name)
|
||||
open_project(project_info.name)
|
||||
|
||||
yield
|
||||
@@ -53,16 +59,3 @@ app.add_middleware(GZipMiddleware, minimum_size=1000)
|
||||
app.include_router(api_router, prefix="/api/v1")
|
||||
app.include_router(timescaledb_router)
|
||||
app.include_router(postgresql_router)
|
||||
|
||||
|
||||
@app.on_event("startup")
|
||||
async def startup_db():
|
||||
logger.info("**********************************************************")
|
||||
logger.info(str(datetime.now()))
|
||||
logger.info("TJWater CloudService is starting...")
|
||||
logger.info("**********************************************************")
|
||||
|
||||
# open proj_name by default
|
||||
if project_info.name:
|
||||
print(project_info.name)
|
||||
open_project(project_info.name)
|
||||
|
||||
Reference in New Issue
Block a user