# TJWaterServerBinary/app/api/v1/endpoints/simulation.py
# Simulation API endpoints. (Header reconstructed: the original lines were
# web-viewer paste scaffolding, not part of the module.)
from typing import Any, List, Optional
from datetime import datetime, timedelta
import json
import os
import shutil
import threading
import pandas as pd
from fastapi import APIRouter, HTTPException, File, UploadFile, Query
from fastapi.responses import PlainTextResponse
import app.infra.db.influxdb.api as influxdb_api
import app.services.simulation as simulation
import app.services.globals as globals
from app.infra.cache.redis_client import redis_client
from app.services.tjnetwork import (
run_project,
run_project_return_dict,
run_inp,
dump_output,
)
from app.algorithms.simulations import (
burst_analysis,
valve_close_analysis,
flushing_analysis,
contaminant_simulation,
age_analysis,
# scheduling_analysis,
pressure_regulation,
)
from app.algorithms.sensors import (
pressure_sensor_placement_sensitivity,
pressure_sensor_placement_kmeans,
)
import app.algorithms.api_ex.Fdataclean as Fdataclean
import app.algorithms.api_ex.Pdataclean as Pdataclean
from app.services.network_import import network_update
from app.services.simulation_ops import (
project_management,
scheduling_simulation,
daily_scheduling_simulation,
)
from app.services.valve_isolation import analyze_valve_isolation
from pydantic import BaseModel
# Shared router for every simulation-related endpoint in this module.
router = APIRouter()
class RunSimulationManuallyByDate(BaseModel):
    """Request body for POST /runsimulationmanuallybydate/."""

    name: str  # network/project name
    simulation_date: str  # base date, parsed as "YYYY-MM-DD"
    start_time: str  # "HH:MM:SS" within simulation_date
    duration: int  # total window length in minutes
class BurstAnalysis(BaseModel):
    """Request body for burst-analysis simulations."""

    name: str  # network/project name
    modify_pattern_start_time: str
    burst_ID: List[str] | str | None = None  # burst element id(s)
    burst_size: List[float] | float | int | None = None  # one size per burst_ID
    modify_total_duration: int = 900  # seconds — TODO confirm unit
    modify_fixed_pump_pattern: Optional[dict[str, list]] = None
    modify_variable_pump_pattern: Optional[dict[str, list]] = None
    modify_valve_opening: Optional[dict[str, float]] = None
    scheme_Name: Optional[str] = None  # capital N kept: it is the wire field name
class SchedulingAnalysis(BaseModel):
    """Request body for POST /scheduling_analysis/."""

    network: str
    start_time: str
    pump_control: dict  # pump id -> control values
    tank_id: str
    water_plant_output_id: str
    time_delta: Optional[int] = 300  # step size; presumably seconds — confirm
class PressureRegulation(BaseModel):
    """Request body for POST /pressure_regulation/."""

    network: str
    start_time: str
    pump_control: dict  # pump id -> pattern values; split into fixed/variable later
    tank_init_level: Optional[dict] = None
    duration: Optional[int] = 900  # falls back to 900 when None/0 at call time
    scheme_Name: Optional[str] = None  # capital N kept: it is the wire field name
class ProjectManagement(BaseModel):
    """Request body for POST /project_management/."""

    network: str
    start_time: str
    pump_control: dict
    tank_init_level: Optional[dict] = None
    region_demand: Optional[dict] = None
class DailySchedulingAnalysis(BaseModel):
    """Request body for POST /daily_scheduling_analysis/."""

    network: str
    start_time: str
    pump_control: dict
    reservoir_id: str
    tank_id: str
    water_plant_output_id: str
    time_delta: Optional[int] = 300  # accepted but currently not forwarded — see endpoint
class PumpFailureState(BaseModel):
    """Request body for POST /pump_failure/."""

    time: str
    pump_status: dict  # stage ("first"/"second") -> {pump id -> status int}
class PressureSensorPlacement(BaseModel):
    """Request body for the POST pressure-sensor-placement endpoints."""

    name: str  # network/project name
    scheme_name: str
    sensor_number: int
    min_diameter: int = 0  # minimum pipe diameter to consider
    username: str
def run_simulation_manually_by_date(
    network_name: str,
    base_date: datetime,
    start_time: str,
    duration: int,
    step_minutes: int = 15,
) -> None:
    """Replay realtime simulations over a window of a given day.

    Starting at *start_time* ("HH:MM:SS") on *base_date*, calls
    ``simulation.run_simulation`` once every *step_minutes* minutes (default 15,
    the previous hard-coded value) until *duration* minutes are covered.
    Timestamps are emitted with a fixed ``+08:00`` offset (China Standard Time).
    """
    start_hour, start_minute, start_second = map(int, start_time.split(":"))
    start_datetime = base_date.replace(
        hour=start_hour, minute=start_minute, second=start_second
    )
    end_datetime = start_datetime + timedelta(minutes=duration)
    current_time = start_datetime
    while current_time < end_datetime:
        iso_time = current_time.strftime("%Y-%m-%dT%H:%M:%S") + "+08:00"
        simulation.run_simulation(
            name=network_name,
            simulation_type="realtime",
            modify_pattern_start_time=iso_time,
        )
        current_time += timedelta(minutes=step_minutes)
# PlainTextResponse is required here — otherwise every key ends up quoted.
@router.get("/runproject/", response_class=PlainTextResponse)
async def run_project_endpoint(network: str) -> str:
    """Run the project for *network* under a global Redis mutex."""
    lock_key = "exclusive_api_lock"
    timeout = 120  # lock auto-expires after this many seconds
    # NX: set only if absent; EX: expiry — one simulation at a time, cluster-wide.
    if not redis_client.set(lock_key, "locked", nx=True, ex=timeout):
        raise HTTPException(status_code=409, detail="is in simulation")
    try:
        return run_project(network)
    finally:
        # Release eagerly; the TTL above is the safety net if this is skipped.
        redis_client.delete(lock_key)
# DingZQ, 2025-02-04: returns dict[str, Any] — "output" is JSON, "report" is text.
@router.get("/runprojectreturndict/")
async def run_project_return_dict_endpoint(network: str) -> dict[str, Any]:
    """Run the project and return its output/report, serialized by Redis lock."""
    lock_key = "exclusive_api_lock"
    timeout = 120  # lock auto-expires after this many seconds
    # NX + EX makes this a cluster-wide mutex with automatic expiry.
    acquired = redis_client.set(lock_key, "locked", nx=True, ex=timeout)
    if acquired:
        try:
            return run_project_return_dict(network)
        finally:
            # Manual release; expiry covers the crash case.
            redis_client.delete(lock_key)
    raise HTTPException(status_code=409, detail="is in simulation")
# put in inp folder, name without extension
@router.get("/runinp/")
async def run_inp_endpoint(network: str) -> str:
    # Thin wrapper: run the named .inp file from the inp folder.
    return run_inp(network)
# path is absolute path
@router.get("/dumpoutput/")
async def dump_output_endpoint(output: str) -> str:
    # Thin wrapper: dump the simulation output file at the given absolute path.
    return dump_output(output)
# Analysis Endpoints
@router.get("/burstanalysis/")
async def burst_analysis_endpoint(
    network: str, pipe_id: str, start_time: str, end_time: str, burst_flow: float
):
    # Positional burst-analysis variant.
    # NOTE(review): /burst_analysis/ calls burst_analysis with keyword args
    # (name=, modify_pattern_start_time=, burst_ID=, ...); these positional
    # args may no longer line up with that signature — verify.
    return burst_analysis(network, pipe_id, start_time, end_time, burst_flow)
@router.get("/burst_analysis/")
async def fastapi_burst_analysis(
    network: str = Query(...),
    modify_pattern_start_time: str = Query(...),
    burst_ID: list | str = Query(..., alias="burst_ID[]"),  # alias matches the URL's array syntax
    burst_size: list | float | int = Query(..., alias="burst_size[]"),  # alias matches the URL's array syntax
    modify_total_duration: int = Query(...),
    scheme_name: str = Query(...),
) -> str:
    """Run a burst simulation for the given elements and report success."""
    analysis_kwargs = {
        "name": network,
        "modify_pattern_start_time": modify_pattern_start_time,
        "burst_ID": burst_ID,
        "burst_size": burst_size,
        "modify_total_duration": modify_total_duration,
        "scheme_name": scheme_name,
    }
    burst_analysis(**analysis_kwargs)
    return "success"
@router.get("/valvecloseanalysis/")
async def valve_close_analysis_endpoint(
    network: str, valve_id: str, start_time: str, end_time: str
):
    # Positional valve-close variant.
    # NOTE(review): /valve_close_analysis/ calls valve_close_analysis with
    # keyword args (name=, modify_pattern_start_time=, ...); verify these
    # positional args still match the current signature.
    return valve_close_analysis(network, valve_id, start_time, end_time)
@router.get("/valve_close_analysis/", response_class=PlainTextResponse)
async def fastapi_valve_close_analysis(
    network: str,
    start_time: str,
    valves: List[str] = Query(...),
    duration: int | None = None,
) -> str:
    """Simulate fully closing *valves* (opening = 0.0) from *start_time*."""
    closed_valves = dict.fromkeys(valves, 0.0)
    outcome = valve_close_analysis(
        name=network,
        modify_pattern_start_time=start_time,
        modify_total_duration=duration or 900,  # default 900 when omitted
        modify_valve_opening=closed_valves,
    )
    return outcome if outcome else "success"
@router.get("/valve_isolation_analysis/")
async def valve_isolation_endpoint(network: str, accident_element: str):
    # Find which valves must close to isolate the accident element.
    return analyze_valve_isolation(network, accident_element)
@router.get("/flushinganalysis/")
async def flushing_analysis_endpoint(
    network: str, pipe_id: str, start_time: str, duration: float, flow: float
):
    # Positional flushing variant.
    # NOTE(review): /flushing_analysis/ calls flushing_analysis with keyword
    # args (name=, modify_pattern_start_time=, drainage_node_ID=, ...); verify
    # these positional args still match the current signature.
    return flushing_analysis(network, pipe_id, start_time, duration, flow)
@router.get("/flushing_analysis/", response_class=PlainTextResponse)
async def fastapi_flushing_analysis(
    network: str,
    start_time: str,
    valves: List[str] = Query(...),
    valves_k: List[float] = Query(...),
    drainage_node_ID: str = Query(...),
    flush_flow: float = 0,
    duration: int | None = None,
) -> str:
    """Run a pipe-flushing simulation.

    *valves* and *valves_k* are parallel lists mapping valve id -> opening
    coefficient. A length mismatch previously raised an unhandled IndexError
    (HTTP 500); it is now rejected explicitly with HTTP 400.
    """
    if len(valves) != len(valves_k):
        raise HTTPException(
            status_code=400,
            detail="valves and valves_k must have the same length",
        )
    valve_opening = {
        valve_id: float(k) for valve_id, k in zip(valves, valves_k)
    }
    result = flushing_analysis(
        name=network,
        modify_pattern_start_time=start_time,
        modify_total_duration=duration or 900,  # default 900 when omitted
        modify_valve_opening=valve_opening,
        drainage_node_ID=drainage_node_ID,
        flushing_flow=flush_flow,
    )
    return result or "success"
@router.get("/contaminant_simulation/", response_class=PlainTextResponse)
async def fastapi_contaminant_simulation(
    network: str,
    start_time: str,
    source: str,
    concentration: float,
    duration: int,
    scheme_name: str | None = None,
    pattern: str | None = None,
) -> str:
    """Simulate a contaminant injected at *source* with the given concentration."""
    outcome = contaminant_simulation(
        name=network,
        modify_pattern_start_time=start_time,
        modify_total_duration=duration,
        source=source,
        concentration=concentration,
        source_pattern=pattern,
        scheme_name=scheme_name,
    )
    return outcome if outcome else "success"
@router.get("/ageanalysis/")
async def age_analysis_endpoint(network: str):
    # Whole-network water-age analysis with defaults.
    # NOTE(review): /age_analysis/ calls age_analysis with three args; verify
    # this single-arg call still matches the current signature.
    return age_analysis(network)
@router.get("/age_analysis/", response_class=PlainTextResponse)
async def fastapi_age_analysis(
    network: str, start_time: str, end_time: str, duration: int
) -> str:
    # Water-age analysis over *duration* starting at *start_time*.
    # NOTE(review): *end_time* is accepted but never used — confirm whether it
    # should be passed through to age_analysis or removed from the API.
    result = age_analysis(network, start_time, duration)
    return result or "success"
# @router.get("/schedulinganalysis/")
# async def scheduling_analysis_endpoint(network: str):
# return scheduling_analysis(network)
@router.get("/pressureregulation/")
async def pressure_regulation_endpoint(
    network: str, target_node: str, target_pressure: float
):
    # Positional pressure-regulation variant.
    # NOTE(review): /pressure_regulation/ (POST) calls pressure_regulation with
    # keyword args (name=, modify_pattern_start_time=, ...); verify these
    # positional args still match the current signature.
    return pressure_regulation(network, target_node, target_pressure)
@router.post("/pressure_regulation/")
async def fastapi_pressure_regulation(data: PressureRegulation) -> str:
    """Run a pressure-regulation simulation.

    Splits *pump_control* into variable-speed vs fixed-speed pump patterns
    using the pump ids cached in ``globals`` (presumably populated by
    ``query_corresponding_element_id_and_query_id`` — confirm).
    """
    item = data.dict()
    simulation.query_corresponding_element_id_and_query_id(item["network"])
    # The former `fixed_pumps = set(globals.fixed_pumps_id.keys())` was dead
    # code (never read) and has been removed.
    variable_pumps = set(globals.variable_pumps_id.keys())
    fixed_pump_pattern: dict[str, list] = {}
    variable_pump_pattern: dict[str, list] = {}
    for pump_id, values in item["pump_control"].items():
        # Any pump not registered as variable-speed is treated as fixed-speed.
        if pump_id in variable_pumps:
            variable_pump_pattern[pump_id] = values
        else:
            fixed_pump_pattern[pump_id] = values
    pressure_regulation(
        name=item["network"],
        modify_pattern_start_time=item["start_time"],
        modify_total_duration=item["duration"] or 900,  # default 900 when None/0
        modify_tank_initial_level=item["tank_init_level"],
        modify_fixed_pump_pattern=fixed_pump_pattern or None,
        modify_variable_pump_pattern=variable_pump_pattern or None,
        scheme_Name=item["scheme_Name"],
    )
    return "success"
@router.get("/projectmanagement/")
async def project_management_endpoint(network: str):
    # NOTE(review): project_management is called elsewhere with five keyword
    # arguments; confirm a single positional arg is still a valid call.
    return project_management(network)
@router.post("/project_management/")
async def fastapi_project_management(data: ProjectManagement) -> str:
    """Forward a project-management run to the simulation-ops service."""
    return project_management(
        prj_name=data.network,
        start_datetime=data.start_time,
        pump_control=data.pump_control,
        tank_initial_level_control=data.tank_init_level,
        region_demand_control=data.region_demand,
    )
# @router.get("/dailyschedulinganalysis/")
# async def daily_scheduling_analysis_endpoint(network: str):
# return daily_scheduling_analysis(network)
@router.post("/scheduling_analysis/")
async def fastapi_scheduling_analysis(data: SchedulingAnalysis) -> str:
    """Forward a pump-scheduling run to the simulation-ops service."""
    return scheduling_simulation(
        data.network,
        data.start_time,
        data.pump_control,
        data.tank_id,
        data.water_plant_output_id,
        data.time_delta,
    )
@router.post("/daily_scheduling_analysis/")
async def fastapi_daily_scheduling_analysis(data: DailySchedulingAnalysis) -> str:
    # Forward a daily scheduling run to the simulation-ops service.
    # NOTE(review): the model's time_delta field is accepted but not forwarded —
    # confirm whether daily_scheduling_simulation should receive it.
    item = data.dict()
    return daily_scheduling_simulation(
        item["network"],
        item["start_time"],
        item["pump_control"],
        item["reservoir_id"],
        item["tank_id"],
        item["water_plant_output_id"],
    )
@router.post("/network_project/")
async def fastapi_network_project(file: UploadFile = File()) -> str:
    """Persist an uploaded .inp file under ./inp/ and run it.

    The saved name is date-stamped, so a second upload on the same day
    overwrites the first.
    """
    temp_file_dir = "./inp/"
    # exist_ok avoids the check-then-mkdir race of the previous implementation.
    os.makedirs(temp_file_dir, exist_ok=True)
    temp_file_name = f'network_project_{datetime.now().strftime("%Y%m%d")}'
    temp_file_path = f"{temp_file_dir}{temp_file_name}.inp"
    try:
        with open(temp_file_path, "wb") as buffer:
            shutil.copyfileobj(file.file, buffer)
    finally:
        # Release the spooled upload promptly instead of waiting for GC.
        file.file.close()
    return run_inp(temp_file_name)
@router.get("/networkupdate/")
async def network_update_endpoint(network: str):
    # Apply a network update without an upload.
    # NOTE(review): the POST variant passes a file *path* to network_update,
    # while this passes the network name — confirm the expected argument.
    return network_update(network)
@router.post("/network_update/")
async def fastapi_network_update(file: UploadFile = File()) -> str:
    """Persist an uploaded network file and apply it via network_update.

    Returns a JSON-encoded success message; any failure surfaces as HTTP 500.
    """
    default_folder = "./"
    temp_file_name = f'network_update_{datetime.now().strftime("%Y%m%d")}'
    temp_file_path = os.path.join(default_folder, temp_file_name)
    try:
        with open(temp_file_path, "wb") as buffer:
            shutil.copyfileobj(file.file, buffer)
        network_update(temp_file_path)
        return json.dumps({"message": "管网更新成功"})
    except Exception as exc:
        # Chain the cause so the logged traceback keeps the root failure.
        raise HTTPException(status_code=500, detail=f"数据库操作失败: {exc}") from exc
# @router.get("/pumpfailure/")
# async def pump_failure_endpoint(network: str, pump_id: str, time: str):
# return pump_failure(network, pump_id, time)
@router.post("/pump_failure/")
async def fastapi_pump_failure(data: PumpFailureState) -> str:
    """Apply a pump status change and persist it.

    Appends the raw request to ./pump_failure_message.txt for auditing, then
    merges ``data.pump_status`` (keyed by stage, "first"/"second") into the
    two dict literals stored in ./pump_failure_status.txt. Returns a
    JSON-encoded "SUCCESS" or "ERROR: ..." string.
    """
    import ast  # local import: only this endpoint parses the status file

    item = data.dict()
    with open("./pump_failure_message.txt", "a", encoding="utf-8-sig") as f1:
        f1.write("[{}] {}\n".format(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), item))
    with open("./pump_failure_status.txt", "r", encoding="utf-8-sig") as f2:
        lines = f2.readlines()
    # The status file stores Python dict literals (one per stage line).
    # ast.literal_eval replaces the former eval() — same parse for literals,
    # no arbitrary-code execution. The json round-trip is kept deliberately:
    # it normalizes keys/values to JSON types exactly as before.
    first_stage_pump_status_dict = json.loads(json.dumps(ast.literal_eval(lines[0])))
    second_stage_pump_status_dict = json.loads(json.dumps(ast.literal_eval(lines[-1])))
    pump_status_dict = {
        "first": first_stage_pump_status_dict,
        "second": second_stage_pump_status_dict,
    }
    status_info = item.copy()
    for pump_type, pumps in status_info["pump_status"].items():
        if pump_type not in pump_status_dict:
            return json.dumps("ERROR: Wrong Pump Type")
        if not all(pump_id in pump_status_dict[pump_type] for pump_id in pumps):
            return json.dumps("ERROR: Wrong Pump ID")
        for pump_id, state in pumps.items():
            pump_status_dict[pump_type][pump_id] = int(state)
    with open("./pump_failure_status.txt", "w", encoding="utf-8-sig") as f2_:
        f2_.write(
            "{}\n{}".format(pump_status_dict["first"], pump_status_dict["second"])
        )
    return json.dumps("SUCCESS")
@router.get("/pressuresensorplacementsensitivity/")
async def pressure_sensor_placement_sensitivity_endpoint(
    name: str, scheme_name: str, sensor_number: int, min_diameter: int, username: str
):
    # GET variant of sensitivity-based sensor placement; a POST variant
    # accepts the same fields as a JSON body.
    return pressure_sensor_placement_sensitivity(
        name, scheme_name, sensor_number, min_diameter, username
    )
@router.post("/pressure_sensor_placement_sensitivity/")
async def fastapi_pressure_sensor_placement_sensitivity(
    data: PressureSensorPlacement,
) -> None:
    """Run sensitivity-based pressure-sensor placement from a JSON body."""
    # Model field names match the algorithm's keyword parameters one-to-one.
    pressure_sensor_placement_sensitivity(**data.dict())
@router.get("/pressuresensorplacementkmeans/")
async def pressure_sensor_placement_kmeans_endpoint(
    name: str, scheme_name: str, sensor_number: int, min_diameter: int, username: str
):
    # GET variant of k-means-based sensor placement; a POST variant accepts
    # the same fields as a JSON body.
    return pressure_sensor_placement_kmeans(
        name, scheme_name, sensor_number, min_diameter, username
    )
@router.post("/pressure_sensor_placement_kmeans/")
async def fastapi_pressure_sensor_placement_kmeans(
    data: PressureSensorPlacement,
) -> None:
    """Run k-means-based pressure-sensor placement from a JSON body."""
    # Model field names match the algorithm's keyword parameters one-to-one.
    pressure_sensor_placement_kmeans(**data.dict())
@router.post("/sensorplacementscheme/create")
async def fastapi_pressure_sensor_placement(
    network: str = Query(...),
    scheme_name: str = Query(...),
    sensor_type: str = Query(...),
    method: str = Query(...),
    sensor_count: int = Query(...),
    min_diameter: int = Query(0),
    user_name: str = Query(...),
) -> str:
    """Create a sensor-placement scheme using the selected *method*.

    NOTE(review): *sensor_type* is accepted but not used here — confirm intent.
    """
    placement_algorithms = {
        "sensitivity": pressure_sensor_placement_sensitivity,
        "kmeans": pressure_sensor_placement_kmeans,
    }
    algorithm = placement_algorithms.get(method)
    if algorithm is None:
        raise HTTPException(
            status_code=400, detail="Invalid method. Must be 'sensitivity' or 'kmeans'"
        )
    algorithm(
        name=network,
        scheme_name=scheme_name,
        sensor_number=sensor_count,
        min_diameter=min_diameter,
        username=user_name,
    )
    return "success"
@router.post("/scadadevicedatacleaning/")
async def fastapi_scada_device_data_cleaning(
    network: str = Query(...),
    ids_list: List[str] = Query(...),
    start_time: str = Query(...),
    end_time: str = Query(...),
    user_name: str = Query(...),
) -> str:
    """Clean SCADA series for the given devices and write them back to InfluxDB.

    Pulls raw series over [start_time, end_time], groups devices by type,
    cleans pressure / pipe_flow groups with their respective algorithms, and
    re-imports the cleaned columns (raw=False). Other types are skipped.
    """
    item = {
        "network": network,
        "ids": ids_list,
        "start_time": start_time,
        "end_time": end_time,
        "user_name": user_name,  # NOTE(review): collected but never used here
    }
    # NOTE(review): only ids_list[0] is used, split on commas — callers
    # apparently send one comma-joined string; confirm this contract.
    query_ids_list = item["ids"][0].split(",")
    scada_data = influxdb_api.query_SCADA_data_by_device_ID_and_timerange(
        query_ids_list=query_ids_list,
        start_time=item["start_time"],
        end_time=item["end_time"],
    )
    scada_device_info = influxdb_api.query_pg_scada_info(item["network"])
    scada_device_info_dict = {info["id"]: info for info in scada_device_info}
    # Group the requested devices by their declared type.
    type_groups: dict[str, list[str]] = {}
    for device_id in query_ids_list:
        device_info = scada_device_info_dict.get(device_id, {})
        device_type = device_info.get("type", "unknown")
        type_groups.setdefault(device_type, []).append(device_id)
    for device_type, device_ids in type_groups.items():
        if device_type not in ["pressure", "pipe_flow"]:
            continue  # no cleaning algorithm for other device types
        type_scada_data = {
            device_id: scada_data[device_id]
            for device_id in device_ids
            if device_id in scada_data
        }
        if not type_scada_data:
            continue
        # The time axis comes from the first device in the group; assumes all
        # devices share identical timestamps/length — TODO confirm upstream.
        time_list = [record["time"] for record in next(iter(type_scada_data.values()))]
        df = pd.DataFrame({"time": time_list})
        for device_id in device_ids:
            if device_id in type_scada_data:
                values = [record["value"] for record in type_scada_data[device_id]]
                df[device_id] = values
        if device_type == "pressure":
            cleaned_value_df = Pdataclean.clean_pressure_data_df_km(df)
        elif device_type == "pipe_flow":
            cleaned_value_df = Fdataclean.clean_flow_data_df_kf(df)
        cleaned_value_df = pd.DataFrame(cleaned_value_df)
        cleaned_df = pd.concat([df["time"], cleaned_value_df], axis=1)
        influxdb_api.import_multicolumn_data_from_dict(
            data_dict=cleaned_df.to_dict("list"),
            raw=False,
        )
    return "success"
@router.post("/runsimulationmanuallybydate/")
async def fastapi_run_simulation_manually_by_date(
    data: RunSimulationManuallyByDate,
) -> dict[str, str]:
    """Prime simulation globals, then replay realtime simulations for a date.

    Returns {"status": "success"} or {"status": "error", "message": ...};
    errors are reported in-band rather than via HTTP status codes.
    """
    item = data.dict()
    try:
        # Populate the global caches the realtime simulation reads from.
        simulation.query_corresponding_element_id_and_query_id(item["name"])
        simulation.query_corresponding_pattern_id_and_query_id(item["name"])
        region_result = simulation.query_non_realtime_region(item["name"])
        globals.source_outflow_region_id = simulation.get_source_outflow_region_id(
            item["name"], region_result
        )
        globals.realtime_region_pipe_flow_and_demand_id = (
            simulation.query_realtime_region_pipe_flow_and_demand_id(
                item["name"], region_result
            )
        )
        globals.pipe_flow_region_patterns = simulation.query_pipe_flow_region_patterns(
            item["name"]
        )
        globals.non_realtime_region_patterns = (
            simulation.query_non_realtime_region_patterns(item["name"], region_result)
        )
        (
            globals.source_outflow_region_patterns,
            globals.realtime_region_pipe_flow_and_demand_patterns,
        ) = simulation.get_realtime_region_patterns(
            item["name"],
            globals.source_outflow_region_id,
            globals.realtime_region_pipe_flow_and_demand_id,
        )
        base_date = datetime.strptime(item["simulation_date"], "%Y-%m-%d")
        # NOTE(review): the thread is joined immediately, so the run is
        # effectively synchronous — but exceptions raised inside the thread do
        # NOT reach the except below, and the endpoint still reports success.
        # Confirm whether that is intended before restructuring.
        thread = threading.Thread(
            target=lambda: run_simulation_manually_by_date(
                item["name"], base_date, item["start_time"], item["duration"]
            )
        )
        thread.start()
        thread.join()
        return {"status": "success"}
    except Exception as exc:
        return {"status": "error", "message": str(exc)}