Adjust the API structure; restore several previously lost APIs (see the scripts folder for details); add a valve isolation (valve-closure) analysis algorithm and implement its API
@@ -3,6 +3,7 @@ from app.algorithms.sensors import (
    pressure_sensor_placement_sensitivity,
    pressure_sensor_placement_kmeans,
)
from app.algorithms.valve_isolation import valve_isolation_analysis
from app.algorithms.simulations import (
    convert_to_local_unit,
    burst_analysis,
@@ -25,4 +26,5 @@ __all__ = [
    "contaminant_simulation",
    "age_analysis",
    "pressure_regulation",
    "valve_isolation_analysis",
]
@@ -14,14 +14,20 @@ class PipelineHealthAnalyzer:
    Before use, make sure the dependencies are installed: joblib, pandas, numpy, scikit-survival, matplotlib.
    """

    def __init__(self, model_path: str = "model/my_survival_forest_model_quxi.joblib"):
    def __init__(self, model_path: str = None):
        """
        Initialize the analyzer and load the pretrained random survival forest model.

        :param model_path: Path to the model file (defaults to the relative path 'model/my_survival_forest_model_quxi.joblib').
        :param model_path: Path to the model file (defaults to the relative path './model/my_survival_forest_model_quxi.joblib').
        :raises FileNotFoundError: If the model file does not exist.
        :raises Exception: If model loading fails.
        """
        if model_path is None:
            model_path = os.path.join(
                os.path.dirname(__file__),
                "model",
                "my_survival_forest_model_quxi.joblib",
            )
        # Ensure the model directory exists
        model_dir = os.path.dirname(model_path)
        if model_dir and not os.path.exists(model_dir):
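The change above replaces a hard-coded, working-directory-relative default with a path resolved against the module's own directory, so PipelineHealthAnalyzer() works no matter where the process was started. A minimal sketch of that resolution logic (the module path passed in is hypothetical, for illustration only):

import os

def resolve_model_path(model_path: str | None, module_file: str) -> str:
    # Mirror of the commit's default-path logic: fall back to a file that
    # lives next to the module rather than under the process CWD.
    if model_path is None:
        model_path = os.path.join(
            os.path.dirname(module_file),
            "model",
            "my_survival_forest_model_quxi.joblib",
        )
    return model_path

# With no explicit path, the model is found regardless of os.getcwd():
print(resolve_model_path(None, "/srv/app/algorithms/api_ex/analyzer.py"))
# -> /srv/app/algorithms/api_ex/model/my_survival_forest_model_quxi.joblib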
app/algorithms/valve_isolation.py (new file, 86 lines)
@@ -0,0 +1,86 @@
from collections import defaultdict, deque
from typing import Any

from app.services.tjnetwork import (
    get_link_properties,
    get_link_type,
    get_network_link_nodes,
    is_link,
    is_node,
)


VALVE_LINK_TYPE = "valve"


def _parse_link_entry(link_entry: str) -> tuple[str, str, str, str]:
    parts = link_entry.split(":", 3)
    if len(parts) != 4:
        raise ValueError(f"Invalid link entry format: {link_entry}")
    return parts[0], parts[1], parts[2], parts[3]


def valve_isolation_analysis(network: str, accident_element: str) -> dict[str, Any]:
    """
    Valve-closure search/analysis: determine, from the network topology, the valves that must be closed to isolate an accident.

    :param network: Model name.
    :param accident_element: Accident location (a node, or a pipe/pump/valve ID).
    :return: dict containing the affected nodes, the valves that must be closed, the optional valves, and related information.
    """
    if is_node(network, accident_element):
        start_nodes = {accident_element}
        accident_type = "node"
    elif is_link(network, accident_element):
        accident_type = get_link_type(network, accident_element)
        link_props = get_link_properties(network, accident_element)
        node1 = link_props.get("node1")
        node2 = link_props.get("node2")
        if not node1 or not node2:
            raise ValueError("Accident link missing node endpoints")
        start_nodes = {node1, node2}
    else:
        raise ValueError("Accident element not found")

    adjacency: dict[str, set[str]] = defaultdict(set)
    valve_links: dict[str, tuple[str, str]] = {}
    for link_entry in get_network_link_nodes(network):
        link_id, link_type, node1, node2 = _parse_link_entry(link_entry)
        link_type_name = str(link_type).lower()
        if link_type_name == VALVE_LINK_TYPE:
            valve_links[link_id] = (node1, node2)
            continue
        adjacency[node1].add(node2)
        adjacency[node2].add(node1)

    affected_nodes: set[str] = set()
    queue = deque(start_nodes)
    while queue:
        node = queue.popleft()
        if node in affected_nodes:
            continue
        affected_nodes.add(node)
        for neighbor in adjacency.get(node, []):
            if neighbor not in affected_nodes:
                queue.append(neighbor)

    must_close_valves: list[str] = []
    optional_valves: list[str] = []
    for valve_id, (node1, node2) in valve_links.items():
        in_node1 = node1 in affected_nodes
        in_node2 = node2 in affected_nodes
        if in_node1 and in_node2:
            optional_valves.append(valve_id)
        elif in_node1 or in_node2:
            must_close_valves.append(valve_id)

    must_close_valves.sort()
    optional_valves.sort()

    return {
        "accident_element": accident_element,
        "accident_type": accident_type,
        "affected_nodes": sorted(affected_nodes),
        "must_close_valves": must_close_valves,
        "optional_valves": optional_valves,
        "isolatable": len(must_close_valves) > 0,
    }
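The algorithm builds an adjacency map over all non-valve links, flood-fills (BFS) outward from the accident element, and classifies each valve by how many endpoints fall inside the flooded region: a valve straddling the boundary must be closed, while one fully inside is optional. A self-contained sketch of that core, using a toy three-node network in place of the tjnetwork service (link entries follow the same 'id:type:node1:node2' format the parser expects; all IDs are made up):

from collections import defaultdict, deque

# Hypothetical toy network: j1-j2 via pipe p1, j2-j3 guarded by valve v1.
link_entries = ["p1:pipe:j1:j2", "v1:valve:j2:j3"]

adjacency = defaultdict(set)
valve_links = {}
for entry in link_entries:
    link_id, link_type, n1, n2 = entry.split(":", 3)
    if link_type.lower() == "valve":
        valve_links[link_id] = (n1, n2)  # valves do not propagate the flood fill
        continue
    adjacency[n1].add(n2)
    adjacency[n2].add(n1)

# BFS from the accident node j1.
affected, queue = set(), deque(["j1"])
while queue:
    node = queue.popleft()
    if node in affected:
        continue
    affected.add(node)
    queue.extend(adjacency[node] - affected)

# Exactly one endpoint inside the affected region -> must close.
must_close = [v for v, (a, b) in valve_links.items()
              if (a in affected) != (b in affected)]
print(sorted(affected), must_close)  # ['j1', 'j2'] ['v1']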
@@ -11,19 +11,19 @@ router = APIRouter()
# coord 24.[COORDINATES]
############################################################

@router.get("/getcoordschema/")
async def fastapi_get_coord_schema(network: str) -> dict[str, dict[str, Any]]:
    return get_coord_schema(network)
# @router.get("/getcoordschema/")
# async def fastapi_get_coord_schema(network: str) -> dict[str, dict[str, Any]]:
# return get_coord_schema(network)

@router.get("/getcoord/")
async def fastapi_get_coord(network: str, node: str) -> dict[str, Any]:
    return get_coord(network, node)
# @router.get("/getcoord/")
# async def fastapi_get_coord(network: str, node: str) -> dict[str, Any]:
# return get_coord(network, node)

# example: set_coord(p, ChangeSet({'node': 'j1', 'x': 1.0, 'y': 2.0}))
@router.post("/setcoord/", response_model=None)
async def fastapi_set_coord(network: str, req: Request) -> ChangeSet:
    props = await req.json()
    return set_coord(network, ChangeSet(props))
# # example: set_coord(p, ChangeSet({'node': 'j1', 'x': 1.0, 'y': 2.0}))
# @router.post("/setcoord/", response_model=None)
# async def fastapi_set_coord(network: str, req: Request) -> ChangeSet:
# props = await req.json()
# return set_coord(network, ChangeSet(props))

@router.get("/getnodecoord/")
async def fastapi_get_node_coord(network: str, node: str) -> dict[str, float] | None:
@@ -69,12 +69,12 @@ async def fastapi_get_major_pipe_nodes(network: str, diameter: int) -> list[str]
async def fastapi_get_network_link_nodes(network: str) -> list[str] | None:
    return get_network_link_nodes(network)

@router.get("/getallcoords/")
async def fastapi_get_all_coords(network: str) -> list[Any]:
    return get_all_coords(network)
# @router.get("/getallcoords/")
# async def fastapi_get_all_coords(network: str) -> list[Any]:
# return get_all_coords(network)

@router.get("/projectcoordinates/")
async def fastapi_project_coordinates(
    network: str, from_epsg: int, to_epsg: int
) -> ChangeSet:
    return project_coordinates(network, from_epsg, to_epsg)
# @router.get("/projectcoordinates/")
# async def fastapi_project_coordinates(
# network: str, from_epsg: int, to_epsg: int
# ) -> ChangeSet:
# return project_coordinates(network, from_epsg, to_epsg)
@@ -1,6 +1,15 @@
from typing import Any
from fastapi import APIRouter, HTTPException
from typing import Any, List, Optional
from datetime import datetime, timedelta
import json
import os
import shutil
import threading
import pandas as pd
from fastapi import APIRouter, HTTPException, File, UploadFile, Query
from fastapi.responses import PlainTextResponse
import app.infra.db.influxdb.api as influxdb_api
import app.services.simulation as simulation
import app.services.globals as globals
from app.infra.cache.redis_client import redis_client
from app.services.tjnetwork import (
    run_project,
@@ -21,12 +30,107 @@ from app.algorithms.sensors import (
    pressure_sensor_placement_sensitivity,
    pressure_sensor_placement_kmeans,
)
import app.algorithms.api_ex.Fdataclean as Fdataclean
import app.algorithms.api_ex.Pdataclean as Pdataclean
from app.services.network_import import network_update
from app.services.simulation_ops import project_management
from app.services.simulation_ops import (
    project_management,
    scheduling_simulation,
    daily_scheduling_simulation,
)
from app.services.valve_isolation import analyze_valve_isolation
from pydantic import BaseModel

router = APIRouter()


class RunSimulationManuallyByDate(BaseModel):
    name: str
    simulation_date: str
    start_time: str
    duration: int


class BurstAnalysis(BaseModel):
    name: str
    modify_pattern_start_time: str
    burst_ID: List[str] | str | None = None
    burst_size: List[float] | float | int | None = None
    modify_total_duration: int = 900
    modify_fixed_pump_pattern: Optional[dict[str, list]] = None
    modify_variable_pump_pattern: Optional[dict[str, list]] = None
    modify_valve_opening: Optional[dict[str, float]] = None
    scheme_Name: Optional[str] = None


class SchedulingAnalysis(BaseModel):
    network: str
    start_time: str
    pump_control: dict
    tank_id: str
    water_plant_output_id: str
    time_delta: Optional[int] = 300


class PressureRegulation(BaseModel):
    network: str
    start_time: str
    pump_control: dict
    tank_init_level: Optional[dict] = None
    duration: Optional[int] = 900
    scheme_Name: Optional[str] = None


class ProjectManagement(BaseModel):
    network: str
    start_time: str
    pump_control: dict
    tank_init_level: Optional[dict] = None
    region_demand: Optional[dict] = None


class DailySchedulingAnalysis(BaseModel):
    network: str
    start_time: str
    pump_control: dict
    reservoir_id: str
    tank_id: str
    water_plant_output_id: str
    time_delta: Optional[int] = 300


class PumpFailureState(BaseModel):
    time: str
    pump_status: dict


class PressureSensorPlacement(BaseModel):
    name: str
    scheme_name: str
    sensor_number: int
    min_diameter: int = 0
    username: str


def run_simulation_manually_by_date(
    network_name: str, base_date: datetime, start_time: str, duration: int
) -> None:
    start_hour, start_minute, start_second = map(int, start_time.split(":"))
    start_datetime = base_date.replace(
        hour=start_hour, minute=start_minute, second=start_second
    )
    end_datetime = start_datetime + timedelta(minutes=duration)
    current_time = start_datetime
    while current_time < end_datetime:
        iso_time = current_time.strftime("%Y-%m-%dT%H:%M:%S") + "+08:00"
        simulation.run_simulation(
            name=network_name,
            simulation_type="realtime",
            modify_pattern_start_time=iso_time,
        )
        current_time += timedelta(minutes=15)


# Must use PlainTextResponse here, otherwise every key gets wrapped in quotes
@router.get("/runproject/", response_class=PlainTextResponse)
async def run_project_endpoint(network: str) -> str:
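run_simulation_manually_by_date replays the realtime simulation in fixed 15-minute steps from the requested start time until the duration is exhausted. A quick, dependency-free sketch of the timestamps the loop produces (the date is made up; the +08:00 offset is hard-coded exactly as in the diff):

from datetime import datetime, timedelta

base_date = datetime.strptime("2024-01-01", "%Y-%m-%d")
start = base_date.replace(hour=8, minute=0, second=0)
end = start + timedelta(minutes=45)  # duration=45 -> three 15-minute steps

current = start
while current < end:
    # One realtime simulation run is triggered per timestamp.
    print(current.strftime("%Y-%m-%dT%H:%M:%S") + "+08:00")
    current += timedelta(minutes=15)
# 2024-01-01T08:00:00+08:00
# 2024-01-01T08:15:00+08:00
# 2024-01-01T08:30:00+08:00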
@@ -88,6 +192,23 @@ async def burst_analysis_endpoint(
    return burst_analysis(network, pipe_id, start_time, end_time, burst_flow)


@router.post("/burst_analysis/")
async def fastapi_burst_analysis(data: BurstAnalysis) -> str:
    item = data.dict()
    burst_analysis(
        name=item["name"],
        modify_pattern_start_time=item["modify_pattern_start_time"],
        burst_ID=item["burst_ID"],
        burst_size=item["burst_size"],
        modify_total_duration=item["modify_total_duration"],
        modify_fixed_pump_pattern=item["modify_fixed_pump_pattern"],
        modify_variable_pump_pattern=item["modify_variable_pump_pattern"],
        modify_valve_opening=item["modify_valve_opening"],
        scheme_Name=item["scheme_Name"],
    )
    return "success"


@router.get("/valvecloseanalysis/")
async def valve_close_analysis_endpoint(
    network: str, valve_id: str, start_time: str, end_time: str
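The restored /burst_analysis/ endpoint takes its parameters as a JSON body validated by the BurstAnalysis model rather than as query parameters. A client-side sketch, assuming the `requests` package and a locally running API; the host, route prefix, network name, and pipe ID are all placeholders:

import requests

payload = {
    "name": "demo_network",                            # hypothetical model name
    "modify_pattern_start_time": "2024-01-01T08:00:00+08:00",
    "burst_ID": ["p123"],                              # hypothetical pipe ID
    "burst_size": [50.0],
    "modify_total_duration": 900,                      # model default
}
resp = requests.post("http://localhost:8000/api/v1/burst_analysis/", json=payload)
print(resp.text)  # "success" on completion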
@@ -95,6 +216,27 @@ async def valve_close_analysis_endpoint(
    return valve_close_analysis(network, valve_id, start_time, end_time)


@router.get("/valve_close_analysis/", response_class=PlainTextResponse)
async def fastapi_valve_close_analysis(
    network: str,
    start_time: str,
    valves: List[str] = Query(...),
    duration: int | None = None,
) -> str:
    result = valve_close_analysis(
        name=network,
        modify_pattern_start_time=start_time,
        modify_total_duration=duration or 900,
        modify_valve_opening={valve_id: 0.0 for valve_id in valves},
    )
    return result or "success"


@router.get("/valveisolation/")
async def valve_isolation_endpoint(network: str, accident_element: str):
    return analyze_valve_isolation(network, accident_element)


@router.get("/flushinganalysis/")
async def flushing_analysis_endpoint(
    network: str, pipe_id: str, start_time: str, duration: float, flow: float
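Because `valves: List[str] = Query(...)` is a query parameter, clients pass the list by repeating the key; the endpoint then closes every listed valve by mapping each ID to an opening of 0.0. A request sketch (placeholder host, network, and valve IDs):

import requests

params = [
    ("network", "demo_network"),       # hypothetical model name
    ("start_time", "2024-01-01T08:00:00+08:00"),
    ("valves", "v1"),                  # repeated key -> List[str] on the server
    ("valves", "v2"),
    ("duration", "600"),
]
# Resulting query string: ?network=...&valves=v1&valves=v2&duration=600
resp = requests.get("http://localhost:8000/api/v1/valve_close_analysis/", params=params)
print(resp.text)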
@@ -102,6 +244,28 @@ async def flushing_analysis_endpoint(
    return flushing_analysis(network, pipe_id, start_time, duration, flow)


@router.get("/flushing_analysis/", response_class=PlainTextResponse)
async def fastapi_flushing_analysis(
    network: str,
    start_time: str,
    valves: List[str] = Query(...),
    valves_k: List[float] = Query(...),
    drainage_node_ID: str = Query(...),
    flush_flow: float = 0,
    duration: int | None = None,
) -> str:
    valve_opening = {valve_id: float(valves_k[idx]) for idx, valve_id in enumerate(valves)}
    result = flushing_analysis(
        name=network,
        modify_pattern_start_time=start_time,
        modify_total_duration=duration or 900,
        modify_valve_opening=valve_opening,
        drainage_node_ID=drainage_node_ID,
        flushing_flow=flush_flow,
    )
    return result or "success"


@router.get("/contaminantsimulation/")
async def contaminant_simulation_endpoint(
    network: str, node_id: str, start_time: str, duration: float, concentration: float
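The flushing endpoint pairs `valves` and `valves_k` positionally: the i-th opening coefficient applies to the i-th valve ID, so the two query lists must be the same length. The enumerate-based dict comprehension in the diff is equivalent to zipping the lists (valve IDs here are made up):

valves = ["v1", "v7"]     # hypothetical valve IDs
valves_k = [0.5, 0.0]     # opening per valve, matched by position

valve_opening = {valve_id: float(k) for valve_id, k in zip(valves, valves_k)}
print(valve_opening)  # {'v1': 0.5, 'v7': 0.0}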
@@ -109,14 +273,42 @@ async def contaminant_simulation_endpoint(
    return contaminant_simulation(network, node_id, start_time, duration, concentration)


@router.get("/contaminant_simulation/", response_class=PlainTextResponse)
async def fastapi_contaminant_simulation(
    network: str,
    start_time: str,
    source: str,
    concentration: float,
    duration: int = 900,
    pattern: str | None = None,
) -> str:
    result = contaminant_simulation(
        name=network,
        modify_pattern_start_time=start_time,
        modify_total_duration=duration,
        source=source,
        concentration=concentration,
        source_pattern=pattern,
    )
    return result or "success"


@router.get("/ageanalysis/")
async def age_analysis_endpoint(network: str):
    return age_analysis(network)


@router.get("/schedulinganalysis/")
async def scheduling_analysis_endpoint(network: str):
    return scheduling_analysis(network)
@router.get("/age_analysis/", response_class=PlainTextResponse)
async def fastapi_age_analysis(
    network: str, start_time: str, end_time: str, duration: int
) -> str:
    result = age_analysis(network, start_time, duration)
    return result or "success"


# @router.get("/schedulinganalysis/")
# async def scheduling_analysis_endpoint(network: str):
# return scheduling_analysis(network)


@router.get("/pressureregulation/")
@@ -126,14 +318,89 @@ async def pressure_regulation_endpoint(
    return pressure_regulation(network, target_node, target_pressure)


@router.post("/pressure_regulation/")
async def fastapi_pressure_regulation(data: PressureRegulation) -> str:
    item = data.dict()
    simulation.query_corresponding_element_id_and_query_id(item["network"])
    fixed_pumps = set(globals.fixed_pumps_id.keys())
    variable_pumps = set(globals.variable_pumps_id.keys())
    fixed_pump_pattern: dict[str, list] = {}
    variable_pump_pattern: dict[str, list] = {}
    for pump_id, values in item["pump_control"].items():
        if pump_id in variable_pumps:
            variable_pump_pattern[pump_id] = values
        else:
            fixed_pump_pattern[pump_id] = values
    pressure_regulation(
        name=item["network"],
        modify_pattern_start_time=item["start_time"],
        modify_total_duration=item["duration"] or 900,
        modify_tank_initial_level=item["tank_init_level"],
        modify_fixed_pump_pattern=fixed_pump_pattern or None,
        modify_variable_pump_pattern=variable_pump_pattern or None,
        scheme_Name=item["scheme_Name"],
    )
    return "success"


@router.get("/projectmanagement/")
async def project_management_endpoint(network: str):
    return project_management(network)


@router.get("/dailyschedulinganalysis/")
async def daily_scheduling_analysis_endpoint(network: str):
    return daily_scheduling_analysis(network)
@router.post("/project_management/")
async def fastapi_project_management(data: ProjectManagement) -> str:
    item = data.dict()
    return project_management(
        prj_name=item["network"],
        start_datetime=item["start_time"],
        pump_control=item["pump_control"],
        tank_initial_level_control=item["tank_init_level"],
        region_demand_control=item["region_demand"],
    )


# @router.get("/dailyschedulinganalysis/")
# async def daily_scheduling_analysis_endpoint(network: str):
# return daily_scheduling_analysis(network)


@router.post("/scheduling_analysis/")
async def fastapi_scheduling_analysis(data: SchedulingAnalysis) -> str:
    item = data.dict()
    return scheduling_simulation(
        item["network"],
        item["start_time"],
        item["pump_control"],
        item["tank_id"],
        item["water_plant_output_id"],
        item["time_delta"],
    )


@router.post("/daily_scheduling_analysis/")
async def fastapi_daily_scheduling_analysis(data: DailySchedulingAnalysis) -> str:
    item = data.dict()
    return daily_scheduling_simulation(
        item["network"],
        item["start_time"],
        item["pump_control"],
        item["reservoir_id"],
        item["tank_id"],
        item["water_plant_output_id"],
    )


@router.post("/network_project/")
async def fastapi_network_project(file: UploadFile = File()) -> str:
    temp_file_dir = "./inp/"
    if not os.path.exists(temp_file_dir):
        os.mkdir(temp_file_dir)
    temp_file_name = f'network_project_{datetime.now().strftime("%Y%m%d")}'
    temp_file_path = f"{temp_file_dir}{temp_file_name}.inp"
    with open(temp_file_path, "wb") as buffer:
        shutil.copyfileobj(file.file, buffer)
    return run_inp(temp_file_name)


@router.get("/networkupdate/")
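In fastapi_pressure_regulation, the single pump_control dict from the request is partitioned into variable-speed and fixed-speed pump patterns by checking membership in the cached variable-pump ID set; anything not recognized as variable falls through to the fixed group. A standalone sketch of that partition (the pump IDs and the set contents are invented stand-ins for globals.variable_pumps_id):

variable_pumps = {"pv1"}                 # hypothetical variable-speed pump IDs
pump_control = {"pv1": [1.0, 0.8], "pf2": [1, 1]}

fixed_pump_pattern: dict[str, list] = {}
variable_pump_pattern: dict[str, list] = {}
for pump_id, values in pump_control.items():
    # Variable-speed pumps get a speed pattern; everything else is on/off.
    if pump_id in variable_pumps:
        variable_pump_pattern[pump_id] = values
    else:
        fixed_pump_pattern[pump_id] = values

print(fixed_pump_pattern)     # {'pf2': [1, 1]}
print(variable_pump_pattern)  # {'pv1': [1.0, 0.8]}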
@@ -141,9 +408,60 @@ async def network_update_endpoint(network: str):
    return network_update(network)


@router.get("/pumpfailure/")
async def pump_failure_endpoint(network: str, pump_id: str, time: str):
    return pump_failure(network, pump_id, time)
@router.post("/network_update/")
async def fastapi_network_update(file: UploadFile = File()) -> str:
    default_folder = "./"
    temp_file_name = f'network_update_{datetime.now().strftime("%Y%m%d")}'
    temp_file_path = os.path.join(default_folder, temp_file_name)
    try:
        with open(temp_file_path, "wb") as buffer:
            shutil.copyfileobj(file.file, buffer)
        network_update(temp_file_path)
        return json.dumps({"message": "Network update succeeded"})
    except Exception as exc:
        raise HTTPException(status_code=500, detail=f"Database operation failed: {exc}")


# @router.get("/pumpfailure/")
# async def pump_failure_endpoint(network: str, pump_id: str, time: str):
# return pump_failure(network, pump_id, time)


@router.post("/pump_failure/")
async def fastapi_pump_failure(data: PumpFailureState) -> str:
    item = data.dict()
    with open("./pump_failure_message.txt", "a", encoding="utf-8-sig") as f1:
        f1.write(
            "[{}] {}\n".format(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), item)
        )
    with open("./pump_failure_status.txt", "r", encoding="utf-8-sig") as f2:
        lines = f2.readlines()
    first_stage_pump_status_dict = json.loads(json.dumps(eval(lines[0])))
    second_stage_pump_status_dict = json.loads(json.dumps(eval(lines[-1])))
    pump_status_dict = {
        "first": first_stage_pump_status_dict,
        "second": second_stage_pump_status_dict,
    }
    status_info = item.copy()
    for pump_type in status_info["pump_status"].keys():
        if pump_type in pump_status_dict.keys():
            if all(
                pump_id in pump_status_dict[pump_type].keys()
                for pump_id in status_info["pump_status"][pump_type].keys()
            ):
                for pump_id in status_info["pump_status"][pump_type].keys():
                    pump_status_dict[pump_type][pump_id] = int(
                        status_info["pump_status"][pump_type][pump_id]
                    )
            else:
                return json.dumps("ERROR: Wrong Pump ID")
        else:
            return json.dumps("ERROR: Wrong Pump Type")
    with open("./pump_failure_status.txt", "w", encoding="utf-8-sig") as f2_:
        f2_.write(
            "{}\n{}".format(pump_status_dict["first"], pump_status_dict["second"])
        )
    return json.dumps("SUCCESS")


@router.get("/pressuresensorplacementsensitivity/")
@@ -155,6 +473,20 @@ async def pressure_sensor_placement_sensitivity_endpoint(
    )


@router.post("/pressure_sensor_placement_sensitivity/")
async def fastapi_pressure_sensor_placement_sensitivity(
    data: PressureSensorPlacement,
) -> None:
    item = data.dict()
    pressure_sensor_placement_sensitivity(
        name=item["name"],
        scheme_name=item["scheme_name"],
        sensor_number=item["sensor_number"],
        min_diameter=item["min_diameter"],
        username=item["username"],
    )


@router.get("/pressuresensorplacementkmeans/")
async def pressure_sensor_placement_kmeans_endpoint(
    name: str, scheme_name: str, sensor_number: int, min_diameter: int, username: str
@@ -162,3 +494,152 @@ async def pressure_sensor_placement_kmeans_endpoint(
    return pressure_sensor_placement_kmeans(
        name, scheme_name, sensor_number, min_diameter, username
    )


@router.post("/pressure_sensor_placement_kmeans/")
async def fastapi_pressure_sensor_placement_kmeans(
    data: PressureSensorPlacement,
) -> None:
    item = data.dict()
    pressure_sensor_placement_kmeans(
        name=item["name"],
        scheme_name=item["scheme_name"],
        sensor_number=item["sensor_number"],
        min_diameter=item["min_diameter"],
        username=item["username"],
    )


@router.post("/sensorplacementscheme/create")
async def fastapi_pressure_sensor_placement(
    network: str = Query(...),
    scheme_name: str = Query(...),
    sensor_type: str = Query(...),
    method: str = Query(...),
    sensor_count: int = Query(...),
    min_diameter: int = Query(0),
    user_name: str = Query(...),
) -> str:
    if method not in ["sensitivity", "kmeans"]:
        raise HTTPException(
            status_code=400, detail="Invalid method. Must be 'sensitivity' or 'kmeans'"
        )
    if method == "sensitivity":
        pressure_sensor_placement_sensitivity(
            name=network,
            scheme_name=scheme_name,
            sensor_number=sensor_count,
            min_diameter=min_diameter,
            username=user_name,
        )
    elif method == "kmeans":
        pressure_sensor_placement_kmeans(
            name=network,
            scheme_name=scheme_name,
            sensor_number=sensor_count,
            min_diameter=min_diameter,
            username=user_name,
        )
    return "success"


@router.post("/scadadevicedatacleaning/")
async def fastapi_scada_device_data_cleaning(
    network: str = Query(...),
    ids_list: List[str] = Query(...),
    start_time: str = Query(...),
    end_time: str = Query(...),
    user_name: str = Query(...),
) -> str:
    item = {
        "network": network,
        "ids": ids_list,
        "start_time": start_time,
        "end_time": end_time,
        "user_name": user_name,
    }
    query_ids_list = item["ids"][0].split(",")
    scada_data = influxdb_api.query_SCADA_data_by_device_ID_and_timerange(
        query_ids_list=query_ids_list,
        start_time=item["start_time"],
        end_time=item["end_time"],
    )
    scada_device_info = influxdb_api.query_pg_scada_info(item["network"])
    scada_device_info_dict = {info["id"]: info for info in scada_device_info}
    type_groups: dict[str, list[str]] = {}
    for device_id in query_ids_list:
        device_info = scada_device_info_dict.get(device_id, {})
        device_type = device_info.get("type", "unknown")
        type_groups.setdefault(device_type, []).append(device_id)
    for device_type, device_ids in type_groups.items():
        if device_type not in ["pressure", "pipe_flow"]:
            continue
        type_scada_data = {
            device_id: scada_data[device_id]
            for device_id in device_ids
            if device_id in scada_data
        }
        if not type_scada_data:
            continue
        time_list = [record["time"] for record in next(iter(type_scada_data.values()))]
        df = pd.DataFrame({"time": time_list})
        for device_id in device_ids:
            if device_id in type_scada_data:
                values = [record["value"] for record in type_scada_data[device_id]]
                df[device_id] = values
        value_df = df.drop(columns=["time"])
        if device_type == "pressure":
            cleaned_value_df = Pdataclean.clean_pressure_data_df_km(value_df)
        elif device_type == "pipe_flow":
            cleaned_value_df = Fdataclean.clean_flow_data_df_kf(value_df)
        cleaned_value_df = pd.DataFrame(cleaned_value_df)
        cleaned_df = pd.concat([df["time"], cleaned_value_df], axis=1)
        influxdb_api.import_multicolumn_data_from_dict(
            data_dict=cleaned_df.to_dict("list"),
            raw=False,
        )
    return "success"


@router.post("/runsimulationmanuallybydate/")
async def fastapi_run_simulation_manually_by_date(
    data: RunSimulationManuallyByDate,
) -> dict[str, str]:
    item = data.dict()
    try:
        simulation.query_corresponding_element_id_and_query_id(item["name"])
        simulation.query_corresponding_pattern_id_and_query_id(item["name"])
        region_result = simulation.query_non_realtime_region(item["name"])
        globals.source_outflow_region_id = simulation.get_source_outflow_region_id(
            item["name"], region_result
        )
        globals.realtime_region_pipe_flow_and_demand_id = (
            simulation.query_realtime_region_pipe_flow_and_demand_id(
                item["name"], region_result
            )
        )
        globals.pipe_flow_region_patterns = simulation.query_pipe_flow_region_patterns(
            item["name"]
        )
        globals.non_realtime_region_patterns = (
            simulation.query_non_realtime_region_patterns(item["name"], region_result)
        )
        (
            globals.source_outflow_region_patterns,
            globals.realtime_region_pipe_flow_and_demand_patterns,
        ) = simulation.get_realtime_region_patterns(
            item["name"],
            globals.source_outflow_region_id,
            globals.realtime_region_pipe_flow_and_demand_id,
        )
        base_date = datetime.strptime(item["simulation_date"], "%Y-%m-%d")
        thread = threading.Thread(
            target=lambda: run_simulation_manually_by_date(
                item["name"], base_date, item["start_time"], item["duration"]
            )
        )
        thread.start()
        thread.join()
        return {"status": "success"}
    except Exception as exc:
        return {"status": "error", "message": str(exc)}
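The SCADA cleaning endpoint first groups the requested device IDs by their registered type and then cleans only the "pressure" and "pipe_flow" groups, each with its own cleaner (Pdataclean vs. Fdataclean). A reduced sketch of the grouping step; the device metadata below is invented, whereas in the diff it comes from query_pg_scada_info:

device_info = {                       # hypothetical metadata keyed by device ID
    "d1": {"type": "pressure"},
    "d2": {"type": "pipe_flow"},
    "d3": {"type": "tank_level"},     # skipped: no cleaner registered for it
}
query_ids = ["d1", "d2", "d3", "d4"]  # d4 unregistered -> grouped as "unknown"

type_groups: dict[str, list[str]] = {}
for device_id in query_ids:
    device_type = device_info.get(device_id, {}).get("type", "unknown")
    type_groups.setdefault(device_type, []).append(device_id)

cleanable = {t: ids for t, ids in type_groups.items() if t in ["pressure", "pipe_flow"]}
print(cleanable)  # {'pressure': ['d1'], 'pipe_flow': ['d2']}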
@@ -35,6 +35,9 @@ from app.api.v1.endpoints.components import (
    visuals,
)

from app.infra.db.postgresql import router as postgresql_router
from app.infra.db.timescaledb import router as timescaledb_router

api_router = APIRouter()

# Core Services
@@ -75,5 +78,9 @@ api_router.include_router(misc.router, tags=["Misc"])
api_router.include_router(risk.router, tags=["Risk"])
api_router.include_router(cache.router, tags=["Cache"])

# Database Routers
api_router.include_router(timescaledb_router, tags=["TimescaleDB"])
api_router.include_router(postgresql_router, tags=["PostgreSQL"])

# Extension
api_router.include_router(extension.router, tags=["Extension"])
@@ -6,7 +6,7 @@ from .database import get_database_instance
from .scada_info import ScadaRepository
from .scheme import SchemeRepository

router = APIRouter(prefix="/postgresql", tags=["postgresql"])
router = APIRouter()


# Create a connection dependency function that supports database selection
@@ -583,9 +583,7 @@ class CompositeQueries:
        )

        # 7. Run the prediction with PipelineHealthAnalyzer
        analyzer = PipelineHealthAnalyzer(
            model_path="api_ex/model/my_survival_forest_model_quxi.joblib"
        )
        analyzer = PipelineHealthAnalyzer()
        survival_functions = analyzer.predict_survival(data)
        # 8. Combine the results
        results = []
@@ -10,7 +10,7 @@ from .schemas.scada import ScadaRepository
from .composite_queries import CompositeQueries
from app.infra.db.postgresql.database import get_database_instance as get_postgres_database_instance

router = APIRouter(prefix="/timescaledb", tags=["TimescaleDB"])
router = APIRouter()


# Create a connection dependency function that supports database selection
@@ -7,8 +7,6 @@ from datetime import datetime

import app.services.project_info as project_info
from app.api.v1.router import api_router
from app.infra.db.timescaledb import router as timescaledb_router
from app.infra.db.postgresql import router as postgresql_router
from app.infra.db.timescaledb.database import db as tsdb
from app.infra.db.postgresql.database import db as pgdb
from app.services.tjnetwork import open_project
@@ -57,5 +55,5 @@ app.add_middleware(GZipMiddleware, minimum_size=1000)

# Include Routers
app.include_router(api_router, prefix="/api/v1")
app.include_router(timescaledb_router)
app.include_router(postgresql_router)
# Legacy Routers without version prefix
# app.include_router(api_router)
@@ -9,6 +9,7 @@ from app.services.scheme_management import (
    upload_shp_to_pg,
    submit_risk_probability_result,
)
from app.services.valve_isolation import analyze_valve_isolation
from app.services.simulation_ops import (
    project_management,
    scheduling_simulation,
@@ -29,4 +30,5 @@ __all__ = [
    "project_management",
    "scheduling_simulation",
    "daily_scheduling_simulation",
    "analyze_valve_isolation",
]
app/services/valve_isolation.py (new file, 7 lines)
@@ -0,0 +1,7 @@
from typing import Any

from app.algorithms.valve_isolation import valve_isolation_analysis


def analyze_valve_isolation(network: str, accident_element: str) -> dict[str, Any]:
    return valve_isolation_analysis(network, accident_element)