From 9d7a9fb2fdb6d0e48d44128b31070e572df23fa6 Mon Sep 17 00:00:00 2001 From: Jiang Date: Thu, 29 Jan 2026 11:39:50 +0800 Subject: [PATCH] =?UTF-8?q?=E8=B0=83=E6=95=B4api=E7=BB=93=E6=9E=84?= =?UTF-8?q?=EF=BC=9B=E6=81=A2=E5=A4=8D=E4=B8=A2=E5=A4=B1=E9=83=A8=E5=88=86?= =?UTF-8?q?api=EF=BC=8C=E8=AF=A6=E8=A7=81scripts=E6=96=87=E4=BB=B6?= =?UTF-8?q?=E5=A4=B9=EF=BC=9B=E6=96=B0=E5=A2=9E=E5=85=B3=E9=98=80=E5=88=86?= =?UTF-8?q?=E6=9E=90=E7=AE=97=E6=B3=95=EF=BC=8C=E5=AE=9E=E7=8E=B0api?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/algorithms/__init__.py | 2 + .../api_ex/pipeline_health_analyzer.py | 10 +- app/algorithms/valve_isolation.py | 86 + app/api/v1/endpoints/network/geometry.py | 38 +- app/api/v1/endpoints/simulation.py | 505 +- app/api/v1/router.py | 7 + app/infra/db/postgresql/router.py | 2 +- app/infra/db/timescaledb/composite_queries.py | 4 +- app/infra/db/timescaledb/router.py | 2 +- app/main.py | 6 +- app/services/__init__.py | 2 + app/services/valve_isolation.py | 7 + scripts/main.py | 4244 +++++++++++++++++ scripts/main_api_endpoints.md | 398 ++ scripts/missing_api_endpoints.md | 26 + scripts/online_Analysis.py | 1568 ++++++ tests/test_pipeline_health_analyzer.py | 4 +- 17 files changed, 6866 insertions(+), 45 deletions(-) create mode 100644 app/algorithms/valve_isolation.py create mode 100644 app/services/valve_isolation.py create mode 100644 scripts/main.py create mode 100644 scripts/main_api_endpoints.md create mode 100644 scripts/missing_api_endpoints.md create mode 100644 scripts/online_Analysis.py diff --git a/app/algorithms/__init__.py b/app/algorithms/__init__.py index c64213d..b96c814 100644 --- a/app/algorithms/__init__.py +++ b/app/algorithms/__init__.py @@ -3,6 +3,7 @@ from app.algorithms.sensors import ( pressure_sensor_placement_sensitivity, pressure_sensor_placement_kmeans, ) +from app.algorithms.valve_isolation import valve_isolation_analysis from app.algorithms.simulations import ( 
convert_to_local_unit, burst_analysis, @@ -25,4 +26,5 @@ __all__ = [ "contaminant_simulation", "age_analysis", "pressure_regulation", + "valve_isolation_analysis", ] diff --git a/app/algorithms/api_ex/pipeline_health_analyzer.py b/app/algorithms/api_ex/pipeline_health_analyzer.py index 0edb27a..ed1caca 100644 --- a/app/algorithms/api_ex/pipeline_health_analyzer.py +++ b/app/algorithms/api_ex/pipeline_health_analyzer.py @@ -14,14 +14,20 @@ class PipelineHealthAnalyzer: 使用前需确保安装依赖:joblib, pandas, numpy, scikit-survival, matplotlib。 """ - def __init__(self, model_path: str = "model/my_survival_forest_model_quxi.joblib"): + def __init__(self, model_path: str = None): """ 初始化分析器,加载预训练的随机生存森林模型。 - :param model_path: 模型文件的路径(默认为相对路径 'model/my_survival_forest_model_quxi.joblib')。 + :param model_path: 模型文件的路径(默认为相对路径 './model/my_survival_forest_model_quxi.joblib')。 :raises FileNotFoundError: 如果模型文件不存在。 :raises Exception: 如果模型加载失败。 """ + if model_path is None: + model_path = os.path.join( + os.path.dirname(__file__), + "model", + "my_survival_forest_model_quxi.joblib", + ) # 确保 model 目录存在 model_dir = os.path.dirname(model_path) if model_dir and not os.path.exists(model_dir): diff --git a/app/algorithms/valve_isolation.py b/app/algorithms/valve_isolation.py new file mode 100644 index 0000000..efef3d5 --- /dev/null +++ b/app/algorithms/valve_isolation.py @@ -0,0 +1,86 @@ +from collections import defaultdict, deque +from typing import Any + +from app.services.tjnetwork import ( + get_link_properties, + get_link_type, + get_network_link_nodes, + is_link, + is_node, +) + + +VALVE_LINK_TYPE = "valve" + + +def _parse_link_entry(link_entry: str) -> tuple[str, str, str, str]: + parts = link_entry.split(":", 3) + if len(parts) != 4: + raise ValueError(f"Invalid link entry format: {link_entry}") + return parts[0], parts[1], parts[2], parts[3] + + +def valve_isolation_analysis(network: str, accident_element: str) -> dict[str, Any]: + """ + 关阀搜索/分析:基于拓扑结构确定事故隔离所需关阀。 + :param network: 
模型名称 + :param accident_element: 事故点(节点或管道/泵/阀门ID) + :return: dict,包含受影响节点、必须关闭阀门、可选阀门等信息 + """ + if is_node(network, accident_element): + start_nodes = {accident_element} + accident_type = "node" + elif is_link(network, accident_element): + accident_type = get_link_type(network, accident_element) + link_props = get_link_properties(network, accident_element) + node1 = link_props.get("node1") + node2 = link_props.get("node2") + if not node1 or not node2: + raise ValueError("Accident link missing node endpoints") + start_nodes = {node1, node2} + else: + raise ValueError("Accident element not found") + + adjacency: dict[str, set[str]] = defaultdict(set) + valve_links: dict[str, tuple[str, str]] = {} + for link_entry in get_network_link_nodes(network): + link_id, link_type, node1, node2 = _parse_link_entry(link_entry) + link_type_name = str(link_type).lower() + if link_type_name == VALVE_LINK_TYPE: + valve_links[link_id] = (node1, node2) + continue + adjacency[node1].add(node2) + adjacency[node2].add(node1) + + affected_nodes: set[str] = set() + queue = deque(start_nodes) + while queue: + node = queue.popleft() + if node in affected_nodes: + continue + affected_nodes.add(node) + for neighbor in adjacency.get(node, []): + if neighbor not in affected_nodes: + queue.append(neighbor) + + must_close_valves: list[str] = [] + optional_valves: list[str] = [] + for valve_id, (node1, node2) in valve_links.items(): + in_node1 = node1 in affected_nodes + in_node2 = node2 in affected_nodes + if in_node1 and in_node2: + optional_valves.append(valve_id) + elif in_node1 or in_node2: + must_close_valves.append(valve_id) + + must_close_valves.sort() + optional_valves.sort() + + return { + "accident_element": accident_element, + "accident_type": accident_type, + "affected_nodes": sorted(affected_nodes), + "must_close_valves": must_close_valves, + "optional_valves": optional_valves, + "isolatable": len(must_close_valves) > 0, + } diff --git a/app/api/v1/endpoints/network/geometry.py 
b/app/api/v1/endpoints/network/geometry.py index 563fc28..52470d2 100644 --- a/app/api/v1/endpoints/network/geometry.py +++ b/app/api/v1/endpoints/network/geometry.py @@ -11,19 +11,19 @@ router = APIRouter() # coord 24.[COORDINATES] ############################################################ -@router.get("/getcoordschema/") -async def fastapi_get_coord_schema(network: str) -> dict[str, dict[str, Any]]: - return get_coord_schema(network) +# @router.get("/getcoordschema/") +# async def fastapi_get_coord_schema(network: str) -> dict[str, dict[str, Any]]: +# return get_coord_schema(network) -@router.get("/getcoord/") -async def fastapi_get_coord(network: str, node: str) -> dict[str, Any]: - return get_coord(network, node) +# @router.get("/getcoord/") +# async def fastapi_get_coord(network: str, node: str) -> dict[str, Any]: +# return get_coord(network, node) -# example: set_coord(p, ChangeSet({'node': 'j1', 'x': 1.0, 'y': 2.0})) -@router.post("/setcoord/", response_model=None) -async def fastapi_set_coord(network: str, req: Request) -> ChangeSet: - props = await req.json() - return set_coord(network, ChangeSet(props)) +# # example: set_coord(p, ChangeSet({'node': 'j1', 'x': 1.0, 'y': 2.0})) +# @router.post("/setcoord/", response_model=None) +# async def fastapi_set_coord(network: str, req: Request) -> ChangeSet: +# props = await req.json() +# return set_coord(network, ChangeSet(props)) @router.get("/getnodecoord/") async def fastapi_get_node_coord(network: str, node: str) -> dict[str, float] | None: @@ -69,12 +69,12 @@ async def fastapi_get_major_pipe_nodes(network: str, diameter: int) -> list[str] async def fastapi_get_network_link_nodes(network: str) -> list[str] | None: return get_network_link_nodes(network) -@router.get("/getallcoords/") -async def fastapi_get_all_coords(network: str) -> list[Any]: - return get_all_coords(network) +# @router.get("/getallcoords/") +# async def fastapi_get_all_coords(network: str) -> list[Any]: +# return get_all_coords(network) 
-@router.get("/projectcoordinates/") -async def fastapi_project_coordinates( - network: str, from_epsg: int, to_epsg: int -) -> ChangeSet: - return project_coordinates(network, from_epsg, to_epsg) +# @router.get("/projectcoordinates/") +# async def fastapi_project_coordinates( +# network: str, from_epsg: int, to_epsg: int +# ) -> ChangeSet: +# return project_coordinates(network, from_epsg, to_epsg) diff --git a/app/api/v1/endpoints/simulation.py b/app/api/v1/endpoints/simulation.py index c00a5a7..afb52ce 100644 --- a/app/api/v1/endpoints/simulation.py +++ b/app/api/v1/endpoints/simulation.py @@ -1,6 +1,15 @@ -from typing import Any -from fastapi import APIRouter, HTTPException +from typing import Any, List, Optional +from datetime import datetime, timedelta +import json +import os +import shutil +import threading +import pandas as pd +from fastapi import APIRouter, HTTPException, File, UploadFile, Query from fastapi.responses import PlainTextResponse +import app.infra.db.influxdb.api as influxdb_api +import app.services.simulation as simulation +import app.services.globals as globals from app.infra.cache.redis_client import redis_client from app.services.tjnetwork import ( run_project, @@ -21,12 +30,107 @@ from app.algorithms.sensors import ( pressure_sensor_placement_sensitivity, pressure_sensor_placement_kmeans, ) +import app.algorithms.api_ex.Fdataclean as Fdataclean +import app.algorithms.api_ex.Pdataclean as Pdataclean from app.services.network_import import network_update -from app.services.simulation_ops import project_management +from app.services.simulation_ops import ( + project_management, + scheduling_simulation, + daily_scheduling_simulation, +) +from app.services.valve_isolation import analyze_valve_isolation +from pydantic import BaseModel router = APIRouter() +class RunSimulationManuallyByDate(BaseModel): + name: str + simulation_date: str + start_time: str + duration: int + + +class BurstAnalysis(BaseModel): + name: str + modify_pattern_start_time: 
str + burst_ID: List[str] | str | None = None + burst_size: List[float] | float | int | None = None + modify_total_duration: int = 900 + modify_fixed_pump_pattern: Optional[dict[str, list]] = None + modify_variable_pump_pattern: Optional[dict[str, list]] = None + modify_valve_opening: Optional[dict[str, float]] = None + scheme_Name: Optional[str] = None + + +class SchedulingAnalysis(BaseModel): + network: str + start_time: str + pump_control: dict + tank_id: str + water_plant_output_id: str + time_delta: Optional[int] = 300 + + +class PressureRegulation(BaseModel): + network: str + start_time: str + pump_control: dict + tank_init_level: Optional[dict] = None + duration: Optional[int] = 900 + scheme_Name: Optional[str] = None + + +class ProjectManagement(BaseModel): + network: str + start_time: str + pump_control: dict + tank_init_level: Optional[dict] = None + region_demand: Optional[dict] = None + + +class DailySchedulingAnalysis(BaseModel): + network: str + start_time: str + pump_control: dict + reservoir_id: str + tank_id: str + water_plant_output_id: str + time_delta: Optional[int] = 300 + + +class PumpFailureState(BaseModel): + time: str + pump_status: dict + + +class PressureSensorPlacement(BaseModel): + name: str + scheme_name: str + sensor_number: int + min_diameter: int = 0 + username: str + + +def run_simulation_manually_by_date( + network_name: str, base_date: datetime, start_time: str, duration: int +) -> None: + start_hour, start_minute, start_second = map(int, start_time.split(":")) + start_datetime = base_date.replace( + hour=start_hour, minute=start_minute, second=start_second + ) + end_datetime = start_datetime + timedelta(minutes=duration) + current_time = start_datetime + while current_time < end_datetime: + iso_time = current_time.strftime("%Y-%m-%dT%H:%M:%S") + "+08:00" + simulation.run_simulation( + name=network_name, + simulation_type="realtime", + modify_pattern_start_time=iso_time, + ) + current_time += timedelta(minutes=15) + + # 
必须用这个PlainTextResponse,不然每个key都有引号 @router.get("/runproject/", response_class=PlainTextResponse) async def run_project_endpoint(network: str) -> str: @@ -88,6 +192,23 @@ async def burst_analysis_endpoint( return burst_analysis(network, pipe_id, start_time, end_time, burst_flow) +@router.post("/burst_analysis/") +async def fastapi_burst_analysis(data: BurstAnalysis) -> str: + item = data.dict() + burst_analysis( + name=item["name"], + modify_pattern_start_time=item["modify_pattern_start_time"], + burst_ID=item["burst_ID"], + burst_size=item["burst_size"], + modify_total_duration=item["modify_total_duration"], + modify_fixed_pump_pattern=item["modify_fixed_pump_pattern"], + modify_variable_pump_pattern=item["modify_variable_pump_pattern"], + modify_valve_opening=item["modify_valve_opening"], + scheme_Name=item["scheme_Name"], + ) + return "success" + + @router.get("/valvecloseanalysis/") async def valve_close_analysis_endpoint( network: str, valve_id: str, start_time: str, end_time: str @@ -95,6 +216,27 @@ async def valve_close_analysis_endpoint( return valve_close_analysis(network, valve_id, start_time, end_time) +@router.get("/valve_close_analysis/", response_class=PlainTextResponse) +async def fastapi_valve_close_analysis( + network: str, + start_time: str, + valves: List[str] = Query(...), + duration: int | None = None, +) -> str: + result = valve_close_analysis( + name=network, + modify_pattern_start_time=start_time, + modify_total_duration=duration or 900, + modify_valve_opening={valve_id: 0.0 for valve_id in valves}, + ) + return result or "success" + + +@router.get("/valveisolation/") +async def valve_isolation_endpoint(network: str, accident_element: str): + return analyze_valve_isolation(network, accident_element) + + @router.get("/flushinganalysis/") async def flushing_analysis_endpoint( network: str, pipe_id: str, start_time: str, duration: float, flow: float @@ -102,6 +244,28 @@ async def flushing_analysis_endpoint( return flushing_analysis(network, 
pipe_id, start_time, duration, flow) +@router.get("/flushing_analysis/", response_class=PlainTextResponse) +async def fastapi_flushing_analysis( + network: str, + start_time: str, + valves: List[str] = Query(...), + valves_k: List[float] = Query(...), + drainage_node_ID: str = Query(...), + flush_flow: float = 0, + duration: int | None = None, +) -> str: + valve_opening = {valve_id: float(valves_k[idx]) for idx, valve_id in enumerate(valves)} + result = flushing_analysis( + name=network, + modify_pattern_start_time=start_time, + modify_total_duration=duration or 900, + modify_valve_opening=valve_opening, + drainage_node_ID=drainage_node_ID, + flushing_flow=flush_flow, + ) + return result or "success" + + @router.get("/contaminantsimulation/") async def contaminant_simulation_endpoint( network: str, node_id: str, start_time: str, duration: float, concentration: float @@ -109,14 +273,42 @@ async def contaminant_simulation_endpoint( return contaminant_simulation(network, node_id, start_time, duration, concentration) +@router.get("/contaminant_simulation/", response_class=PlainTextResponse) +async def fastapi_contaminant_simulation( + network: str, + start_time: str, + source: str, + concentration: float, + duration: int = 900, + pattern: str | None = None, +) -> str: + result = contaminant_simulation( + name=network, + modify_pattern_start_time=start_time, + modify_total_duration=duration, + source=source, + concentration=concentration, + source_pattern=pattern, + ) + return result or "success" + + @router.get("/ageanalysis/") async def age_analysis_endpoint(network: str): return age_analysis(network) -@router.get("/schedulinganalysis/") -async def scheduling_analysis_endpoint(network: str): - return scheduling_analysis(network) +@router.get("/age_analysis/", response_class=PlainTextResponse) +async def fastapi_age_analysis( + network: str, start_time: str, end_time: str, duration: int +) -> str: + result = age_analysis(network, start_time, duration) + return result 
or "success" + + +# @router.get("/schedulinganalysis/") +# async def scheduling_analysis_endpoint(network: str): +# return scheduling_analysis(network) @router.get("/pressureregulation/") @@ -126,14 +318,89 @@ async def pressure_regulation_endpoint( return pressure_regulation(network, target_node, target_pressure) +@router.post("/pressure_regulation/") +async def fastapi_pressure_regulation(data: PressureRegulation) -> str: + item = data.dict() + simulation.query_corresponding_element_id_and_query_id(item["network"]) + fixed_pumps = set(globals.fixed_pumps_id.keys()) + variable_pumps = set(globals.variable_pumps_id.keys()) + fixed_pump_pattern: dict[str, list] = {} + variable_pump_pattern: dict[str, list] = {} + for pump_id, values in item["pump_control"].items(): + if pump_id in variable_pumps: + variable_pump_pattern[pump_id] = values + else: + fixed_pump_pattern[pump_id] = values + pressure_regulation( + name=item["network"], + modify_pattern_start_time=item["start_time"], + modify_total_duration=item["duration"] or 900, + modify_tank_initial_level=item["tank_init_level"], + modify_fixed_pump_pattern=fixed_pump_pattern or None, + modify_variable_pump_pattern=variable_pump_pattern or None, + scheme_Name=item["scheme_Name"], + ) + return "success" + + @router.get("/projectmanagement/") async def project_management_endpoint(network: str): return project_management(network) -@router.get("/dailyschedulinganalysis/") -async def daily_scheduling_analysis_endpoint(network: str): - return daily_scheduling_analysis(network) +@router.post("/project_management/") +async def fastapi_project_management(data: ProjectManagement) -> str: + item = data.dict() + return project_management( + prj_name=item["network"], + start_datetime=item["start_time"], + pump_control=item["pump_control"], + tank_initial_level_control=item["tank_init_level"], + region_demand_control=item["region_demand"], + ) + + +# @router.get("/dailyschedulinganalysis/") +# async def 
daily_scheduling_analysis_endpoint(network: str): +# return daily_scheduling_analysis(network) + + +@router.post("/scheduling_analysis/") +async def fastapi_scheduling_analysis(data: SchedulingAnalysis) -> str: + item = data.dict() + return scheduling_simulation( + item["network"], + item["start_time"], + item["pump_control"], + item["tank_id"], + item["water_plant_output_id"], + item["time_delta"], + ) + + +@router.post("/daily_scheduling_analysis/") +async def fastapi_daily_scheduling_analysis(data: DailySchedulingAnalysis) -> str: + item = data.dict() + return daily_scheduling_simulation( + item["network"], + item["start_time"], + item["pump_control"], + item["reservoir_id"], + item["tank_id"], + item["water_plant_output_id"], + ) + + +@router.post("/network_project/") +async def fastapi_network_project(file: UploadFile = File()) -> str: + temp_file_dir = "./inp/" + if not os.path.exists(temp_file_dir): + os.mkdir(temp_file_dir) + temp_file_name = f'network_project_{datetime.now().strftime("%Y%m%d")}' + temp_file_path = f"{temp_file_dir}{temp_file_name}.inp" + with open(temp_file_path, "wb") as buffer: + shutil.copyfileobj(file.file, buffer) + return run_inp(temp_file_name) @router.get("/networkupdate/") @@ -141,9 +408,60 @@ async def network_update_endpoint(network: str): return network_update(network) -@router.get("/pumpfailure/") -async def pump_failure_endpoint(network: str, pump_id: str, time: str): - return pump_failure(network, pump_id, time) +@router.post("/network_update/") +async def fastapi_network_update(file: UploadFile = File()) -> str: + default_folder = "./" + temp_file_name = f'network_update_{datetime.now().strftime("%Y%m%d")}' + temp_file_path = os.path.join(default_folder, temp_file_name) + try: + with open(temp_file_path, "wb") as buffer: + shutil.copyfileobj(file.file, buffer) + network_update(temp_file_path) + return json.dumps({"message": "管网更新成功"}) + except Exception as exc: + raise HTTPException(status_code=500, detail=f"数据库操作失败: 
{exc}") + + +# @router.get("/pumpfailure/") +# async def pump_failure_endpoint(network: str, pump_id: str, time: str): +# return pump_failure(network, pump_id, time) + + +@router.post("/pump_failure/") +async def fastapi_pump_failure(data: PumpFailureState) -> str: + item = data.dict() + with open("./pump_failure_message.txt", "a", encoding="utf-8-sig") as f1: + f1.write( + "[{}] {}\n".format(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), item) + ) + with open("./pump_failure_status.txt", "r", encoding="utf-8-sig") as f2: + lines = f2.readlines() + first_stage_pump_status_dict = json.loads(json.dumps(eval(lines[0]))) + second_stage_pump_status_dict = json.loads(json.dumps(eval(lines[-1]))) + pump_status_dict = { + "first": first_stage_pump_status_dict, + "second": second_stage_pump_status_dict, + } + status_info = item.copy() + for pump_type in status_info["pump_status"].keys(): + if pump_type in pump_status_dict.keys(): + if all( + pump_id in pump_status_dict[pump_type].keys() + for pump_id in status_info["pump_status"][pump_type].keys() + ): + for pump_id in status_info["pump_status"][pump_type].keys(): + pump_status_dict[pump_type][pump_id] = int( + status_info["pump_status"][pump_type][pump_id] + ) + else: + return json.dumps("ERROR: Wrong Pump ID") + else: + return json.dumps("ERROR: Wrong Pump Type") + with open("./pump_failure_status.txt", "w", encoding="utf-8-sig") as f2_: + f2_.write( + "{}\n{}".format(pump_status_dict["first"], pump_status_dict["second"]) + ) + return json.dumps("SUCCESS") @router.get("/pressuresensorplacementsensitivity/") @@ -155,6 +473,20 @@ async def pressure_sensor_placement_sensitivity_endpoint( ) +@router.post("/pressure_sensor_placement_sensitivity/") +async def fastapi_pressure_sensor_placement_sensitivity( + data: PressureSensorPlacement, +) -> None: + item = data.dict() + pressure_sensor_placement_sensitivity( + name=item["name"], + scheme_name=item["scheme_name"], + sensor_number=item["sensor_number"], + 
min_diameter=item["min_diameter"], + username=item["username"], + ) + + @router.get("/pressuresensorplacementkmeans/") async def pressure_sensor_placement_kmeans_endpoint( name: str, scheme_name: str, sensor_number: int, min_diameter: int, username: str @@ -162,3 +494,152 @@ async def pressure_sensor_placement_kmeans_endpoint( return pressure_sensor_placement_kmeans( name, scheme_name, sensor_number, min_diameter, username ) + + +@router.post("/pressure_sensor_placement_kmeans/") +async def fastapi_pressure_sensor_placement_kmeans( + data: PressureSensorPlacement, +) -> None: + item = data.dict() + pressure_sensor_placement_kmeans( + name=item["name"], + scheme_name=item["scheme_name"], + sensor_number=item["sensor_number"], + min_diameter=item["min_diameter"], + username=item["username"], + ) + + +@router.post("/sensorplacementscheme/create") +async def fastapi_pressure_sensor_placement( + network: str = Query(...), + scheme_name: str = Query(...), + sensor_type: str = Query(...), + method: str = Query(...), + sensor_count: int = Query(...), + min_diameter: int = Query(0), + user_name: str = Query(...), +) -> str: + if method not in ["sensitivity", "kmeans"]: + raise HTTPException( + status_code=400, detail="Invalid method. 
Must be 'sensitivity' or 'kmeans'" + ) + if method == "sensitivity": + pressure_sensor_placement_sensitivity( + name=network, + scheme_name=scheme_name, + sensor_number=sensor_count, + min_diameter=min_diameter, + username=user_name, + ) + elif method == "kmeans": + pressure_sensor_placement_kmeans( + name=network, + scheme_name=scheme_name, + sensor_number=sensor_count, + min_diameter=min_diameter, + username=user_name, + ) + return "success" + + +@router.post("/scadadevicedatacleaning/") +async def fastapi_scada_device_data_cleaning( + network: str = Query(...), + ids_list: List[str] = Query(...), + start_time: str = Query(...), + end_time: str = Query(...), + user_name: str = Query(...), +) -> str: + item = { + "network": network, + "ids": ids_list, + "start_time": start_time, + "end_time": end_time, + "user_name": user_name, + } + query_ids_list = item["ids"][0].split(",") + scada_data = influxdb_api.query_SCADA_data_by_device_ID_and_timerange( + query_ids_list=query_ids_list, + start_time=item["start_time"], + end_time=item["end_time"], + ) + scada_device_info = influxdb_api.query_pg_scada_info(item["network"]) + scada_device_info_dict = {info["id"]: info for info in scada_device_info} + type_groups: dict[str, list[str]] = {} + for device_id in query_ids_list: + device_info = scada_device_info_dict.get(device_id, {}) + device_type = device_info.get("type", "unknown") + type_groups.setdefault(device_type, []).append(device_id) + for device_type, device_ids in type_groups.items(): + if device_type not in ["pressure", "pipe_flow"]: + continue + type_scada_data = { + device_id: scada_data[device_id] + for device_id in device_ids + if device_id in scada_data + } + if not type_scada_data: + continue + time_list = [record["time"] for record in next(iter(type_scada_data.values()))] + df = pd.DataFrame({"time": time_list}) + for device_id in device_ids: + if device_id in type_scada_data: + values = [record["value"] for record in type_scada_data[device_id]] + 
df[device_id] = values + value_df = df.drop(columns=["time"]) + if device_type == "pressure": + cleaned_value_df = Pdataclean.clean_pressure_data_df_km(value_df) + elif device_type == "pipe_flow": + cleaned_value_df = Fdataclean.clean_flow_data_df_kf(value_df) + cleaned_value_df = pd.DataFrame(cleaned_value_df) + cleaned_df = pd.concat([df["time"], cleaned_value_df], axis=1) + influxdb_api.import_multicolumn_data_from_dict( + data_dict=cleaned_df.to_dict("list"), + raw=False, + ) + return "success" + + +@router.post("/runsimulationmanuallybydate/") +async def fastapi_run_simulation_manually_by_date( + data: RunSimulationManuallyByDate, +) -> dict[str, str]: + item = data.dict() + try: + simulation.query_corresponding_element_id_and_query_id(item["name"]) + simulation.query_corresponding_pattern_id_and_query_id(item["name"]) + region_result = simulation.query_non_realtime_region(item["name"]) + globals.source_outflow_region_id = simulation.get_source_outflow_region_id( + item["name"], region_result + ) + globals.realtime_region_pipe_flow_and_demand_id = ( + simulation.query_realtime_region_pipe_flow_and_demand_id( + item["name"], region_result + ) + ) + globals.pipe_flow_region_patterns = simulation.query_pipe_flow_region_patterns( + item["name"] + ) + globals.non_realtime_region_patterns = ( + simulation.query_non_realtime_region_patterns(item["name"], region_result) + ) + ( + globals.source_outflow_region_patterns, + globals.realtime_region_pipe_flow_and_demand_patterns, + ) = simulation.get_realtime_region_patterns( + item["name"], + globals.source_outflow_region_id, + globals.realtime_region_pipe_flow_and_demand_id, + ) + base_date = datetime.strptime(item["simulation_date"], "%Y-%m-%d") + thread = threading.Thread( + target=lambda: run_simulation_manually_by_date( + item["name"], base_date, item["start_time"], item["duration"] + ) + ) + thread.start() + thread.join() + return {"status": "success"} + except Exception as exc: + return {"status": "error", 
"message": str(exc)} diff --git a/app/api/v1/router.py b/app/api/v1/router.py index 1bd340d..235d61e 100644 --- a/app/api/v1/router.py +++ b/app/api/v1/router.py @@ -35,6 +35,9 @@ from app.api.v1.endpoints.components import ( visuals, ) +from app.infra.db.postgresql import router as postgresql_router +from app.infra.db.timescaledb import router as timescaledb_router + api_router = APIRouter() # Core Services @@ -75,5 +78,9 @@ api_router.include_router(misc.router, tags=["Misc"]) api_router.include_router(risk.router, tags=["Risk"]) api_router.include_router(cache.router, tags=["Cache"]) +# Database Routers +api_router.include_router(timescaledb_router, tags=["TimescaleDB"]) +api_router.include_router(postgresql_router, tags=["PostgreSQL"]) + # Extension api_router.include_router(extension.router, tags=["Extension"]) diff --git a/app/infra/db/postgresql/router.py b/app/infra/db/postgresql/router.py index 28df3ea..6a03d24 100644 --- a/app/infra/db/postgresql/router.py +++ b/app/infra/db/postgresql/router.py @@ -6,7 +6,7 @@ from .database import get_database_instance from .scada_info import ScadaRepository from .scheme import SchemeRepository -router = APIRouter(prefix="/postgresql", tags=["postgresql"]) +router = APIRouter() # 创建支持数据库选择的连接依赖函数 diff --git a/app/infra/db/timescaledb/composite_queries.py b/app/infra/db/timescaledb/composite_queries.py index ab560c4..47a75f9 100644 --- a/app/infra/db/timescaledb/composite_queries.py +++ b/app/infra/db/timescaledb/composite_queries.py @@ -583,9 +583,7 @@ class CompositeQueries: ) # 7. 使用PipelineHealthAnalyzer进行预测 - analyzer = PipelineHealthAnalyzer( - model_path="api_ex/model/my_survival_forest_model_quxi.joblib" - ) + analyzer = PipelineHealthAnalyzer() survival_functions = analyzer.predict_survival(data) # 8. 
组合结果 results = [] diff --git a/app/infra/db/timescaledb/router.py b/app/infra/db/timescaledb/router.py index 5a69f9b..465087b 100644 --- a/app/infra/db/timescaledb/router.py +++ b/app/infra/db/timescaledb/router.py @@ -10,7 +10,7 @@ from .schemas.scada import ScadaRepository from .composite_queries import CompositeQueries from app.infra.db.postgresql.database import get_database_instance as get_postgres_database_instance -router = APIRouter(prefix="/timescaledb", tags=["TimescaleDB"]) +router = APIRouter() # 创建支持数据库选择的连接依赖函数 diff --git a/app/main.py b/app/main.py index 4fdbfd5..86a9c91 100644 --- a/app/main.py +++ b/app/main.py @@ -7,8 +7,6 @@ from datetime import datetime import app.services.project_info as project_info from app.api.v1.router import api_router -from app.infra.db.timescaledb import router as timescaledb_router -from app.infra.db.postgresql import router as postgresql_router from app.infra.db.timescaledb.database import db as tsdb from app.infra.db.postgresql.database import db as pgdb from app.services.tjnetwork import open_project @@ -57,5 +55,5 @@ app.add_middleware(GZipMiddleware, minimum_size=1000) # Include Routers app.include_router(api_router, prefix="/api/v1") -app.include_router(timescaledb_router) -app.include_router(postgresql_router) +# Legacy Routers without version prefix +# app.include_router(api_router) diff --git a/app/services/__init__.py b/app/services/__init__.py index e4f4031..2fd9a9c 100644 --- a/app/services/__init__.py +++ b/app/services/__init__.py @@ -9,6 +9,7 @@ from app.services.scheme_management import ( upload_shp_to_pg, submit_risk_probability_result, ) +from app.services.valve_isolation import analyze_valve_isolation from app.services.simulation_ops import ( project_management, scheduling_simulation, @@ -29,4 +30,5 @@ __all__ = [ "project_management", "scheduling_simulation", "daily_scheduling_simulation", + "analyze_valve_isolation", ] diff --git a/app/services/valve_isolation.py new
file mode 100644 index 0000000..ae63571 --- /dev/null +++ b/app/services/valve_isolation.py @@ -0,0 +1,7 @@ +from typing import Any + +from app.algorithms.valve_isolation import valve_isolation_analysis + + +def analyze_valve_isolation(network: str, accident_element: str) -> dict[str, Any]: + return valve_isolation_analysis(network, accident_element) diff --git a/scripts/main.py b/scripts/main.py new file mode 100644 index 0000000..5be0bbd --- /dev/null +++ b/scripts/main.py @@ -0,0 +1,4244 @@ +import os +import json +import time +import datetime +import logging +import threading +import shutil +import random + +from typing import * +from typing import List, Annotated, Optional, Union + +from urllib.request import Request + +from fastapi import ( + FastAPI, + File, + UploadFile, + Response, + status, + Request, + HTTPException, + Query, + Depends, + Header, +) +from fastapi.responses import PlainTextResponse +from fastapi.middleware.gzip import GZipMiddleware +from fastapi.middleware.cors import CORSMiddleware + +from starlette.responses import FileResponse, JSONResponse +from contextlib import asynccontextmanager + +from pydantic import BaseModel + +from multiprocessing import Value + +import redis +import msgpack +from datetime import datetime, timedelta, timezone + +# 第三方/自定义模块 +import app.infra.db.influxdb.api as influxdb_api +import app.infra.db.timescaledb as timescaledb +import app.infra.db.postgresql as postgresql +import py_linq +import app.services.time_api as time_api +import app.services.simulation as simulation +import app.services.globals as globals +import app.services.project_info as project_info +from app.infra.db.timescaledb.database import db as tsdb +from app.infra.db.postgresql.database import db as pgdb +from app.algorithms.online_Analysis import * +from app.services.tjnetwork import * + + +JUNCTION = 0 +RESERVOIR = 1 +TANK = 2 +PIPE = 1 +NODE_COUNT = 0 +LINK_COUNT = 2 + +prjs = [] +# inpDir = "C:/inpfiles/" +# tmpDir = "C:/tmpfiles/" +# 
proj_name = project_info.name +# lockedPrjs = {} + +# if not os.path.exists(inpDir): +# os.mkdir(inpDir) + +# if not os.path.exists(tmpDir): +# os.mkdir(tmpDir) + + +# 全局依赖项 +async def global_auth(request: Request): + # 白名单跳过 + # if request.url.path in WHITE_LIST: + # return + # 验证 + token = request.headers.get("Authorization") + if token != "Bearer 567e33c876a2" and token != "Bearer 38b3be72b8af": + raise HTTPException(status_code=401, detail="Invalid token") + + +# 简易令牌验证(实际项目中应替换为 JWT/OAuth2 等) +AUTH_TOKEN = "567e33c876a2" # 预设的有效令牌 + + +async def verify_token(authorization: Annotated[str, Header()] = None): + # 检查请求头是否存在 + if not authorization: + raise HTTPException(status_code=401, detail="Authorization header missing") + + # 提取 Bearer 后的令牌 (格式: Bearer ) + try: + token_type, token = authorization.split(" ", 1) + if token_type.lower() != "bearer": + raise ValueError + except ValueError: + raise HTTPException( + status_code=401, detail="Invalid authorization format. Use: Bearer " + ) + + # 验证令牌 + if token != AUTH_TOKEN: + raise HTTPException(status_code=403, detail="Invalid authentication token") + + return True + + +# 全局依赖项 +# app = FastAPI(dependencies=[Depends(global_auth)]) +# app = FastAPI() + + +# 生命周期管理器 +@asynccontextmanager +async def lifespan(app: FastAPI): + # 初始化数据库连接池 + tsdb.init_pool() + pgdb.init_pool() + + await tsdb.open() + await pgdb.open() + + open_project(project_info.name) + + yield + # 清理资源 + tsdb.close() + pgdb.close() + + +app = FastAPI(lifespan=lifespan) + +app.include_router(timescaledb.router) +app.include_router(postgresql.router) + +access_tokens = [] + + +def generate_access_token(username: str, password: str) -> str: + """ + 根据用户名和密码生成JWT access token + + 参数: + username: 用户名 + password: 密码 + + 返回: + JWT access token字符串 + """ + + if username != "tjwater" or password != "tjwater@123": + raise ValueError("用户名或密码错误") + + token = "567e33c876a2" + return token + + +# 将 Query的信息 序列号到 redis/json, 默认不支持datetime,需要自定义 +# 自定义序列化函数 +# 序列化处理器 
+def encode_datetime(obj): + """将datetime转换为可序列化的字典结构""" + if isinstance(obj, datetime): + return {"__datetime__": True, "as_str": obj.strftime("%Y%m%dT%H:%M:%S.%f")} + return obj + + +# 反序列化处理器 +def decode_datetime(obj): + """将字典还原为datetime对象""" + if "__datetime__" in obj: + return datetime.strptime(obj["as_str"], "%Y%m%dT%H:%M:%S.%f") + return obj + + +# 初始化 Redis 连接 +# 用redis 限制并发访u +redis_client = redis.Redis(host="127.0.0.1", port=6379, db=0) + +# influxdb数据库连接信息 +# influx_url = influxdb_info.url +# influx_token = influxdb_info.token +# influx_org_name = influxdb_info.org +# influx_client = InfluxDBClient(url=influx_url, token=influx_token, org=influx_org_name, timeout=100*1000) # 100 seconds + + +# 配置 CORS 中间件 +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], # 允许所有来源 + allow_credentials=True, # 允许传递凭证(Cookie、HTTP 头等) + allow_methods=["*"], # 允许所有 HTTP 方法 + allow_headers=["*"], # 允许所有 HTTP 头 +) + +# 定义一个共享变量 +lock_simulation = Value("i", 0) + +app.add_middleware(GZipMiddleware, minimum_size=1000) + +logger = logging.getLogger() +logger.setLevel(logging.INFO) + + +@app.on_event("startup") +async def startup_db(): + logger.info("**********************************************************") + logger.info(str(datetime.now())) + logger.info("TJWater CloudService is starting...") + logger.info("**********************************************************") + + # open proj_name by default + print(project_info.name) + open_project(project_info.name) + + +############################################################ +# auth +############################################################ +@app.post("/login/") +async def fastapi_login(username: str, password: str) -> str: + return generate_access_token(username, password) + + +############################################################ +# extension_data +############################################################ +@app.get("/getallextensiondatakeys/") +async def fastapi_get_all_extension_data_keys(network: str) 
-> list[str]: + return get_all_extension_data_keys(network) + + +@app.get("/getallextensiondata/") +async def fastapi_get_all_extension_data(network: str) -> dict[str, Any]: + return get_all_extension_data(network) + + +@app.get("/getextensiondata/") +async def fastapi_get_extension_data(network: str, key: str) -> str | None: + return get_extension_data(network, key) + + +@app.post("/setextensiondata", response_model=None) +async def fastapi_set_extension_data(network: str, req: Request) -> ChangeSet: + props = await req.json() + print(props) + cs = set_extension_data(network, ChangeSet(props)) + print(cs.operations[0]) + return cs + + +############################################################ +# project +############################################################ + + +@app.get("/listprojects/") +async def fastapi_list_projects() -> list[str]: + return list_project() + + +@app.get("/haveproject/") +async def fastapi_have_project(network: str): + return have_project(network) + + +@app.post("/createproject/") +async def fastapi_create_project(network: str): + create_project(network) + return network + + +@app.post("/deleteproject/") +async def fastapi_delete_project(network: str): + delete_project(network) + return True + + +@app.get("/isprojectopen/") +async def fastapi_is_project_open(network: str): + return is_project_open(network) + + +@app.post("/openproject/") +async def fastapi_open_project(network: str): + open_project(network) + return network + + +@app.post("/closeproject/") +async def fastapi_close_project(network: str): + close_project(network) + return True + + +@app.post("/copyproject/") +async def fastapi_copy_project(source: str, target: str): + copy_project(source, target) + return True + + +@app.post("/importinp/") +async def fastapi_import_inp(network: str, req: Request): + jo_root = await req.json() + inp_text = jo_root["inp"] + ps = {"inp": inp_text} + ret = import_inp(network, ChangeSet(ps)) + print(ret) + return ret + + 
+@app.get("/exportinp/", response_model=None) +async def fastapi_export_inp(network: str, version: str) -> ChangeSet: + cs = export_inp(network, version) + op = cs.operations[0] + open_project(network) + op["vertex"] = json.dumps(get_all_vertices(network)) + op["scada"] = json.dumps(get_all_scada_elements(network)) + op["dma"] = json.dumps(get_all_district_metering_areas(network)) + op["sa"] = json.dumps(get_all_service_areas(network)) + op["vd"] = json.dumps(get_all_virtual_districts(network)) + op["legend"] = get_extension_data(network, "legend") + + db = get_extension_data(network, "scada_db") + print(db) + scada_db = "" + if db: + scada_db = db + print(scada_db) + op["scada_db"] = scada_db + + close_project(network) + + return cs + + +@app.post("/readinp/") +async def fastapi_read_inp(network: str, inp: str) -> bool: + read_inp(network, inp) + return True + + +@app.get("/dumpinp/") +async def fastapi_dump_inp(network: str, inp: str) -> bool: + dump_inp(network, inp) + return True + + +# 必须用这个PlainTextResponse,不然每个key都有引号 +@app.get("/runproject/", response_class=PlainTextResponse) +async def fastapi_run_project(network: str) -> str: + lock_key = "exclusive_api_lock" + timeout = 120 # 锁自动过期时间(秒) + + # 尝试获取锁(NX=True: 不存在时设置,EX=timeout: 过期时间) + acquired = redis_client.set(lock_key, "locked", nx=True, ex=timeout) + + if not acquired: + raise HTTPException(status_code=409, detail="is in simulation") + else: + try: + return run_project(network) + finally: + # 手动释放锁(可选,依赖过期时间自动释放更安全) + redis_client.delete(lock_key) + + +# DingZQ, 2025-02-04, 返回dict[str, Any] +# output 和 report +# output 是 json +# report 是 text +@app.get("/runprojectreturndict/") +async def fastapi_run_project_return_dict(network: str) -> dict[str, Any]: + lock_key = "exclusive_api_lock" + timeout = 120 # 锁自动过期时间(秒) + + # 尝试获取锁(NX=True: 不存在时设置,EX=timeout: 过期时间) + acquired = redis_client.set(lock_key, "locked", nx=True, ex=timeout) + + if not acquired: + raise HTTPException(status_code=409, detail="is in 
simulation") + else: + try: + return run_project_return_dict(network) + finally: + # 手动释放锁(可选,依赖过期时间自动释放更安全) + redis_client.delete(lock_key) + + +# put in inp folder, name without extension +@app.get("/runinp/") +async def fastapi_run_inp(network: str) -> str: + return run_inp(network) + + +# path is absolute path +@app.get("/dumpoutput/") +async def fastapi_dump_output(output: str) -> str: + return dump_output(output) + + +@app.get("/isprojectlocked/") +async def fastapi_is_locked(network: str, req: Request): + return network in lockedPrjs.keys() + + +@app.get("/isprojectlockedbyme/") +async def fastapi_is_locked_by_me(network: str, req: Request): + client_host = req.client.host + return lockedPrjs.get(network) == client_host + + +# 0 successfully locked +# 1 already locked by you +# 2 locked by others +@app.post("/lockproject/") +async def fastapi_lock_project(network: str, req: Request): + client_host = req.client.host + if not network in lockedPrjs.keys(): + lockedPrjs[network] = client_host + return 0 + else: + if lockedPrjs.get(network) == client_host: + return 1 + else: + return 2 + + +@app.post("/unlockproject/") +def fastapi_unlock_project(network: str, req: Request): + client_host = req.client.host + if lockedPrjs[network] == client_host: + print("delete key") + del lockedPrjs[network] + return True + + return False + + +### operations + + +@app.get("/getcurrentoperationid/") +async def fastapi_get_current_operation_id(network: str) -> int: + return get_current_operation(network) + + +@app.post("/undo/") +async def fastapi_undo(network: str): + return execute_undo(network) + + +@app.post("/redo/") +async def fastapi_redo(network: str): + return execute_redo(network) + + +@app.get("/getsnapshots/") +def fastapi_list_snapshot(network: str) -> list[tuple[int, str]]: + return list_snapshot(network) + + +@app.get("/havesnapshot/") +async def fastapi_have_snapshot(network: str, tag: str) -> bool: + return have_snapshot(network, tag) + + 
+@app.get("/havesnapshotforoperation/") +async def fastapi_have_snapshot_for_operation(network: str, operation: int) -> bool: + return have_snapshot_for_operation(network, operation) + + +@app.get("/havesnapshotforcurrentoperation/") +async def fastapi_have_snapshot_for_current_operation(network: str) -> bool: + return have_snapshot_for_current_operation(network) + + +@app.post("/takesnapshotforoperation/") +async def fastapi_take_snapshot_for_operation( + network: str, operation: int, tag: str +) -> None: + return take_snapshot_for_operation(network, operation, tag) + + +@app.post("/takesnapshotforcurrentoperation/") +async def fastapi_take_snapshot_for_current_operation(network: str, tag: str) -> None: + return take_snapshot_for_current_operation(network, tag) + + +@app.post("/takesnapshot/") +def fastapi_take_snapshot(network: str, tag: str) -> None: + return take_snapshot(network, tag) + + +@app.post("/picksnapshot/", response_model=None) +def fastapi_pick_snapshot(network: str, tag: str, discard: bool = False) -> ChangeSet: + return pick_snapshot(network, tag, discard) + + +@app.post("/pickoperation/", response_model=None) +async def fastapi_pick_operation( + network: str, operation: int, discard: bool = False +) -> ChangeSet: + return pick_operation(network, operation, discard) + + +@app.get("/syncwithserver/", response_model=None) +async def fastapi_sync_with_server(network: str, operation: int) -> ChangeSet: + return sync_with_server(network, operation) + + +@app.post("/batch/", response_model=None) +async def fastapi_execute_batch_commands(network: str, req: Request) -> ChangeSet: + jo_root = await req.json() + cs: ChangeSet = ChangeSet() + cs.operations = jo_root["operations"] + rcs = execute_batch_commands(network, cs) + return rcs + + +@app.post("/compressedbatch/", response_model=None) +async def fastapi_execute_compressed_batch_commands( + network: str, req: Request +) -> ChangeSet: + jo_root = await req.json() + cs: ChangeSet = ChangeSet() + 
cs.operations = jo_root["operations"] + return execute_batch_command(network, cs) + + +@app.get("/getrestoreoperation/") +async def fastapi_get_restore_operation(network: str) -> int: + return get_restore_operation(network) + + +@app.post("/setrestoreoperation/") +async def fastapi_set_restore_operation(network: str, operation: int) -> None: + return set_restore_operation(network, operation) + + +############################################################ +# type +############################################################ + + +@app.get("/isnode/") +async def fastapi_is_node(network: str, node: str) -> bool: + return is_node(network, node) + + +@app.get("/isjunction/") +async def fastapi_is_junction(network: str, node: str) -> bool: + return is_junction(network, node) + + +@app.get("/isreservoir/") +async def fastapi_is_reservoir(network: str, node: str) -> bool: + return is_reservoir(network, node) + + +@app.get("/istank/") +async def fastapi_is_tank(network: str, node: str) -> bool: + return is_tank(network, node) + + +@app.get("/islink/") +async def fastapi_is_link(network: str, link: str) -> bool: + return is_link(network, link) + + +@app.get("/ispipe/") +async def fastapi_is_pipe(network: str, link: str) -> bool: + return is_pipe(network, link) + + +@app.get("/ispump/") +async def fastapi_is_pump(network: str, link: str) -> bool: + return is_pump(network, link) + + +@app.get("/isvalve/") +async def fastapi_is_valve(network: str, link: str) -> bool: + return is_valve(network, link) + + +# DingZQ, 2025-02-05 +@app.get("/getnodetype/") +async def fastapi_get_node_type(network: str, node: str) -> str: + return get_node_type(network, node) + + +@app.get("/getlinktype/") +async def fastapi_get_link_type(network: str, link: str) -> str: + return get_link_type(network, link) + + +@app.get("/getelementtype/") +async def fastapi_get_element_type(network: str, element: str) -> str: + return get_element_type(network, element) + + +@app.get("/getelementtypevalue/") 
+async def fastapi_get_element_type_value(network: str, element: str) -> int: + return get_element_type_value(network, element) + + +@app.get("/iscurve/") +async def fastapi_is_curve(network: str, curve: str) -> bool: + return is_curve(network, curve) + + +@app.get("/ispattern/") +async def fastapi_is_pattern(network: str, pattern: str) -> bool: + return is_pattern(network, pattern) + + +@app.get("/getnodes/") +async def fastapi_get_nodes(network: str) -> list[str]: + return get_nodes(network) + + +@app.get("/getlinks/") +async def fastapi_get_links(network: str) -> list[str]: + return get_links(network) + + +@app.get("/getcurves/") +async def fastapi_get_curves(network: str) -> list[str]: + return get_curves(network) + + +@app.get("/getpatterns/") +async def fastapi_get_patterns(network: str) -> list[str]: + return get_patterns(network) + + +@app.get("/getnodelinks/") +def fastapi_get_node_links(network: str, node: str) -> list[str]: + return get_node_links(network, node) + + +############################################################ +# DingZQ, 2025-02-05 +# 用统一的接口来获取 Node & Link properties, Node和Link的Id可以一样,不能进一步统一成获取Element 的 properties +# Node & Link properties +############################################################ +@app.get("/getnodeproperties/") +async def fast_get_node_properties(network: str, node: str) -> dict[str, Any]: + return get_node_properties(network, node) + + +@app.get("/getlinkproperties/") +async def fast_get_link_properties(network: str, link: str) -> dict[str, Any]: + return get_link_properties(network, link) + + +@app.get("/getscadaproperties/") +async def fast_get_scada_properties(network: str, scada: str) -> dict[str, Any]: + return get_scada_info(network, scada) + + +@app.get("/getallscadaproperties/") +async def fast_get_all_scada_properties(network: str) -> list[dict[str, Any]]: + return get_all_scada_info(network) + + +# elementtype can be 'node' or 'link' or 'scada' +@app.get("/getelementpropertieswithtype/") +async def 
fast_get_element_properties_with_type( + network: str, elementtype: str, element: str +) -> dict[str, Any]: + return get_element_properties_with_type(network, elementtype, element) + + +# type can be 'node' or 'link' or 'scada' +@app.get("/getelementproperties/") +async def fast_get_element_properties(network: str, element: str) -> dict[str, Any]: + return get_element_properties(network, element) + + +############################################################ +# title 1.[TITLE] +############################################################ +@app.get("/gettitleschema/") +async def fast_get_title_schema(network: str) -> dict[str, dict[str, Any]]: + return get_title_schema(network) + + +@app.get("/gettitle/") +async def fast_get_title(network: str) -> dict[str, Any]: + return get_title(network) + + +@app.get("/settitle/", response_model=None) +async def fastapi_set_title(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_title(network, ChangeSet(props)) + + +############################################################ +# junction 2.[JUNCTIONS] +############################################################ +@app.get("/getjunctionschema") +async def fast_get_junction_schema(network: str) -> dict[str, dict[str, Any]]: + return get_junction_schema(network) + + +@app.post("/addjunction/", response_model=None) +async def fastapi_add_junction( + network: str, junction: str, x: float, y: float, z: float +) -> ChangeSet: + ps = {"id": junction, "x": x, "y": y, "elevation": z} + return add_junction(network, ChangeSet(ps)) + + +@app.post("/deletejunction/", response_model=None) +async def fastapi_delete_junction(network: str, junction: str) -> ChangeSet: + ps = {"id": junction} + return delete_junction(network, ChangeSet(ps)) + + +@app.get("/getjunctionelevation/") +async def fastapi_get_junction_elevation(network: str, junction: str) -> float: + ps = get_junction(network, junction) + return ps["elevation"] + + +@app.get("/getjunctionx/") +async 
def fastapi_get_junction_x(network: str, junction: str) -> float: + ps = get_junction(network, junction) + return ps["x"] + + +@app.get("/getjunctiony/") +async def fastapi_get_junction_y(network: str, junction: str) -> float: + ps = get_junction(network, junction) + return ps["y"] + + +@app.get("/getjunctioncoord/") +async def fastapi_get_junction_coord(network: str, junction: str) -> dict[str, float]: + ps = get_junction(network, junction) + coord = {"x": ps["x"], "y": ps["y"]} + return coord + + +@app.get("/getjunctiondemand/") +async def fastapi_get_junction_demand(network: str, junction: str) -> float: + ps = get_junction(network, junction) + return ps["demand"] + + +@app.get("/getjunctionpattern/") +async def fastapi_get_junction_pattern(network: str, junction: str) -> str: + ps = get_junction(network, junction) + return ps["pattern"] + + +@app.post("/setjunctionelevation/", response_model=None) +async def fastapi_set_junction_elevation( + network: str, junction: str, elevation: float +) -> ChangeSet: + ps = {"id": junction, "elevation": elevation} + return set_junction(network, ChangeSet(ps)) + + +@app.post("/setjunctionx/", response_model=None) +async def fastapi_set_junction_x(network: str, junction: str, x: float) -> ChangeSet: + ps = {"id": junction, "x": x} + return set_junction(network, ChangeSet(ps)) + + +@app.post("/setjunctiony/", response_model=None) +async def fastapi_set_junction_y(network: str, junction: str, y: float) -> ChangeSet: + ps = {"id": junction, "y": y} + return set_junction(network, ChangeSet(ps)) + + +@app.post("/setjunctioncoord/", response_model=None) +async def fastapi_set_junction_coord( + network: str, junction: str, x: float, y: float +) -> ChangeSet: + ps = {"id": junction, "x": x, "y": y} + return set_junction(network, ChangeSet(ps)) + + +@app.post("/setjunctiondemand/", response_model=None) +async def fastapi_set_junction_demand( + network: str, junction: str, demand: float +) -> ChangeSet: + ps = {"id": junction, "demand": 
demand} + return set_junction(network, ChangeSet(ps)) + + +@app.post("/setjunctionpattern/", response_model=None) +async def fastapi_set_junction_pattern( + network: str, junction: str, pattern: str +) -> ChangeSet: + ps = {"id": junction, "pattern": pattern} + return set_junction(network, ChangeSet(ps)) + + +@app.get("/getjunctionproperties/") +async def fastapi_get_junction_properties( + network: str, junction: str +) -> dict[str, Any]: + return get_junction(network, junction) + + +# DingZQ, 2025-03-29 +@app.get("/getalljunctionproperties/") +async def fastapi_get_all_junction_properties(network: str) -> list[dict[str, Any]]: + # 缓存查询结果提高性能 + global redis_client + cache_key = f"getalljunctionproperties_{network}" + data = redis_client.get(cache_key) + if data: + # 使用自定义的反序列化函数 + loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) + return loaded_dict + + results = get_all_junctions(network) + redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime)) + + return results + + +@app.post("/setjunctionproperties/", response_model=None) +async def fastapi_set_junction_properties( + network: str, junction: str, req: Request +) -> ChangeSet: + props = await req.json() + ps = {"id": junction} | props + return set_junction(network, ChangeSet(ps)) + + +############################################################ +# reservoir 3.[RESERVOIRS] +############################################################ +@app.get("/getreservoirschema") +async def fast_get_reservoir_schema(network: str) -> dict[str, dict[str, Any]]: + return get_reservoir_schema(network) + + +@app.post("/addreservoir/", response_model=None) +async def fastapi_add_reservoir( + network: str, reservoir: str, x: float, y: float, head: float +) -> ChangeSet: + ps = {"id": reservoir, "x": x, "y": y, "head": head} + return add_reservoir(network, ChangeSet(ps)) + + +@app.post("/deletereservoir/", response_model=None) +async def fastapi_delete_reservoir(network: str, reservoir: str) -> 
ChangeSet: + ps = {"id": reservoir} + return delete_reservoir(network, ChangeSet(ps)) + + +@app.get("/getreservoirhead/") +async def fastapi_get_reservoir_head(network: str, reservoir: str) -> float | None: + ps = get_reservoir(network, reservoir) + return ps["head"] + + +@app.get("/getreservoirpattern/") +async def fastapi_get_reservoir_pattern(network: str, reservoir: str) -> str | None: + ps = get_reservoir(network, reservoir) + return ps["pattern"] + + +@app.get("/getreservoirx/") +async def fastapi_get_reservoir_x( + network: str, reservoir: str +) -> dict[str, float] | None: + ps = get_reservoir(network, reservoir) + return ps["x"] + + +@app.get("/getreservoiry/") +async def fastapi_get_reservoir_y( + network: str, reservoir: str +) -> dict[str, float] | None: + ps = get_reservoir(network, reservoir) + return ps["y"] + + +@app.get("/getreservoircoord/") +async def fastapi_get_reservoir_coord( + network: str, reservoir: str +) -> dict[str, float] | None: + ps = get_reservoir(network, reservoir) + coord = {"id": reservoir, "x": ps["x"], "y": ps["y"]} + return coord + + +@app.post("/setreservoirhead/", response_model=None) +async def fastapi_set_reservoir_head( + network: str, reservoir: str, head: float +) -> ChangeSet: + ps = {"id": reservoir, "head": head} + return set_reservoir(network, ChangeSet(ps)) + + +@app.post("/setreservoirpattern/", response_model=None) +async def fastapi_set_reservoir_pattern( + network: str, reservoir: str, pattern: str +) -> ChangeSet: + ps = {"id": reservoir, "pattern": pattern} + return set_reservoir(network, ChangeSet(ps)) + + +@app.post("/setreservoirx/", response_model=None) +async def fastapi_set_reservoir_x(network: str, reservoir: str, x: float) -> ChangeSet: + ps = {"id": reservoir, "x": x} + return set_reservoir(network, ChangeSet(ps)) + + +@app.post("/setreservoiry/", response_model=None) +async def fastapi_set_reservoir_y(network: str, reservoir: str, y: float) -> ChangeSet: + ps = {"id": reservoir, "y": y} + return 
set_reservoir(network, ChangeSet(ps)) + + +@app.post("/setreservoircoord/", response_model=None) +async def fastapi_set_reservoir_y( + network: str, reservoir: str, x: float, y: float +) -> ChangeSet: + ps = {"id": reservoir, "x": x, "y": y} + return set_reservoir(network, ChangeSet(ps)) + + +@app.get("/getreservoirproperties/") +async def fastapi_get_reservoir_properties( + network: str, reservoir: str +) -> dict[str, Any]: + return get_reservoir(network, reservoir) + + +# DingZQ, 2025-03-29 +@app.get("/getallreservoirproperties/") +async def fastapi_get_all_reservoir_properties(network: str) -> list[dict[str, Any]]: + # 缓存查询结果提高性能 + global redis_client + cache_key = f"getallreservoirproperties_{network}" + data = redis_client.get(cache_key) + if data: + # 使用自定义的反序列化函数 + loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) + return loaded_dict + + results = get_all_reservoirs(network) + redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime)) + + return results + + +@app.post("/setreservoirproperties/", response_model=None) +async def fastapi_set_reservoir_properties( + network: str, reservoir: str, req: Request +) -> ChangeSet: + props = await req.json() + ps = {"id": reservoir} | props + return set_reservoir(network, ChangeSet(ps)) + + +############################################################ +# tank 4.[TANKS] +############################################################ +@app.get("/gettankschema") +async def fast_get_tank_schema(network: str) -> dict[str, dict[str, Any]]: + return get_tank_schema(network) + + +@app.post("/addtank/", response_model=None) +async def fastapi_add_tank( + network: str, + tank: str, + x: float, + y: float, + elevation: float, + init_level: float = 0, + min_level: float = 0, + max_level: float = 0, + diameter: float = 0, + min_vol: float = 0, +) -> ChangeSet: + ps = { + "id": tank, + "x": x, + "y": y, + "elevation": elevation, + "init_level": init_level, + "min_level": min_level, + "max_level": 
max_level, + "diameter": diameter, + "min_vol": min_vol, + } + return add_tank(network, ChangeSet(ps)) + + +@app.post("/deletetank/", response_model=None) +async def fastapi_delete_tank(network: str, tank: str) -> ChangeSet: + ps = {"id": tank} + return delete_tank(network, ChangeSet(ps)) + + +@app.get("/gettankelevation/") +async def fastapi_get_tank_elevation(network: str, tank: str) -> float | None: + ps = get_tank(network, tank) + return ps["elevation"] + + +@app.get("/gettankinitlevel/") +async def fastapi_get_tank_init_level(network: str, tank: str) -> float | None: + ps = get_tank(network, tank) + return ps["init_level"] + + +@app.get("/gettankminlevel/") +async def fastapi_get_tank_min_level(network: str, tank: str) -> float | None: + ps = get_tank(network, tank) + return ps["min_level"] + + +@app.get("/gettankmaxlevel/") +async def fastapi_get_tank_max_level(network: str, tank: str) -> float | None: + ps = get_tank(network, tank) + return ps["max_level"] + + +@app.get("/gettankdiameter/") +async def fastapi_get_tank_diameter(network: str, tank: str) -> float | None: + ps = get_tank(network, tank) + return ps["diameter"] + + +@app.get("/gettankminvol/") +async def fastapi_get_tank_min_vol(network: str, tank: str) -> float | None: + ps = get_tank(network, tank) + return ps["min_vol"] + + +@app.get("/gettankvolcurve/") +async def fastapi_get_tank_vol_curve(network: str, tank: str) -> str | None: + ps = get_tank(network, tank) + return ps["vol_curve"] + + +@app.get("/gettankoverflow/") +async def fastapi_get_tank_overflow(network: str, tank: str) -> str | None: + ps = get_tank(network, tank) + return ps["overflow"] + + +@app.get("/gettankx/") +async def fastapi_get_tank_x(network: str, tank: str) -> float: + ps = get_tank(network, tank) + return ps["x"] + + +@app.get("/gettanky/") +async def fastapi_get_tank_x(network: str, tank: str) -> float: + ps = get_tank(network, tank) + return ps["y"] + + +@app.get("/gettankcoord/") +async def 
fastapi_get_tank_coord(network: str, tank: str) -> dict[str, float]: + ps = get_tank(network, tank) + coord = {"x": ps["x"], "y": ps["y"]} + return coord + + +@app.post("/settankelevation/", response_model=None) +async def fastapi_set_tank_elevation( + network: str, tank: str, elevation: float +) -> ChangeSet: + ps = {"id": tank, "elevation": elevation} + return set_tank(network, ChangeSet(ps)) + + +@app.post("/settankinitlevel/", response_model=None) +async def fastapi_set_tank_init_level( + network: str, tank: str, init_level: float +) -> ChangeSet: + ps = {"id": tank, "init_level": init_level} + return set_tank(network, ChangeSet(ps)) + + +@app.post("/settankminlevel/", response_model=None) +async def fastapi_set_tank_min_level( + network: str, tank: str, min_level: float +) -> ChangeSet: + ps = {"id": tank, "min_level": min_level} + return set_tank(network, ChangeSet(ps)) + + +@app.post("/settankmaxlevel/", response_model=None) +async def fastapi_set_tank_max_level( + network: str, tank: str, max_level: float +) -> ChangeSet: + ps = {"id": tank, "max_level": max_level} + return set_tank(network, ChangeSet(ps)) + + +@app.post("/settankdiameter/", response_model=None) +async def fastapi_set_tank_diameter( + network: str, tank: str, diameter: float +) -> ChangeSet: + ps = {"id": tank, "diameter": diameter} + return set_tank(network, ChangeSet(ps)) + + +@app.post("/settankminvol/", response_model=None) +async def fastapi_set_tank_min_vol( + network: str, tank: str, min_vol: float +) -> ChangeSet: + ps = {"id": tank, "min_vol": min_vol} + return set_tank(network, ChangeSet(ps)) + + +@app.post("/settankvolcurve/", response_model=None) +async def fastapi_set_tank_vol_curve( + network: str, tank: str, vol_curve: str +) -> ChangeSet: + ps = {"id": tank, "vol_curve": vol_curve} + return set_tank(network, ChangeSet(ps)) + + +@app.post("/settankoverflow/", response_model=None) +async def fastapi_set_tank_overflow( + network: str, tank: str, overflow: str +) -> ChangeSet: + 
ps = {"id": tank, "overflow": overflow} + return set_tank(network, ChangeSet(ps)) + + +@app.post("/settankx/", response_model=None) +async def fastapi_set_tank_x(network: str, tank: str, x: float) -> ChangeSet: + ps = {"id": tank, "x": x} + return set_tank(network, ChangeSet(ps)) + + +@app.post("/settanky/", response_model=None) +async def fastapi_set_tank_y(network: str, tank: str, y: float) -> ChangeSet: + ps = {"id": tank, "y": y} + return set_tank(network, ChangeSet(ps)) + + +@app.post("/settankcoord/", response_model=None) +async def fastapi_set_tank_coord( + network: str, tank: str, x: float, y: float +) -> ChangeSet: + ps = {"id": tank, "x": x, "y": y} + return set_tank(network, ChangeSet(ps)) + + +@app.get("/gettankproperties/") +async def fastapi_get_tank_properties(network: str, tank: str) -> dict[str, Any]: + return get_tank(network, tank) + + +# DingZQ, 2025-03-29 +@app.get("/getalltankproperties/") +async def fastapi_get_all_tank_properties(network: str) -> list[dict[str, Any]]: + # 缓存查询结果提高性能 + global redis_client + cache_key = f"getalltankproperties_{network}" + data = redis_client.get(cache_key) + if data: + # 使用自定义的反序列化函数 + loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) + return loaded_dict + + results = get_all_tanks(network) + redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime)) + + return results + + +@app.post("/settankproperties/", response_model=None) +async def fastapi_set_tank_properties( + network: str, tank: str, req: Request +) -> ChangeSet: + props = await req.json() + ps = {"id": tank} | props + return set_tank(network, ChangeSet(ps)) + + +############################################################ +# pipe 4.[PIPES] +############################################################ +@app.get("/getpipeschema") +async def fastapi_get_pipe_schema(network: str) -> dict[str, dict[str, Any]]: + return get_pipe_schema(network) + + +@app.post("/addpipe/", response_model=None) +async def fastapi_add_pipe( + 
network: str, + pipe: str, + node1: str, + node2: str, + length: float = 0, + diameter: float = 0, + roughness: float = 0, + minor_loss: float = 0, + status: str = PIPE_STATUS_OPEN, +) -> ChangeSet: + ps = { + "id": pipe, + "node1": node1, + "node2": node2, + "length": length, + "diameter": diameter, + "roughness": roughness, + "minor_loss": minor_loss, + "status": status, + } + return add_pipe(network, ChangeSet(ps)) + + +@app.post("/deletepipe/", response_model=None) +async def fastapi_delete_pipe(network: str, pipe: str) -> ChangeSet: + ps = {"id": pipe} + return delete_pipe(network, ChangeSet(ps)) + + +@app.get("/getpipenode1/") +async def fastapi_get_pipe_node1(network: str, pipe: str) -> str | None: + ps = get_pipe(network, pipe) + return ps["node1"] + + +@app.get("/getpipenode2/") +async def fastapi_get_pipe_node2(network: str, pipe: str) -> str | None: + ps = get_pipe(network, pipe) + return ps["node2"] + + +@app.get("/getpipelength/") +async def fastapi_get_pipe_length(network: str, pipe: str) -> float | None: + ps = get_pipe(network, pipe) + return ps["length"] + + +@app.get("/getpipediameter/") +async def fastapi_get_pipe_diameter(network: str, pipe: str) -> float | None: + ps = get_pipe(network, pipe) + return ps["diameter"] + + +@app.get("/getpiperoughness/") +async def fastapi_get_pipe_roughness(network: str, pipe: str) -> float | None: + ps = get_pipe(network, pipe) + return ps["roughness"] + + +@app.get("/getpipeminorloss/") +async def fastapi_get_pipe_minor_loss(network: str, pipe: str) -> float | None: + ps = get_pipe(network, pipe) + return ps["minor_loss"] + + +@app.get("/getpipestatus/") +async def fastapi_get_pipe_status(network: str, pipe: str) -> str | None: + ps = get_pipe(network, pipe) + return ps["status"] + + +@app.post("/setpipenode1/", response_model=None) +async def fastapi_set_pipe_node1(network: str, pipe: str, node1: str) -> ChangeSet: + ps = {"id": pipe, "node1": node1} + return set_pipe(network, ChangeSet(ps)) + + 
+@app.post("/setpipenode2/", response_model=None) +async def fastapi_set_pipe_node2(network: str, pipe: str, node2: str) -> ChangeSet: + ps = {"id": pipe, "node2": node2} + return set_pipe(network, ChangeSet(ps)) + + +@app.post("/setpipelength/", response_model=None) +async def fastapi_set_pipe_length(network: str, pipe: str, length: float) -> ChangeSet: + ps = {"id": pipe, "length": length} + return set_pipe(network, ChangeSet(ps)) + + +@app.post("/setpipediameter/", response_model=None) +async def fastapi_set_pipe_diameter( + network: str, pipe: str, diameter: float +) -> ChangeSet: + ps = {"id": pipe, "diameter": diameter} + return set_pipe(network, ChangeSet(ps)) + + +@app.post("/setpiperoughness/", response_model=None) +async def fastapi_set_pipe_roughness( + network: str, pipe: str, roughness: float +) -> ChangeSet: + ps = {"id": pipe, "roughness": roughness} + return set_pipe(network, ChangeSet(ps)) + + +@app.post("/setpipeminorloss/", response_model=None) +async def fastapi_set_pipe_minor_loss( + network: str, pipe: str, minor_loss: float +) -> ChangeSet: + ps = {"id": pipe, "minor_loss": minor_loss} + return set_pipe(network, ChangeSet(ps)) + + +@app.post("/setpipestatus/", response_model=None) +async def fastapi_set_pipe_status(network: str, pipe: str, status: str) -> ChangeSet: + ps = {"id": pipe, "status": status} + + print(status) + print(ps) + + ret = set_pipe(network, ChangeSet(ps)) + print(ret) + return ret + + +@app.get("/getpipeproperties/") +async def fastapi_get_pipe_properties(network: str, pipe: str) -> dict[str, Any]: + return get_pipe(network, pipe) + + +# DingZQ, 2025-03-29 +@app.get("/getallpipeproperties/") +async def fastapi_get_all_pipe_properties(network: str) -> list[dict[str, Any]]: + # 缓存查询结果提高性能 + global redis_client + cache_key = f"getallpipeproperties_{network}" + data = redis_client.get(cache_key) + if data: + # 使用自定义的反序列化函数 + loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) + return loaded_dict + + results = 
get_all_pipes(network) + redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime)) + + return results + + +@app.post("/setpipeproperties/", response_model=None) +async def fastapi_set_pipe_properties( + network: str, pipe: str, req: Request +) -> ChangeSet: + props = await req.json() + ps = {"id": pipe} | props + return set_pipe(network, ChangeSet(ps)) + + +############################################################ +# pump 4.[PUMPS] +############################################################ +@app.get("/getpumpschema") +async def fastapi_get_pump_schema(network: str) -> dict[str, dict[str, Any]]: + return get_pump_schema(network) + + +@app.post("/addpump/", response_model=None) +async def fastapi_add_pump( + network: str, pump: str, node1: str, node2: str, power: float = 0.0 +) -> ChangeSet: + ps = {"id": pump, "node1": node1, "node2": node2, "power": power} + return add_pump(network, ChangeSet(ps)) + + +@app.post("/deletepump/", response_model=None) +async def fastapi_delete_pump(network: str, pump: str) -> ChangeSet: + ps = {"id": pump} + return delete_pump(network, ChangeSet(ps)) + + +@app.get("/getpumpnode1/") +async def fastapi_get_pump_node1(network: str, pump: str) -> str | None: + ps = get_pump(network, pump) + return ps["node1"] + + +@app.get("/getpumpnode2/") +async def fastapi_get_pump_node2(network: str, pump: str) -> str | None: + ps = get_pump(network, pump) + return ps["node2"] + + +@app.post("/setpumpnode1/", response_model=None) +async def fastapi_set_pump_node1(network: str, pump: str, node1: str) -> ChangeSet: + ps = {"id": pump, "node1": node1} + return set_pump(network, ChangeSet(ps)) + + +@app.post("/setpumpnode2/", response_model=None) +async def fastapi_set_pump_node2(network: str, pump: str, node2: str) -> ChangeSet: + ps = {"id": pump, "node2": node2} + return set_pump(network, ChangeSet(ps)) + + +@app.get("/getpumpproperties/") +async def fastapi_get_pump_properties(network: str, pump: str) -> dict[str, Any]: + 
return get_pump(network, pump) + + +# DingZQ, 2025-03-29 +@app.get("/getallpumpproperties/") +async def fastapi_get_all_pump_properties(network: str) -> list[dict[str, Any]]: + # 缓存查询结果提高性能 + global redis_client + cache_key = f"getallpumpproperties_{network}" + data = redis_client.get(cache_key) + if data: + # 使用自定义的反序列化函数 + loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) + return loaded_dict + + results = get_all_pumps(network) + redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime)) + + return results + + +@app.post("/setpumpproperties/", response_model=None) +async def fastapi_set_pump_properties( + network: str, pump: str, req: Request +) -> ChangeSet: + props = await req.json() + ps = {"id": pump} | props + return set_pump(network, ChangeSet(ps)) + + +############################################################ +# valve 4.[VALVES] +############################################################ +@app.get("/getvalveschema") +async def fastapi_get_valve_schema(network: str) -> dict[str, dict[str, Any]]: + return get_valve_schema(network) + + +@app.post("/addvalve/", response_model=None) +async def fastapi_add_valve( + network: str, + valve: str, + node1: str, + node2: str, + diameter: float = 0, + v_type: str = VALVES_TYPE_PRV, + setting: float = 0, + minor_loss: float = 0, +) -> ChangeSet: + ps = { + "id": valve, + "node1": node1, + "node2": node2, + "diameter": diameter, + "v_type": v_type, + "setting": setting, + "minor_loss": minor_loss, + } + + return add_valve(network, ChangeSet(ps)) + + +@app.post("/deletevalve/", response_model=None) +async def fastapi_delete_valve(network: str, valve: str) -> ChangeSet: + ps = {"id": valve} + return delete_valve(network, ChangeSet(ps)) + + +@app.get("/getvalvenode1/") +async def fastapi_get_valve_node1(network: str, valve: str) -> str | None: + ps = get_valve(network, valve) + return ps["node1"] + + +@app.get("/getvalvenode2/") +async def fastapi_get_valve_node2(network: str, valve: 
str) -> str | None:
+    ps = get_valve(network, valve)
+    return ps["node2"]
+
+
+@app.get("/getvalvediameter/")
+async def fastapi_get_valve_diameter(network: str, valve: str) -> float | None:
+    ps = get_valve(network, valve)
+    return ps["diameter"]
+
+
+@app.get("/getvalvetype/")
+async def fastapi_get_valve_type(network: str, valve: str) -> str | None:
+    ps = get_valve(network, valve)
+    return ps["type"]
+
+
+@app.get("/getvalvesetting/")
+async def fastapi_get_valve_setting(network: str, valve: str) -> float | None:
+    ps = get_valve(network, valve)
+    return ps["setting"]
+
+
+@app.get("/getvalveminorloss/")
+async def fastapi_get_valve_minor_loss(network: str, valve: str) -> float | None:
+    ps = get_valve(network, valve)
+    return ps["minor_loss"]
+
+
+@app.post("/setvalvenode1/", response_model=None)
+async def fastapi_set_valve_node1(network: str, valve: str, node1: str) -> ChangeSet:
+    ps = {"id": valve, "node1": node1}
+    return set_valve(network, ChangeSet(ps))
+
+
+@app.post("/setvalvenode2/", response_model=None)
+async def fastapi_set_valve_node2(network: str, valve: str, node2: str) -> ChangeSet:
+    ps = {"id": valve, "node2": node2}
+    return set_valve(network, ChangeSet(ps))
+
+
+@app.post("/setvalvediameter/", response_model=None)
+async def fastapi_set_valve_diameter(
+    network: str, valve: str, diameter: float
+) -> ChangeSet:
+    ps = {"id": valve, "diameter": diameter}
+    return set_valve(network, ChangeSet(ps))
+
+
+@app.post("/setvalvetype/", response_model=None)
+async def fastapi_set_valve_type(network: str, valve: str, type: str) -> ChangeSet:
+    ps = {"id": valve, "type": type}
+    return set_valve(network, ChangeSet(ps))
+
+
+@app.post("/setvalvesetting/", response_model=None)
+async def fastapi_set_valve_setting(
+    network: str, valve: str, setting: float
+) -> ChangeSet:
+    ps = {"id": valve, "setting": setting}
+    return set_valve(network, ChangeSet(ps))
+
+
+@app.get("/getvalveproperties/")
+async def fastapi_get_valve_properties(network: str, 
valve: str) -> dict[str, Any]: + return get_valve(network, valve) + + +# DingZQ, 2025-03-29 +@app.get("/getallvalveproperties/") +async def fastapi_get_all_valve_properties(network: str) -> list[dict[str, Any]]: + # 缓存查询结果提高性能 + global redis_client + cache_key = f"getallvalveproperties_{network}" + data = redis_client.get(cache_key) + if data: + # 使用自定义的反序列化函数 + loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) + return loaded_dict + + results = get_all_valves(network) + redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime)) + + return results + + +@app.post("/setvalveproperties/", response_model=None) +async def fastapi_set_valve_properties( + network: str, valve: str, req: Request +) -> ChangeSet: + props = await req.json() + ps = {"id": valve} | props + return set_valve(network, ChangeSet(ps)) + + +# node & link +@app.post("/deletenode/", response_model=None) +async def fastapi_delete_node(network: str, node: str) -> ChangeSet: + ps = {"id": node} + if is_junction(network, node): + return delete_junction(network, ChangeSet(ps)) + elif is_reservoir(network, node): + return delete_reservoir(network, ChangeSet(ps)) + elif is_tank(network, node): + return delete_tank(network, ChangeSet(ps)) + + +@app.post("/deletelink/", response_model=None) +async def fastapi_delete_link(network: str, link: str) -> ChangeSet: + ps = {"id": link} + if is_pipe(network, link): + return delete_pipe(network, ChangeSet(ps)) + elif is_pump(network, link): + return delete_pump(network, ChangeSet(ps)) + elif is_valve(network, link): + return delete_valve(network, ChangeSet(ps)) + + +############################################################ +# tag 8.[TAGS] +############################################################ +# +# TAG_TYPE_NODE = api.TAG_TYPE_NODE +# TAG_TYPE_LINK = api.TAG_TYPE_LINK +# + + +@app.get("/gettagschema/") +async def fastapi_get_tag_schema(network: str) -> dict[str, dict[str, Any]]: + return get_tag_schema(network) + + 
+@app.get("/gettag/") +async def fastapi_get_tag(network: str, t_type: str, id: str) -> dict[str, Any]: + return get_tag(network, t_type, id) + + +@app.get("/gettags/") +async def fastapi_get_tags(network: str) -> list[dict[str, Any]]: + tags = get_tags(network) + print(tags) + return tags + + +# example: +# set_tag(p, ChangeSet({'t_type': TAG_TYPE_NODE, 'id': 'j1', 'tag': 'j1t' })) +# set_tag(p, ChangeSet({'t_type': TAG_TYPE_LINK, 'id': 'p0', 'tag': 'p0t' })) +@app.post("/settag/", response_model=None) +async def fastapi_set_tag(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_tag(network, ChangeSet(props)) + + +############################################################ +# demand 9.[DEMANDS] +############################################################ +@app.get("/getdemandschema") +async def fastapi_get_demand_schema(network: str) -> dict[str, dict[str, Any]]: + return get_demand_schema(network) + + +@app.get("/getdemandproperties/") +async def fastapi_get_demand_properties(network: str, junction: str) -> dict[str, Any]: + return get_demand(network, junction) + + +# example: set_demand(p, ChangeSet({'junction': 'j1', 'demands': [{'demand': 10.0, 'pattern': None, 'category': 'x'}, {'demand': 20.0, 'pattern': None, 'category': None}]})) +@app.post("/setdemandproperties/", response_model=None) +async def fastapi_set_demand_properties( + network: str, junction: str, req: Request +) -> ChangeSet: + props = await req.json() + ps = {"junction": junction} | props + return set_demand(network, ChangeSet(ps)) + + +############################################################ +# status 10.[STATUS] init_status +############################################################ +@app.get("/getstatusschema") +async def fastapi_get_status_schema(network: str) -> dict[str, dict[str, Any]]: + return get_status_schema(network) + + +@app.get("/getstatus/") +async def fastapi_get_status(network: str, link: str) -> dict[str, Any]: + return 
get_status(network, link) + + +# example: set_status(p, ChangeSet({'link': 'p0', 'status': LINK_STATUS_OPEN, 'setting': 10.0})) +@app.post("/setstatus/", response_model=None) +async def fastapi_set_status_properties( + network: str, link: str, req: Request +) -> ChangeSet: + props = await req.json() + ps = {"link": link} | props + return set_status(network, ChangeSet(ps)) + + +############################################################ +# pattern 11.[PATTERNS] +############################################################ +@app.get("/getpatternschema") +async def fastapi_get_pattern_schema(network: str) -> dict[str, dict[str, Any]]: + return get_pattern_schema(network) + + +@app.post("/addpattern/", response_model=None) +async def fastapi_add_pattern(network: str, pattern: str, req: Request) -> ChangeSet: + props = await req.json() + ps = { + "id": pattern, + } | props + return add_pattern(network, ChangeSet(ps)) + + +@app.post("/deletepattern/", response_model=None) +async def fastapi_delete_pattern(network: str, pattern: str) -> ChangeSet: + ps = {"id": pattern} + return delete_pattern(network, ChangeSet(ps)) + + +@app.get("/getpatternproperties/") +async def fastapi_get_pattern_properties(network: str, pattern: str) -> dict[str, Any]: + return get_pattern(network, pattern) + + +# example: set_pattern(p, ChangeSet({'id' : 'p0', 'factors': [1.0, 2.0, 3.0]})) +@app.post("/setpatternproperties/", response_model=None) +async def fastapi_set_pattern_properties( + network: str, pattern: str, req: Request +) -> ChangeSet: + props = await req.json() + ps = {"id": pattern} | props + return set_pattern(network, ChangeSet(ps)) + + +############################################################ +# curve 12.[CURVES] +############################################################ +@app.get("/getcurveschema") +async def fastapi_get_curve_schema(network: str) -> dict[str, dict[str, Any]]: + return get_curve_schema(network) + + +@app.post("/addcurve/", response_model=None) +async def 
fastapi_add_curve(network: str, curve: str, req: Request) -> ChangeSet: + props = await req.json() + + print(props) + + ps = { + "id": curve, + } | props + + print(ps) + + return add_curve(network, ChangeSet(ps)) + + +@app.post("/deletecurve/", response_model=None) +async def fastapi_delete_curve(network: str, curve: str) -> ChangeSet: + ps = {"id": curve} + return delete_curve(network, ChangeSet(ps)) + + +@app.get("/getcurveproperties/") +async def fastapi_get_curve_properties(network: str, curve: str) -> dict[str, Any]: + return get_curve(network, curve) + + +# example: set_curve(p, ChangeSet({'id' : 'c0', 'c_type' : CURVE_TYPE_PUMP, 'coords': [{'x': 1.0, 'y': 2.0}, {'x': 2.0, 'y': 1.0}]})) +@app.post("/setcurveproperties/", response_model=None) +async def fastapi_set_curve_properties( + network: str, curve: str, req: Request +) -> ChangeSet: + props = await req.json() + # c_type放到request中 + ps = {"id": curve} | props + return set_curve(network, ChangeSet(ps)) + + +############################################################ +# control 13.[CONTROLS] +############################################################ +@app.get("/getcontrolschema/") +async def fastapi_get_control_schema(network: str) -> dict[str, dict[str, Any]]: + return get_control_schema(network) + + +@app.get("/getcontrolproperties/") +async def fastapi_get_control_properties(network: str) -> dict[str, Any]: + return get_control(network) + + +# example: set_control(p, ChangeSet({'control': 'x'})) +@app.post("/setcontrolproperties/", response_model=None) +async def fastapi_set_control_properties(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_control(network, ChangeSet(props)) + + +############################################################ +# rule 14.[RULES] +############################################################ +@app.get("/getruleschema/") +async def fastapi_get_rule_schema(network: str) -> dict[str, dict[str, Any]]: + return get_rule_schema(network) + + 
+@app.get("/getruleproperties/") +async def fastapi_get_rule_properties(network: str) -> dict[str, Any]: + return get_rule(network) + + +# example: set_rule(p, ChangeSet({'rule': 'x'})) +@app.post("/setruleproperties/", response_model=None) +async def fastapi_set_rule_properties(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_rule(network, ChangeSet(props)) + + +############################################################ +# energy 15.[ENERGY] +############################################################ +@app.get("/getenergyschema/") +async def fastapi_get_energy_schema(network: str) -> dict[str, dict[str, Any]]: + return get_energy_schema(network) + + +@app.get("/getenergyproperties/") +async def fastapi_get_energy_properties(network: str) -> dict[str, Any]: + return get_energy(network) + + +@app.post("/setenergyproperties/", response_model=None) +async def fastapi_set_energy_properties(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_energy(network, ChangeSet(props)) + + +@app.get("/getpumpenergyschema/") +async def fastapi_get_pump_energy_schema(network: str) -> dict[str, dict[str, Any]]: + return get_pump_energy_schema(network) + + +@app.get("/getpumpenergyproperties//") +async def fastapi_get_pump_energy_proeprties(network: str, pump: str) -> dict[str, Any]: + return get_pump_energy(network, pump) + + +@app.get("/setpumpenergyproperties//", response_model=None) +async def fastapi_set_pump_energy_properties( + network: str, pump: str, req: Request +) -> ChangeSet: + props = await req.json() + ps = {"id": pump} | props + return set_pump_energy(network, ChangeSet(ps)) + + +############################################################ +# emitter 16.[EMITTERS] +############################################################ +@app.get("/getemitterschema") +async def fastapi_get_emitter_schema(network: str) -> dict[str, dict[str, Any]]: + return get_emitter_schema(network) + + 
+@app.get("/getemitterproperties/") +async def fastapi_get_emitter_properties(network: str, junction: str) -> dict[str, Any]: + return get_emitter(network, junction) + + +# example: set_emitter(p, ChangeSet({'junction': 'j1', 'coefficient': 10.0})) +@app.post("/setemitterproperties/", response_model=None) +async def fastapi_set_emitter_properties( + network: str, junction: str, req: Request +) -> ChangeSet: + props = await req.json() + ps = {"junction": junction} | props + return set_emitter(network, ChangeSet(ps)) + + +############################################################ +# quality 17.[QUALITY] +############################################################ +@app.get("/getqualityschema/") +async def fastapi_get_quality_schema(network: str) -> dict[str, dict[str, Any]]: + return get_quality_schema(network) + + +@app.get("/getqualityproperties/") +async def fastapi_get_quality_properties(network: str, node: str) -> dict[str, Any]: + return get_quality(network, node) + + +# example: set_quality(p, ChangeSet({'node': 'j1', 'quality': 10.0})) +@app.post("/setqualityproperties/", response_model=None) +async def fastapi_set_quality_properties(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_quality(network, ChangeSet(props)) + + +############################################################ +# source 18.[SOURCES] +############################################################ +@app.get("/getsourcechema/") +async def fastapi_get_source_schema(network: str) -> dict[str, dict[str, Any]]: + return get_source_schema(network) + + +@app.get("/getsource/") +async def fastapi_get_source(network: str, node: str) -> dict[str, Any]: + return get_source(network, node) + + +@app.post("/setsource/", response_model=None) +async def fastapi_set_source(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_source(network, ChangeSet(props)) + + +# example: add_source(p, ChangeSet({'node': 'j0', 's_type': SOURCE_TYPE_CONCEN, 
'strength': 10.0, 'pattern': 'p0'})) +@app.post("/addsource/", response_model=None) +async def fastapi_add_source(network: str, req: Request) -> ChangeSet: + props = await req.json() + return add_source(network, ChangeSet(props)) + + +@app.post("/deletesource/", response_model=None) +async def fastapi_delete_source(network: str, node: str) -> ChangeSet: + props = {"node": node} + return delete_source(network, ChangeSet(props)) + + +############################################################ +# reaction 19.[REACTIONS] +############################################################ +@app.get("/getreactionschema/") +async def fastapi_get_reaction_schema(network: str) -> dict[str, dict[str, Any]]: + return get_reaction_schema(network) + + +@app.get("/getreaction/") +async def fastapi_get_reaction(network: str) -> dict[str, Any]: + return get_reaction(network) + + +@app.post("/setreaction/", response_model=None) +# set_reaction(p, ChangeSet({ 'ORDER BULK' : '10' })) +async def fastapi_set_reaction(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_reaction(network, ChangeSet(props)) + + +@app.get("/getpipereactionschema/") +async def fastapi_get_pipe_reaction_schema(network: str) -> dict[str, dict[str, Any]]: + return get_pipe_reaction_schema(network) + + +@app.get("/getpipereaction/") +async def fastapi_get_pipe_reaction(network: str, pipe: str) -> dict[str, Any]: + return get_pipe_reaction(network, pipe) + + +@app.post("/setpipereaction/", response_model=None) +async def fastapi_set_pipe_reaction(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_pipe_reaction(network, ChangeSet(props)) + + +@app.get("/gettankreactionschema/") +async def fastapi_get_tank_reaction_schema(network: str) -> dict[str, dict[str, Any]]: + return get_tank_reaction_schema(network) + + +@app.get("/gettankreaction/") +async def fastapi_get_tank_reaction(network: str, tank: str) -> dict[str, Any]: + return get_tank_reaction(network, 
tank) + + +@app.post("/settankreaction/", response_model=None) +async def fastapi_set_tank_reaction(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_tank_reaction(network, ChangeSet(props)) + + +############################################################ +# mixing 20.[MIXING] +############################################################ +@app.get("/getmixingschema/") +async def fastapi_get_mixing_schema(network: str) -> dict[str, dict[str, Any]]: + return get_mixing_schema(network) + + +@app.get("/getmixing/") +async def fastapi_get_mixing(network: str, tank: str) -> dict[str, Any]: + return get_mixing(network, tank) + + +@app.post("/setmixing/", response_model=None) +async def fastapi_set_mixing(network: str, req: Request) -> ChangeSet: + props = await req.json() + return api.set_mixing(network, ChangeSet(props)) + + +# example: add_mixing(p, ChangeSet({'tank': 't0', 'model': MIXING_MODEL_MIXED, 'value': 10.0})) +@app.post("/addmixing/", response_model=None) +async def fastapi_add_mixing(network: str, req: Request) -> ChangeSet: + props = await req.json() + return add_mixing(network, ChangeSet(props)) + + +@app.post("/deletemixing/", response_model=None) +async def fastapi_delete_mixing(network: str, req: Request) -> ChangeSet: + props = await req.json() + return delete_mixing(network, ChangeSet(props)) + + +############################################################ +# time 21.[TIME] +############################################################ +@app.get("/gettimeschema") +async def fastapi_get_time_schema(network: str) -> dict[str, dict[str, Any]]: + return get_time_schema(network) + + +@app.get("/gettimeproperties/") +async def fastapi_get_time_properties(network: str) -> dict[str, Any]: + return get_time(network) + + +@app.post("/settimeproperties/", response_model=None) +async def fastapi_set_time_properties(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_time(network, ChangeSet(props)) + + 
+############################################################ +# option 23.[OPTIONS] +############################################################ +@app.get("/getoptionschema/") +async def fastapi_get_option_schema(network: str) -> dict[str, dict[str, Any]]: + return get_option_v3_schema(network) + + +@app.get("/getoptionproperties/") +async def fastapi_get_option_properties(network: str) -> dict[str, Any]: + return get_option_v3(network) + + +@app.post("/setoptionproperties/", response_model=None) +async def fastapi_set_option_properties(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_option_v3(network, ChangeSet(props)) + + +############################################################ +# coord 24.[COORDINATES] +############################################################ +@app.get("/getnodecoord/") +async def fastapi_get_node_coord(network: str, node: str) -> dict[str, float] | None: + return get_node_coord(network, node) + + +# DingZQ, 2025-01-27, get all node coord/links +# nodes: id:type:x:y +# links: id:type:node1:node2 +# node type: junction, reservoir, tank +# link type: pipe, pump, valve +@app.get("/getnetworkgeometries/", dependencies=[Depends(verify_token)]) +async def fastapi_get_network_geometries(network: str) -> dict[str, Any] | None: + + # 获取所有节点坐标# 缓存查询结果提高性能 + global redis_client + cache_key = f"getnetworkgeometries_{network}" + data = redis_client.get(cache_key) + if data: + # 使用自定义的反序列化函数 + loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) + return loaded_dict + + coords = get_network_node_coords(network) + nodes = [] + for node_id, coord in coords.items(): + nodes.append(f"{node_id}:{coord['type']}:{coord['x']}:{coord['y']}") + links = get_network_link_nodes(network) + + # return list of scadas. 
scada : id, x, y + # scadas = get_all_scada_elements(network) + + # data from WMH's scada info + scadas = get_all_scada_info(network) + + results = {"nodes": nodes, "links": links, "scadas": scadas} + + # 缓存查询结果提高性能 + redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime)) + + return results + + +# DingZQ, 2024-12-31, get major node coord +# id:type:x:y +# type: junction, reservoir, tank +@app.get("/getmajornodecoords/") +async def fastapi_get_major_node_coords( + network: str, diameter: int +) -> list[str] | None: + start_time = time.time() + coords = get_major_node_coords(network, diameter) + end_time = time.time() + logger.info("get_major_node_coords: %s, time: %s", coords, end_time - start_time) + + result = [] + for node_id, coord in coords.items(): + result.append(f"{node_id}:{coord['type']}:{coord['x']}:{coord['y']}") + return result + + +# DingZQ, 2025-01-03, get network in extent +@app.get("/getnetworkinextent/") +async def fastapi_get_network_in_extent( + network: str, x1: float, y1: float, x2: float, y2: float +) -> dict[str, Any] | None: + nodes = api.get_nodes_in_extent(network, x1, y1, x2, y2) + links = api.get_links_in_extent(network, x1, y1, x2, y2) + return {"nodes": nodes, "links": links} + + +# DingZQ, 2024-12-08, get all links' start and end node +# link_id:link_type:node_id1:node_id2 +@app.get("/getnetworklinknodes/") +async def fastapi_get_network_link_nodes(network: str) -> list[str] | None: + return get_network_link_nodes(network) + + +# DingZQ 2024-12-31 +# 获取直径大于800的管道 +@app.get("/getmajorpipenodes/") +async def fastapi_get_major_pipe_nodes(network: str, diameter: int) -> list[str] | None: + start_time = time.time() + result = get_major_pipe_nodes(network, diameter) + end_time = time.time() + logger.info("get_major_pipe_nodes: %s, time: %s", result, end_time - start_time) + return result + + +############################################################ +# vertex 25.[VERTICES] 
+############################################################ +@app.get("/getvertexschema/") +async def fastapi_get_vertex_schema(network: str) -> dict[str, dict[str, Any]]: + return get_vertex_schema(network) + + +@app.get("/getvertexproperties/") +async def fastapi_get_vertex_properties(network: str, link: str) -> dict[str, Any]: + return get_vertex(network, link) + + +# set_vertex(p, ChangeSet({'link' : 'p0', 'coords': [{'x': 1.0, 'y': 2.0}, {'x': 2.0, 'y': 1.0}]})) +@app.post("/setvertexproperties/", response_model=None) +async def fastapi_set_vertex_properties(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_vertex(network, ChangeSet(props)) + + +@app.post("/addvertex/", response_model=None) +async def fastapi_add_vertex(network: str, req: Request) -> ChangeSet: + props = await req.json() + return add_vertex(network, ChangeSet(props)) + + +@app.post("/deletevertex/", response_model=None) +async def fastapi_delete_vertex(network: str, req: Request) -> ChangeSet: + props = await req.json() + return api.delete_vertex(network, ChangeSet(props)) + + +@app.get("/getallvertexlinks/", response_class=PlainTextResponse) +async def fastapi_get_all_vertex_links(network: str) -> list[str]: + return json.dumps(get_all_vertex_links(network)) + + +@app.get("/getallvertices/", response_class=PlainTextResponse) +async def fastapi_get_all_vertices(network: str) -> list[dict[str, Any]]: + return json.dumps(get_all_vertices(network)) + + +############################################################ +# label 26.[LABELS] +############################################################ +@app.get("/getlabelschema/") +async def fastapi_get_label_schema(network: str) -> dict[str, dict[str, Any]]: + return get_label_schema(network) + + +@app.get("/getlabelproperties/") +async def fastapi_get_label_properties( + network: str, x: float, y: float +) -> dict[str, Any]: + return get_label(network, x, y) + + +@app.post("/setlabelproperties/", response_model=None) 
+async def fastapi_set_label_properties(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_label(network, ChangeSet(props)) + + +@app.post("/addlabel/", response_model=None) +async def fastapi_add_label(network: str, req: Request) -> ChangeSet: + props = await req.json() + return add_label(network, ChangeSet(props)) + + +@app.post("/deletelabel/", response_model=None) +async def fastapi_delete_label(network: str, req: Request) -> ChangeSet: + props = await req.json() + return delete_label(network, ChangeSet(props)) + + +############################################################ +# backdrop 27.[BACKDROP] +############################################################ +@app.get("/getbackdropschema/") +async def fastapi_get_backdrop_schema(network: str) -> dict[str, dict[str, Any]]: + return get_backdrop_schema(network) + + +@app.get("/getbackdropproperties/") +async def fastapi_get_backdrop_properties(network: str) -> dict[str, Any]: + return get_backdrop(network) + + +@app.post("/setbackdropproperties/", response_model=None) +async def fastapi_set_backdrop_properties(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_backdrop(network, ChangeSet(props)) + + +############################################################ +# scada_device 29 +############################################################ +@app.get("/getscadadeviceschema/") +async def fastapi_get_scada_device_schema(network: str) -> dict[str, dict[str, Any]]: + return get_scada_device_schema(network) + + +@app.get("/getscadadevice/") +async def fastapi_get_scada_device(network: str, id: str) -> dict[str, Any]: + return get_scada_device(network, id) + + +@app.post("/setscadadevice/", response_model=None) +async def fastapi_set_scada_device(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_scada_device(network, ChangeSet(props)) + + +@app.post("/addscadadevice/", response_model=None) +async def 
fastapi_add_scada_device(network: str, req: Request) -> ChangeSet: + props = await req.json() + return add_scada_device(network, ChangeSet(props)) + + +@app.post("/deletescadadevice/", response_model=None) +async def fastapi_delete_scada_device(network: str, req: Request) -> ChangeSet: + props = await req.json() + return delete_scada_device(network, ChangeSet(props)) + + +@app.post("/cleanscadadevice/", response_model=None) +async def fastapi_clean_scada_device(network: str) -> ChangeSet: + return clean_scada_device(network) + + +@app.get("/getallscadadeviceids/") +async def fastapi_get_all_scada_device_ids(network: str) -> list[str]: + return get_all_scada_device_ids(network) + + +@app.get("/getallscadadevices/", response_class=PlainTextResponse) +async def fastapi_get_all_scada_devices(network: str) -> list[dict[str, Any]]: + return json.dumps(get_all_scada_devices(network)) + + +############################################################ +# scada_device_data 30 +############################################################ +@app.get("/getscadadevicedataschema/") +async def fastapi_get_scada_device_data_schema( + network: str, +) -> dict[str, dict[str, Any]]: + return get_scada_device_data_schema(network) + + +@app.get("/getscadadevicedata/") +async def fastapi_get_scada_device_data(network: str, id: str) -> dict[str, Any]: + return get_scada_device_data(network, id) + + +# example: set_scada_device_data(p, ChangeSet({'device_id': 'sm_device', 'data': [{ 'time': '2023-02-10 00:02:22', 'value': 100.0 }, { 'time': '2023-02-10 00:03:22', 'value': 200.0 }]})) +# time format must be 'YYYY-MM-DD HH:MM:SS' +@app.post("/setscadadevicedata/", response_model=None) +async def fastapi_set_scada_device_data(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_scada_device_data(network, ChangeSet(props)) + + +# example: add_scada_device_data(p, ChangeSet({'device_id': 'sm_device', 'time': '2023-02-10 00:02:22', 'value': 100.0})) 
+@app.post("/addscadadevicedata/", response_model=None) +async def fastapi_add_scada_device_data(network: str, req: Request) -> ChangeSet: + props = await req.json() + return add_scada_device_data(network, ChangeSet(props)) + + +# example: delete_scada_device_data(p, ChangeSet({'device_id': 'sm_device', 'time': '2023-02-12 00:02:22'})) +@app.post("/deletescadadevicedata/", response_model=None) +async def fastapi_delete_scada_device_data(network: str, req: Request) -> ChangeSet: + props = await req.json() + return delete_scada_device_data(network, ChangeSet(props)) + + +@app.post("/cleanscadadevicedata/", response_model=None) +async def fastapi_clean_scada_device_data(network: str) -> ChangeSet: + return clean_scada_device_data(network) + + +############################################################ +# scada_element 31 +############################################################ +@app.get("/getscadaelementschema/") +async def fastapi_get_scada_element_schema(network: str) -> dict[str, dict[str, Any]]: + return get_scada_element_schema(network) + + +@app.get("/getscadaelements/") +async def fastapi_get_scada_elements(network: str) -> list[str]: + return get_all_scada_elements(network) + + +@app.get("/getscadaelement/") +async def fastapi_get_scada_element(network: str, id: str) -> dict[str, Any]: + return get_scada_element(network, id) + + +@app.post("/setscadaelement/", response_model=None) +async def fastapi_set_scada_element(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_scada_element(network, ChangeSet(props)) + + +@app.post("/addscadaelement/", response_model=None) +async def fastapi_add_scada_element(network: str, req: Request) -> ChangeSet: + props = await req.json() + return add_scada_element(network, ChangeSet(props)) + + +@app.post("/deletescadaelement/", response_model=None) +async def fastapi_delete_scada_element(network: str, req: Request) -> ChangeSet: + props = await req.json() + return 
delete_scada_element(network, ChangeSet(props)) + + +@app.post("/cleanscadaelement/", response_model=None) +async def fastapi_clean_scada_element(network: str) -> ChangeSet: + return clean_scada_element(network) + + +############################################################ +# general_region 32 +############################################################ +@app.get("/getregionschema/") +async def fastapi_get_region_schema(network: str) -> dict[str, dict[str, Any]]: + return get_region_schema(network) + + +@app.get("/getregion/") +async def fastapi_get_region(network: str, id: str) -> dict[str, Any]: + return get_region(network, id) + + +@app.post("/setregion/", response_model=None) +async def fastapi_set_region(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_region(network, ChangeSet(props)) + + +# example: add_region(p, ChangeSet({'id': 'r', 'boundary': [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 0.0)]})) +@app.post("/addregion/", response_model=None) +async def fastapi_add_region(network: str, req: Request) -> ChangeSet: + props = await req.json() + return add_region(network, ChangeSet(props)) + + +@app.post("/deleteregion/", response_model=None) +async def fastapi_delete_region(network: str, req: Request) -> ChangeSet: + props = await req.json() + return delete_region(network, ChangeSet(props)) + + +############################################################ +# district_metering_area 33 +############################################################ +@app.get("/calculatedistrictmeteringareafornodes/") +async def fastapi_calculate_district_metering_area_for_nodes( + network: str, req: Request +) -> list[list[str]]: + props = await req.json() + nodes = props["nodes"] + part_count = props["part_count"] + part_type = props["part_type"] + return calculate_district_metering_area_for_nodes( + network, nodes, part_count, part_type + ) + + +@app.get("/calculatedistrictmeteringareaforregion/") +async def 
fastapi_calculate_district_metering_area_for_region( + network: str, req: Request +) -> list[list[str]]: + props = await req.json() + region = props["region"] + part_count = props["part_count"] + part_type = props["part_type"] + return calculate_district_metering_area_for_region( + network, region, part_count, part_type + ) + + +@app.get("/calculatedistrictmeteringareafornetwork/") +async def fastapi_calculate_district_metering_area_for_network( + network: str, req: Request +) -> list[list[str]]: + props = await req.json() + part_count = props["part_count"] + part_type = props["part_type"] + return calculate_district_metering_area_for_network(network, part_count, part_type) + + +@app.get("/getdistrictmeteringareaschema/") +async def fastapi_get_district_metering_area_schema( + network: str, +) -> dict[str, dict[str, Any]]: + return get_district_metering_area_schema(network) + + +@app.get("/getdistrictmeteringarea/") +async def fastapi_get_district_metering_area(network: str, id: str) -> dict[str, Any]: + return get_district_metering_area(network, id) + + +@app.post("/setdistrictmeteringarea/", response_model=None) +async def fastapi_set_district_metering_area(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_district_metering_area(network, ChangeSet(props)) + + +@app.post("/adddistrictmeteringarea/", response_model=None) +async def fastapi_add_district_metering_area(network: str, req: Request) -> ChangeSet: + props = await req.json() + + # boundary should be [(x,y), (x,y)] + boundary = props["boundary"] + newBoundary = [] + for pt in boundary: + newBoundary.append((pt[0], pt[1])) + + props["boundary"] = newBoundary + + return add_district_metering_area(network, ChangeSet(props)) + + +@app.post("/deletedistrictmeteringarea/", response_model=None) +async def fastapi_delete_district_metering_area( + network: str, req: Request +) -> ChangeSet: + props = await req.json() + return delete_district_metering_area(network, ChangeSet(props)) + 
+ +@app.get("/getalldistrictmeteringareaids/") +async def fastapi_get_all_district_metering_area_ids(network: str) -> list[str]: + return get_all_district_metering_area_ids(network) + + +@app.get("/getalldistrictmeteringareas/") +async def getalldistrictmeteringareas(network: str) -> list[dict[str, Any]]: + return get_all_district_metering_areas(network) + + +@app.post("/generatedistrictmeteringarea/", response_model=None) +async def fastapi_generate_district_metering_area( + network: str, part_count: int, part_type: int, inflate_delta: float +) -> ChangeSet: + return generate_district_metering_area( + network, part_count, part_type, inflate_delta + ) + + +@app.post("/generatesubdistrictmeteringarea/", response_model=None) +async def fastapi_generate_sub_district_metering_area( + network: str, dma: str, part_count: int, part_type: int, inflate_delta: float +) -> ChangeSet: + print(network) + print(dma) + print(part_count) + print(part_type) + print(inflate_delta) + return generate_sub_district_metering_area( + network, dma, part_count, part_type, inflate_delta + ) + + +############################################################ +# service_area 34 +############################################################ +@app.get("/calculateservicearea/") +async def fastapi_calculate_service_area( + network: str, time_index: int +) -> dict[str, Any]: + return calculate_service_area(network, time_index) + + +@app.get("/getserviceareaschema/") +async def fastapi_get_service_area_schema(network: str) -> dict[str, dict[str, Any]]: + return get_service_area_schema(network) + + +@app.get("/getservicearea/") +async def fastapi_get_service_area(network: str, id: str) -> dict[str, Any]: + return get_service_area(network, id) + + +@app.post("/setservicearea/", response_model=None) +async def fastapi_set_service_area(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_service_area(network, ChangeSet(props)) + + +@app.post("/addservicearea/", 
response_model=None) +async def fastapi_add_service_area(network: str, req: Request) -> ChangeSet: + props = await req.json() + return add_service_area(network, ChangeSet(props)) + + +@app.post("/deleteservicearea/", response_model=None) +async def fastapi_delete_service_area(network: str, req: Request) -> ChangeSet: + props = await req.json() + return delete_service_area(network, ChangeSet(props)) + + +@app.get("/getallserviceareas/") +async def fastapi_get_all_service_areas(network: str) -> list[dict[str, Any]]: + return get_all_service_areas(network) + + +@app.post("/generateservicearea/", response_model=None) +async def fastapi_generate_service_area( + network: str, inflate_delta: float +) -> ChangeSet: + return generate_service_area(network, inflate_delta) + + +############################################################ +# virtual_district 35 +############################################################ +@app.get("/calculatevirtualdistrict/") +async def fastapi_calculate_virtual_district( + network: str, centers: list[str] +) -> dict[str, list[Any]]: + return calculate_virtual_district(network, centers) + + +@app.get("/getvirtualdistrictschema/") +async def fastapi_get_virtual_district_schema( + network: str, +) -> dict[str, dict[str, Any]]: + return get_virtual_district_schema(network) + + +@app.get("/getvirtualdistrict/") +async def fastapi_get_virtual_district(network: str, id: str) -> dict[str, Any]: + return get_virtual_district(network, id) + + +@app.post("/setvirtualdistrict/", response_model=None) +async def fastapi_set_virtual_district(network: str, req: Request) -> ChangeSet: + props = await req.json() + return set_virtual_district(network, ChangeSet(props)) + + +@app.post("/addvirtualdistrict/", response_model=None) +async def fastapi_add_virtual_district(network: str, req: Request) -> ChangeSet: + props = await req.json() + return add_virtual_district(network, ChangeSet(props)) + + +@app.post("/deletevirtualdistrict/", response_model=None) +async 
def fastapi_delete_virtual_district(network: str, req: Request) -> ChangeSet: + props = await req.json() + return delete_virtual_district(network, ChangeSet(props)) + + +@app.get("/getallvirtualdistrict/") +async def fastapi_get_all_virtual_district(network: str) -> list[dict[str, Any]]: + return get_all_virtual_districts(network) + + +@app.post("/generatevirtualdistrict/", response_model=None) +async def fastapi_generate_virtual_district( + network: str, inflate_delta: float, req: Request +) -> ChangeSet: + props = await req.json() + return generate_virtual_district(network, props["centers"], inflate_delta) + + +############################################################ +# water_distribution_area 36 +############################################################ +@app.get("/calculatedemandtonodes/") +async def fastapi_calculate_demand_to_nodes( + network: str, req: Request +) -> dict[str, float]: + props = await req.json() + demand = props["demand"] + nodes = props["nodes"] + return calculate_demand_to_nodes(network, demand, nodes) + + +@app.get("/calculatedemandtoregion/") +async def fastapi_calculate_demand_to_region( + network: str, req: Request +) -> dict[str, float]: + props = await req.json() + demand = props["demand"] + region = props["region"] + return calculate_demand_to_region(network, demand, region) + + +@app.get("/calculatedemandtonetwork/") +async def fastapi_calculate_demand_to_network( + network: str, demand: float +) -> dict[str, float]: + return calculate_demand_to_network(network, demand) + + +########################################################### +# scada_info 38 || written by WMH +############################################################ +@app.get("/getscadainfoschema/") +async def fastapi_get_scada_info_schema(network: str) -> dict[str, dict[str, Any]]: + return get_scada_info_schema(network) + + +@app.get("/getscadainfo/") +async def fastapi_get_scada_info(network: str, id: str) -> dict[str, float]: + return get_scada_info(network, 
id) + + +@app.get("/getallscadainfo/") +async def fastapi_get_all_scada_info(network: str) -> list[dict[str, float]]: + return get_all_scada_info(network) + + +########################################################### +# user 39 +########################################################### +@app.get("/getuserschema/") +async def fastapi_get_user_schema(network: str) -> dict[str, dict[Any, Any]]: + return get_user_schema(network) + + +@app.get("/getuser/") +async def fastapi_get_user(network: str, user_name: str) -> dict[Any, Any]: + return get_user(network, user_name) + + +@app.get("/getallusers/") +async def fastapi_get_all_users(network: str) -> list[dict[Any, Any]]: + return get_all_users(network) + + +############################################################ +# scheme 40 +############################################################ +@app.get("/getschemeschema/") +async def fastapi_get_scheme_schema(network: str) -> dict[str, dict[Any, Any]]: + return get_scheme_schema(network) + + +@app.get("/getscheme/") +async def fastapi_get_scheme(network: str, schema_name: str) -> dict[Any, Any]: + return get_scheme(network, schema_name) + + +@app.get("/getallschemes/") +async def fastapi_get_all_schemes(network: str) -> list[dict[Any, Any]]: + return get_all_schemes(network) + + +############################################################ +# pipe_risk_probability 41 +############################################################ +@app.get("/getpiperiskprobabilitynow/") +async def fastapi_get_pipe_risk_probability_now( + network: str, pipe_id: str +) -> dict[str, Any]: + return get_pipe_risk_probability_now(network, pipe_id) + + +@app.get("/getpiperiskprobability/") +async def fastapi_get_pipe_risk_probability( + network: str, pipe_id: str +) -> dict[str, Any]: + return get_pipe_risk_probability(network, pipe_id) + + +@app.get("/getpipesriskprobability/") +async def fastapi_get_pipes_risk_probability( + network: str, pipe_ids: str +) -> list[dict[str, Any]]: + pipeids = 
pipe_ids.split(",") + return get_pipes_risk_probability(network, pipeids) + + +@app.get("/getnetworkpiperiskprobabilitynow/") +async def fastapi_get_network_pipe_risk_probability_now( + network: str, +) -> list[dict[str, Any]]: + return get_network_pipe_risk_probability_now(network) + + +# 返回一个字典,key 是管道的 id,value 是管道的几何信息 +# 几何信息是一个字典,包含 start 和 end 两个 key,value 是管道的起点和终点的坐标 +# 例如: +# "GSD240730154246A51D2C324D1A": { +# "start": [ +# 106.424759007, +# 29.815104642 +# ], +# "end": [ +# 106.424824186, +# 29.814950582 +# ] +# }, +@app.get("/getpiperiskprobabilitygeometries/") +async def fastapi_get_pipe_risk_probability_geometries(network: str) -> dict[str, Any]: + return get_pipe_risk_probability_geometries(network) + + +############################################################ +# sensor_placement 42 +############################################################ +@app.get("/getallsensorplacements/") +async def fastapi_get_all_sensor_placements(network: str) -> list[dict[Any, Any]]: + return get_all_sensor_placements(network) + + +############################################################ +# burst_locate_result 43 +############################################################ +@app.get("/getallburstlocateresults/") +async def fastapi_get_all_burst_locate_results(network: str) -> list[dict[Any, Any]]: + return get_all_burst_locate_results(network) + + +# inp file +@app.post("/uploadinp/", status_code=status.HTTP_200_OK) +async def fastapi_upload_inp(afile: bytes, name: str): + filePath = inpDir + str(name) + f = open(filePath, "wb") + f.write(afile) + f.close() + + return True + + +@app.get("/downloadinp/", status_code=status.HTTP_200_OK) +async def fastapi_download_inp(name: str, response: Response): + filePath = inpDir + name + if os.path.exists(filePath): + return FileResponse( + filePath, media_type="application/octet-stream", filename="inp.inp" + ) + else: + response.status_code = status.HTTP_400_BAD_REQUEST + return True + + +# DingZQ, 2024-12-28, convert v3 
to v2 +@app.get("/convertv3tov2/", response_model=None) +async def fastapi_convert_v3_to_v2(req: Request) -> ChangeSet: + network = "v3Tov2" + jo_root = await req.json() + inp = jo_root["inp"] + cs = convert_inp_v3_to_v2(inp) + op = cs.operations[0] + open_project(network) + op["vertex"] = json.dumps(get_all_vertices(network)) + op["scada"] = json.dumps(get_all_scada_elements(network)) + op["dma"] = json.dumps(get_all_district_metering_areas(network)) + op["sa"] = json.dumps(get_all_service_areas(network)) + op["vd"] = json.dumps(get_all_virtual_districts(network)) + op["legend"] = get_extension_data(network, "legend") + + db = get_extension_data(network, "scada_db") + print(db) + scada_db = "" + if db: + scada_db = db + print(scada_db) + op["scada_db"] = scada_db + + close_project(network) + + return cs + + +@app.get("/getjson/") +async def fastapi_get_json(): + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content={ + "code": 400, + "message": "this is message", + "data": 123, + }, + ) + + +############################################################ +# DingZQ, 2024-12-09, Add sample API to return real time data/simulation result +# influx db operation +############################################################ +@app.get("/getrealtimedata/") +async def fastapi_get_realtimedata(): + data = [random.randint(0, 100) for _ in range(100)] + return data + + +@app.get("/getsimulationresult/") +async def fastapi_get_simulationresult(): + data = [random.randint(0, 100) for _ in range(100)] + return data + + +# 下面几个query 函数,都是从 influxdb 中查询的,不与 network 绑定,用固定的network 名字 + + +# DingZQ 2025-01-31 +# def query_latest_record_by_ID(ID: str, type: str, bucket: str="realtime_data", client: InfluxDBClient=client) -> dict: +@app.get("/querynodelatestrecordbyid/") +async def fastapi_query_node_latest_record_by_id(id: str): + return influxdb_api.query_latest_record_by_ID(id, type="node") + + +@app.get("/querylinklatestrecordbyid/") +async def 
fastapi_query_link_latest_record_by_id(id: str): + return influxdb_api.query_latest_record_by_ID(id, type="link") + + +# query scada +@app.get("/queryscadalatestrecordbyid/") +async def fastapi_query_scada_latest_record_by_id(id: str): + return influxdb_api.query_latest_record_by_ID(id, type="scada") + + +# def query_all_record_by_time(query_time: str, bucket: str="realtime_data", client: InfluxDBClient=client) -> tuple: +@app.get("/queryallrecordsbytime/") +async def fastapi_query_all_records_by_time(querytime: str) -> dict[str, list]: + results: tuple = influxdb_api.query_all_records_by_time(query_time=querytime) + return {"nodes": results[0], "links": results[1]} + + +# def query_all_record_by_time_property(querytime: str, type: str, property: str, bucket: str = "realtime_simulation_result") -> tuple: +@app.get("/queryallrecordsbytimeproperty/") +async def fastapi_query_all_record_by_time_property( + querytime: str, type: str, property: str, bucket: str = "realtime_simulation_result" +) -> dict[str, list]: + results: tuple = influxdb_api.query_all_record_by_time_property( + query_time=querytime, type=type, property=property, bucket=bucket + ) + return {"results": results} + + +@app.get("/queryallschemerecordsbytimeproperty/") +async def fastapi_query_all_scheme_record_by_time_property( + querytime: str, + type: str, + property: str, + schemename: str, + bucket: str = "scheme_simulation_result", +) -> dict[str, list]: + """ + 查询指定方案某一时刻的所有记录,查询 'node' 或 'link' 的某一属性值 + + :param querytime: 查询时间,格式为 '2024-11-24T17:30:00+08:00' + :param type: 查询类型 'node' 或 'link' + :param property: 查询的属性字段名 + :param schemename: 方案名称,如 "FANGAN1761124840355" + :param bucket: 数据存储的bucket名称 + :return: 包含查询结果的字典 + """ + results: list = influxdb_api.query_all_scheme_record_by_time_property( + query_time=querytime, + type=type, + property=property, + scheme_name=schemename, + bucket=bucket, + ) + return {"results": results} + + +@app.get("/querysimulationrecordsbyidtime/") +async def 
fastapi_query_simulation_record_by_ids_time( + id: str, querytime: str, type: str, bucket: str = "realtime_simulation_result" +) -> dict[str, list]: + results: tuple = influxdb_api.query_simulation_result_by_ID_time( + ID=id, type=type, query_time=querytime, bucket=bucket + ) + return {"results": results} + + +@app.get("/queryschemesimulationrecordsbyidtime/") +async def fastapi_query_scheme_simulation_record_by_ids_time( + scheme_name: str, + id: str, + querytime: str, + type: str, + bucket: str = "scheme_simulation_result", +) -> dict[str, list]: + results: tuple = influxdb_api.query_scheme_simulation_result_by_ID_time( + scheme_name=scheme_name, ID=id, type=type, query_time=querytime, bucket=bucket + ) + return {"results": results} + + +@app.get("/queryallrecordsbydate/") +async def fastapi_query_all_records_by_date(querydate: str) -> dict: + # 缓存查询结果提高性能 + global redis_client + + is_today_or_future = time_api.is_today_or_future(querydate) + logger.info(f"isToday or future: {is_today_or_future}") + + # 今天的不要去缓存 + if not is_today_or_future: + cache_key = f"queryallrecordsbydate_{querydate}" + logger.info(f"cache_key: {cache_key}") + + data = redis_client.get(cache_key) + if data: + # 使用自定义的反序列化函数 + results = msgpack.unpackb(data, object_hook=decode_datetime) + logger.info(f"return from cache redis") + return results + + logger.info(f"query from influxdb") + + nodes_links: tuple = influxdb_api.query_all_records_by_date(query_date=querydate) + results = {"nodes": nodes_links[0], "links": nodes_links[1]} + + # 今天的不要去缓存 + if not is_today_or_future: + logger.info(f"save to cache redis") + redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime)) + + logger.info(f"return results") + + return results + + +@app.get("/queryallrecordsbytimerange/") +async def fastapi_query_all_records_by_time_range( + starttime: str, endtime: str +) -> dict[str, list]: + # 缓存查询结果提高性能 + global redis_client + + # 今天的不要去缓存 + if not time_api.is_today_or_future(starttime): + 
cache_key = f"queryallrecordsbytimerange_{starttime}_{endtime}" + data = redis_client.get(cache_key) + if data: + # 使用自定义的反序列化函数 + loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) + return loaded_dict + + nodes_links: tuple = influxdb_api.query_all_records_by_time_range( + starttime=starttime, endtime=endtime + ) + results = {"nodes": nodes_links[0], "links": nodes_links[1]} + + # 今天的不要去缓存 + if not time_api.is_today_or_future(starttime): + redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime)) + + return results + + +# 2025-03-15, DingZQ +@app.get("/queryallrecordsbydatewithtype/") +async def fastapi_query_all_records_by_date_with_type( + querydate: str, querytype: str +) -> list: + # 缓存查询结果提高性能 + global redis_client + cache_key = f"queryallrecordsbydatewithtype_{querydate}_{querytype}" + data = redis_client.get(cache_key) + if data: + # 使用自定义的反序列化函数 + loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) + return loaded_dict + + results = influxdb_api.query_all_records_by_date_with_type( + query_date=querydate, query_type=querytype + ) + + packed = msgpack.packb(results, default=encode_datetime) + redis_client.set(cache_key, packed) + + return results + + +@app.get("/queryallrecordsbyidsdatetype/") +async def fastapi_query_all_records_by_ids_date_type( + ids: str, querydate: str, querytype: str +) -> list: + # 缓存查询结果提高性能 + global redis_client + cache_key = f"queryallrecordsbydatewithtype_{querydate}_{querytype}" + data = redis_client.get(cache_key) + results = [] + if data: + # 使用自定义的反序列化函数 + results = msgpack.unpackb(data, object_hook=decode_datetime) + else: + results = influxdb_api.query_all_records_by_date_with_type( + query_date=querydate, query_type=querytype + ) + packed = msgpack.packb(results, default=encode_datetime) + redis_client.set(cache_key, packed) + + query_ids = ids.split(",") + e_results = py_linq.Enumerable(results) + lst_results = e_results.where(lambda x: x["ID"] in query_ids).to_list() + + 
return lst_results + + +# 查询指定日期、类型、属性的所有记录 +# 返回 [{'time': '2024-01-01T00:00:00Z', 'ID': '1', 'value': 1.0}, {'time': '2024-01-01T00:00:00Z', 'ID': '2', 'value': 2.0}] +@app.get("/queryallrecordsbydateproperty/") +async def fastapi_query_all_records_by_date_property( + querydate: str, querytype: str, property: str +) -> list[dict]: + # 缓存查询结果提高性能 + global redis_client + cache_key = f"queryallrecordsbydateproperty_{querydate}_{querytype}_{property}" + data = redis_client.get(cache_key) + if data: + # 使用自定义的反序列化函数 + loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) + return loaded_dict + + result_dict = influxdb_api.query_all_record_by_date_property( + query_date=querydate, type=querytype, property=property + ) + packed = msgpack.packb(result_dict, default=encode_datetime) + redis_client.set(cache_key, packed) + + return result_dict + + +# def query_curve_by_ID_property_daterange(ID: str, type: str, property: str, start_date: str, end_date: str, bucket: str="realtime_data", client: InfluxDBClient=client) -> list: +@app.get("/querynodecurvebyidpropertydaterange/") +async def fastapi_query_node_curve_by_id_property_daterange( + id: str, prop: str, startdate: str, enddate: str +): + return influxdb_api.query_curve_by_ID_property_daterange( + id, type="node", property=prop, start_date=startdate, end_date=enddate + ) + + +@app.get("/querylinkcurvebyidpropertydaterange/") +async def fastapi_query_link_curve_by_id_property_daterange( + id: str, prop: str, startdate: str, enddate: str +): + return influxdb_api.query_curve_by_ID_property_daterange( + id, type="link", property=prop, start_date=startdate, end_date=enddate + ) + + +# ids 用,隔开 +# 返回 { 'id': value1, 'id2': value2 } +# def query_SCADA_data_by_device_ID_and_time(query_ids_list: List[str], query_time: str, bucket: str="SCADA_data", client: InfluxDBClient=client) -> Dict[str, float]: +@app.get("/queryscadadatabydeviceidandtime/") +async def fastapi_query_scada_data_by_device_id_and_time(ids: str, 
querytime: str): + query_ids = ids.split(",") + logger.info(querytime) + return influxdb_api.query_SCADA_data_by_device_ID_and_time( + query_ids_list=query_ids, query_time=querytime + ) + + +# 2025/05/04 DingZQ +# 对于SCAD的曲线数据,我们需要有4 套数据值 +# 1. 原始数据 +# 2. 补充的数据 (补充前面第一步缺失的数据) +# 3. 清洗后的数据点 (用五角星表示) +# 4. 模拟曲线 + + +# 查询到的SCADA原始数据 +# 数据1 +@app.get("/queryscadadatabydeviceidandtimerange/") +async def fastapi_query_scada_data_by_device_id_and_time_range( + ids: str, starttime: str, endtime: str +): + + print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}") + + query_ids = ids.split(",") + return influxdb_api.query_SCADA_data_by_device_ID_and_timerange( + query_ids_list=query_ids, start_time=starttime, end_time=endtime + ) + + +# 查询到的SCADA补充的数据 +# 数据2 +# 注意: 这里的id是 scada_info中的 api_query_id +@app.get("/queryfillingscadadatabydeviceidandtimerange/") +async def fastapi_query_filling_scada_data_by_device_id_and_time_range( + ids: str, starttime: str, endtime: str +): + + print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}") + + query_ids = ids.split(",") + return influxdb_api.query_filling_SCADA_data_by_device_ID_and_timerange( + query_ids_list=query_ids, start_time=starttime, end_time=endtime + ) + + +# 查询到的SCADA清洗后的数据点 +# 数据3 +# 注意: 这里的id是 scada_info中的 api_query_id +@app.get("/querycleaningscadadatabydeviceidandtimerange/") +async def fastapi_query_cleaning_scada_data_by_device_id_and_time_range( + ids: str, starttime: str, endtime: str +): + + print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}") + + query_ids = ids.split(",") + return influxdb_api.query_cleaning_SCADA_data_by_device_ID_and_timerange( + query_ids_list=query_ids, start_time=starttime, end_time=endtime + ) + + +# 查询到的SCADA模拟数据(从 realtime_simulation bucket 中查找) +@app.get("/querysimulationscadadatabydeviceidandtimerange/") +async def fastapi_query_simulation_scada_data_by_device_id_and_time_range( + ids: str, starttime: str, endtime: str +): + + 
print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}") + + query_ids = ids.split(",") + return influxdb_api.query_simulation_SCADA_data_by_device_ID_and_timerange( + query_ids_list=query_ids, start_time=starttime, end_time=endtime + ) + + +# 查询指定时间范围内,多个SCADA设备的清洗后的数据 +# DingZQ, 2025-04-19 +# 2025/05/04 DingZQ 这个是将原始数据跟清洗后的数据合并到一起,暂时不需要用这个API +@app.get("/querycleanedscadadatabydeviceidandtimerange/") +async def fastapi_query_cleaned_scada_data_by_device_id_and_time_range( + ids: str, starttime: str, endtime: str +): + + print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}") + + query_ids = ids.split(",") + return influxdb_api.query_cleaned_SCADA_data_by_device_ID_and_timerange( + query_ids_list=query_ids, start_time=starttime, end_time=endtime + ) + + +@app.get("/queryscadadatabydeviceidanddate/") +async def fastapi_query_scada_data_by_device_id_and_date(ids: str, querydate: str): + query_ids = ids.split(",") + return influxdb_api.query_SCADA_data_by_device_ID_and_date( + query_ids_list=query_ids, query_date=querydate + ) + + +# DingZQ, 2025-03-08 +# 返回所有SCADA设备在指定日期的所有记录 +@app.get("/queryallscadarecordsbydate/") +async def fastapi_query_all_scada_records_by_date(querydate: str): + global redis_client + + is_today_or_future = time_api.is_today_or_future(querydate) + logger.info(f"isToday or future: {is_today_or_future}") + + # 今天的不要去缓存 + if not is_today_or_future: + cache_key = f"queryallscadarecordsbydate_{querydate}" + data = redis_client.get(cache_key) + if data: + # 使用自定义的反序列化函数 + loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) + logger.info(f"return from cache redis") + return loaded_dict + + logger.info(f"query from influxdb") + result_dict = influxdb_api.query_all_SCADA_records_by_date(query_date=querydate) + + # 今天的不要去缓存 + if not is_today_or_future: + logger.info(f"save to cache redis") + packed = msgpack.packb(result_dict, default=encode_datetime) + redis_client.set(cache_key, packed) + + 
logger.info(f"return results") + + return result_dict + + +# DingZQ, 2025-03-15 +# Scheme +@app.get("/queryallschemeallrecords/") +async def fastapi_query_all_scheme_all_records( + schemetype: str, schemename: str, querydate: str +) -> tuple: + # 缓存查询结果提高性能 + global redis_client + cache_key = f"queryallschemeallrecords_{schemetype}_{schemename}_{querydate}" + data = redis_client.get(cache_key) + if data: + # 使用自定义的反序列化函数 + loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) + return loaded_dict + + results = influxdb_api.query_scheme_all_record( + scheme_Type=schemetype, scheme_Name=schemename, query_date=querydate + ) + packed = msgpack.packb(results, default=encode_datetime) + redis_client.set(cache_key, packed) + + return results + + +# DingZQ, 2025-03-21 +# 缓存是用的queryallschemeallrecords的缓存 +@app.get("/queryschemeallrecordsproperty/") +async def fastapi_query_all_scheme_all_records_property( + schemetype: str, schemename: str, querydate: str, querytype: str, queryproperty: str +) -> list: + # 缓存查询结果提高性能 + global redis_client + cache_key = f"queryallschemeallrecords_{schemetype}_{schemename}_{querydate}" + data = redis_client.get(cache_key) + all_results = None + if data: + # 使用自定义的反序列化函数 + all_results = msgpack.unpackb(data, object_hook=decode_datetime) + else: + all_results = influxdb_api.query_scheme_all_record( + scheme_Type=schemetype, scheme_Name=schemename, query_date=querydate + ) + packed = msgpack.packb(all_results, default=encode_datetime) + redis_client.set(cache_key, packed) + + results = None + if querytype == "node": + results = all_results[0] + elif querytype == "link": + results = all_results[1] + + return results + + +@app.post("/clearrediskey/") +async def fastapi_clear_redis_key(key: str): + redis_client.delete(key) + return True + + +@app.post("/clearrediskeys/") +async def fastapi_clear_redis_keys(keys: str): + # delete keys contains the key + matched_keys = redis_client.keys(f"*{keys}*") + redis_client.delete(*matched_keys) + + 
return True + + +@app.post("/clearallredis/") +async def fastapi_clear_all_redis(): + redis_client.flushdb() + return True + + +@app.get("/queryredis/") +async def fastapi_query_redis(): + return redis_client.keys("*") + + +@app.get("/queryinfluxdbbuckets/") +async def fastapi_query_influxdb_buckets(): + return influxdb_api.query_buckets() + + +@app.get("/queryinfluxdbbucketmeasurements/") +async def fastapi_query_influxdb_bucket_measurements(bucket: str): + return influxdb_api.query_measurements(bucket=bucket) + + +# DingZQ, 2024-12-31, generate openapi.json +def generate_openapi_json(): + openapi_json_path = "openapi.json" + with open(openapi_json_path, "w") as file: + json.dump(app.openapi(), file, indent=4) + + +############################################################ +# real_time api 37 +# example: http://127.0.0.1:8000/runsimulation?network=beibeizone&start_time=2024-04-01T08:00:00Z +############################################################ +# 必须用这个PlainTextResponse,不然每个key都有引号 +# @app.get("/runsimulation/", response_class = PlainTextResponse) +# async def fastapi_run_project(network: str,start_time:str,end_time=None) -> str: +# filename = 'c:/lock.simulation' +# filename2 = 'c:/lock.simulation2' +# if os.path.exists(filename2): +# print('file exists') +# raise HTTPException(status_code=409, detail="is in simulation") +# else: +# print('file doesnt exists') +# #os.rename(filename, filename2) +# result = run_simulation(network,start_time,end_time) +# #os.rename(filename2, filename) +# return result + + +############################################################ +# real_time api 37 +# example: http://127.0.0.1:8000/runsimulation?network=beibeizone&start_time=2024-04-01T08:00:00Z +############################################################ + + +# 必须用这个PlainTextResponse,不然每个key都有引号 +# @app.get("/runsimulation/", response_class = PlainTextResponse) +# async def fastapi_run_project(network: str,start_time:str,end_time=None) -> str: +# filename = 
'c:/lock.simulation' +# filename2 = 'c:/lock.simulation2' +# if os.path.exists(filename2): +# print('file exists') +# raise HTTPException(status_code=409, detail="is in simulation") +# else: +# print('file doesnt exists') +# #os.rename(filename, filename2) +# result = run_simulation_ex(name=network, simulation_type='realtime', start_datetime=start_time, end_datetime=end_time) +# #os.rename(filename2, filename) +# return result + + +# DingZQ, 2025-05-17 +class Download_History_Data_Manually(BaseModel): + """ + download_date:样式如 datetime(2025, 5, 4) + """ + + download_date: datetime + + +# DingZQ, 2025-05-17 +@app.post("/download_history_data_manually/") +async def fastapi_download_history_data_manually( + data: Download_History_Data_Manually, +) -> None: + item = data.dict() + # 创建东八区时区对象 + tz = timezone(timedelta(hours=8)) + begin_dt = datetime.combine(item["download_date"].date(), time.min).replace( + tzinfo=tz + ) + end_dt = datetime.combine(item["download_date"].date(), time(23, 59, 59)).replace( + tzinfo=tz + ) + + # 2. 
转为字符串 + begin_time = begin_dt.isoformat() + end_time = end_dt.isoformat() + + influxdb_api.download_history_data_manually( + begin_time=begin_time, end_time=end_time + ) + + +# DingZQ, 2025-05-17 +# 新增开始时间和持续时间参数 +class Run_Simulation_Manually_by_Date(BaseModel): + """ + name:数据库名称 + simulation_date:样式如 2025-05-04 + start_time:开始时间,样式如 08:00:00 + duration:持续时间,单位为分钟 + """ + + name: str + simulation_date: str + start_time: str + duration: int + + +def run_simulation_manually_by_date( + network_name: str, base_date: datetime, start_time: str, duration: int +) -> None: + # 解析开始时间 + start_hour, start_minute, start_second = map(int, start_time.split(":")) + start_datetime = base_date.replace( + hour=start_hour, minute=start_minute, second=start_second + ) + + # 计算结束时间 + end_datetime = start_datetime + timedelta(minutes=duration) + + # 生成时间点,每15分钟一个 + current_time = start_datetime + while current_time < end_datetime: + # 格式化成ISO8601带时区格式 + iso_time = current_time.strftime("%Y-%m-%dT%H:%M:%S") + "+08:00" + + ## 执行函数调用 + simulation.run_simulation( + name=network_name, + simulation_type="realtime", + modify_pattern_start_time=iso_time, + ) + + # 增加15分钟 + current_time += timedelta(minutes=15) + + +@app.post("/runsimulationmanuallybydate/") +async def fastapi_run_simulation_manually_by_date( + data: Run_Simulation_Manually_by_Date, +) -> dict[str, str]: + item = data.dict() + print(f"item: {item}") + + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" + if os.path.exists(filename2): + print("file exists") + raise HTTPException(status_code=409, detail="is in simulation") + else: + print("file doesnt exists") + try: + simulation.query_corresponding_element_id_and_query_id(item["name"]) + simulation.query_corresponding_pattern_id_and_query_id(item["name"]) + region_result = simulation.query_non_realtime_region(item["name"]) + + globals.source_outflow_region_id = simulation.get_source_outflow_region_id( + item["name"], region_result + ) + 
globals.realtime_region_pipe_flow_and_demand_id = ( + simulation.query_realtime_region_pipe_flow_and_demand_id( + item["name"], region_result + ) + ) + globals.pipe_flow_region_patterns = ( + simulation.query_pipe_flow_region_patterns(item["name"]) + ) + + globals.non_realtime_region_patterns = ( + simulation.query_non_realtime_region_patterns( + item["name"], region_result + ) + ) + + ( + globals.source_outflow_region_patterns, + globals.realtime_region_pipe_flow_and_demand_patterns, + ) = simulation.get_realtime_region_patterns( + item["name"], + globals.source_outflow_region_id, + globals.realtime_region_pipe_flow_and_demand_id, + ) + + base_date = datetime.strptime(item["simulation_date"], "%Y-%m-%d") + + thread = threading.Thread( + target=lambda: run_simulation_manually_by_date( + item["name"], base_date, item["start_time"], item["duration"] + ) + ) + + thread.start() + thread.join() # 等待线程完成 + + return {"status": "success"} + except Exception as e: + return {"status": "error", "message": str(e)} + + # thread.join() + # DingZQ 08152025 + # matched_keys = redis_client.keys(f"*{item['simulation_date']}*") + # redis_client.delete(*matched_keys) + + +############################################################ +# real_Time api 37.5 +# example: +# response = requests.post("http://127.0.0.1:8000/runsimulation", +# data=json.dumps({'network': 'bb_server', 'simulation_type': 'extended', +# 'start_time': '2024-05-17T09:30:00Z', 'duration': 900, +# 'pump_control': {'1#': [0, 0], '2#': [1, 1], '3#': [1, 1], '4#': [1, 0], +# '5#': [45, 43], '6#': [0, 0], '7#': [0, 0]}}), +# headers={'accept': 'application/json', 'Content-Type': 'application/json'}) +############################################################ + + +# class RunSimuItem(BaseModel): +# network: str +# simulation_type: str +# start_time: str +# end_time: Optional[str] = None +# duration: Optional[int] = 900 +# pump_control: Optional[dict] = None +# +# +# @app.post("/runsimulation/") +# async def 
fastapi_run_project(item: RunSimuItem) -> str: +# item = item.dict() +# filename = 'c:/lock.simulation' +# filename2 = 'c:/lock.simulation2' +# if os.path.exists(filename2): +# print('file exists') +# raise HTTPException(status_code=409, detail="is in simulation") +# else: +# print('file doesnt exists') +# #os.rename(filename, filename2) +# result = run_simulation_ex(item['network'], item['simulation_type'], +# item['start_time'], item['end_time'], +# item['duration'], item['pump_control']) +# #os.rename(filename2, filename) +# return result + + +############################################################ +# burst analysis api 38 +# example:http://127.0.0.1:8000/burst_analysis?network=beibeizone&start_time=2024-04-01T08:00:00Z&burst_ID=ZBBGXSZW000001&burst_size=200&duration=1800 +############################################################ + +# @app.get("/burst_analysis/", response_class = PlainTextResponse) +# async def fastapi_burst_analysis(network: str,start_time:str,burst_ID:str,burst_size:float,burst_flow:float=None,duration:int=None) -> str: +# filename = 'c:/lock.simulation' +# filename2 = 'c:/lock.simulation2' +# if os.path.exists(filename2): +# print('file exists') +# raise HTTPException(status_code=409, detail="is in simulation") +# else: +# print('file doesnt exists') +# #os.rename(filename, filename2) +# result = burst_analysis(network,start_time,burst_ID,burst_size,burst_flow,duration) +# #os.rename(filename2, filename) +# return result + + +############################################################ +# burst analysis api 38.5 +# example: +# response = requests.post("http://127.0.0.1:8000/burst_analysis", +# data=json.dumps({'network': 'bb_server', +# 'start_time': '2024-05-17T09:30:00Z', +# 'burst_ID': ['ZBBGXSZW000001'], +# 'burst_size': [200], +# 'duration': 1800, +# 'pump_control': {'1#': [0, 0, 0], '2#': [1, 1, 1], '3#': [1, 1, 1], '4#': [1, 1, 1], +# '5#': [45, 45, 45], '6#': [0, 0, 0], '7#': [0, 0, 0]} +# 'valve_closed': 
['GSD2307192058576667FF7B41FF']), +# headers={'accept': 'application/json', 'Content-Type': 'application/json'}) +############################################################ + + +class BurstAnalysis(BaseModel): + name: str + modify_pattern_start_time: str + burst_ID: Union[List[str], str] = None + burst_size: Union[List[float], float, int] = None + modify_total_duration: int = 900 + modify_fixed_pump_pattern: Optional[dict[str, list]] = None + modify_variable_pump_pattern: Optional[dict[str, list]] = None + modify_valve_opening: Optional[dict[str, float]] = None + scheme_Name: Optional[str] = None + + +@app.post("/burst_analysis/") +async def fastapi_burst_analysis(data: BurstAnalysis) -> str: + item = data.dict() + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" + if os.path.exists(filename2): + print("file exists") + raise HTTPException(status_code=409, detail="is in simulation") + else: + print("file doesnt exists") + # os.rename(filename, filename2) + burst_analysis( + name=item["name"], + modify_pattern_start_time=item["modify_pattern_start_time"], + burst_ID=item["burst_ID"], + burst_size=item["burst_size"], + modify_total_duration=item["modify_total_duration"], + modify_fixed_pump_pattern=item["modify_fixed_pump_pattern"], + modify_variable_pump_pattern=item["modify_variable_pump_pattern"], + modify_valve_opening=item["modify_valve_opening"], + scheme_Name=item["scheme_Name"], + ) + # os.rename(filename2, filename) + + """ + # 将 时间转换成日期,然后缓存这个计算结果 + # 缓存key: burst_analysis__ + global redis_client + schemename = data.scheme_Name + + print(data.modify_pattern_start_time) + + querydate = time_api.get_date_from_time(data.modify_pattern_start_time) + + print(f"schemename: {schemename}, querydate: {querydate}") + + cache_key = f"queryallschemeallrecords_burst_Analysis_{schemename}_{querydate}" + data = redis_client.get(cache_key) + if not data: + results = influxdb_api.query_scheme_all_record("burst_Analysis", scheme_Name=schemename, 
query_date=querydate) + packed = msgpack.packb(results, default=encode_datetime) + redis_client.set(cache_key, packed) + """ + + return "success" + + +############################################################ +# valve close analysis api 39 +# example:http://127.0.0.1:8000/valve_close_analysis?network=beibeizone&start_time=2024-04-01T08:00:00Z&valves=GSD2307192058577780A3287D78&valves=GSD2307192058572E953B707226(S2)&duration=1800 +############################################################ + + +@app.get("/valve_close_analysis/", response_class=PlainTextResponse) +async def fastapi_valve_close_analysis( + network: str, + start_time: str, + valves: Annotated[list[str], Query()], + duration: int = None, +) -> str: + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" + if os.path.exists(filename2): + print("file exists") + raise HTTPException(status_code=409, detail="is in simulation") + else: + print("file doesnt exists") + # os.rename(filename, filename2) + result = valve_close_analysis(network, start_time, valves, duration) + # os.rename(filename2, filename) + return result + + +############################################################ +# pipe flushing analysis api 40 +# example:http://127.0.0.1:8000/flushing_analysis?network=beibeizone&start_time=2024-04-01T08:00:00Z&valves=GSD230719205857733F8F5214FF&valves=GSD230719205857C0AF65B6A170&valves_k=0.5&valves_k=0.5&drainage_node_ID=GSD2307192058570DEDF28E4F73&flush_flow=0&duration=1800 +############################################################ + + +@app.get("/flushing_analysis/", response_class=PlainTextResponse) +async def fastapi_flushing_analysis( + network: str, + start_time: str, + valves: Annotated[list[str], Query()], + valves_k: Annotated[list[float], Query()], + drainage_node_ID: str, + flush_flow: float = 0, + duration: int = None, +) -> str: + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" + if os.path.exists(filename2): + print("file exists") + raise 
HTTPException(status_code=409, detail="is in simulation") + else: + print("file doesnt exists") + # os.rename(filename, filename2) + result = flushing_analysis( + network, + start_time, + valves, + valves_k, + drainage_node_ID, + flush_flow, + duration, + ) + # os.rename(filename2, filename) + return result + + +############################################################ +# contaminant_simulation api 41 +# example:http://127.0.0.1:8000/contaminant_simulation?network=beibeizone&start_time=2024-04-01T08:00:00Z&source=ZBBDTZDP002677&concentration=100&duration=1800 +############################################################ + + +@app.get("/contaminant_simulation/", response_class=PlainTextResponse) +async def fastapi_contaminant_simulation( + network: str, + start_time: str, + source: str, + concentration: float, + duration: int = 900, + pattern: str = None, +) -> str: + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" + if os.path.exists(filename2): + print("file exists") + raise HTTPException(status_code=409, detail="is in simulation") + else: + print("file doesnt exists") + # os.rename(filename, filename2) + result = contaminant_simulation( + network, start_time, source, concentration, duration, pattern + ) + # os.rename(filename2, filename) + return result + + +############################################################ +# age analysis api 42 +# example:http://127.0.0.1:8000/age_analysis/?network=bb&start_time=2024-04-01T00:00:00Z&end_time=2024-04-01T08:00:00Z&duration=28800 +############################################################ + + +@app.get("/age_analysis/", response_class=PlainTextResponse) +async def fastapi_age_analysis( + network: str, start_time: str, end_time: str, duration: int +) -> str: + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" + if os.path.exists(filename2): + print("file exists") + raise HTTPException(status_code=409, detail="is in simulation") + else: + print("file doesnt exists") + # 
os.rename(filename, filename2) + result = age_analysis(network, start_time, end_time, duration) + # os.rename(filename2, filename) + return result + + +############################################################ +# scheduling analysis api 43 +############################################################ + + +class SchedulingAnalysis(BaseModel): + network: str + start_time: str + pump_control: dict + tank_id: str + water_plant_output_id: str + time_delta: Optional[int] = 300 + + +@app.post("/scheduling_analysis/") +async def fastapi_scheduling_analysis(data: SchedulingAnalysis) -> str: + data = data.dict() + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" + if os.path.exists(filename2): + print("file exists") + raise HTTPException(status_code=409, detail="is in simulation") + else: + print("file doesnt exists") + # os.rename(filename, filename2) + result = scheduling_simulation( + data["network"], + data["start_time"], + data["pump_control"], + data["tank_id"], + data["water_plant_output_id"], + data["time_delta"], + ) + # os.rename(filename2, filename) + return result + + +############################################################ +# pressure_regulating api 44 +# example: +# response = requests.post("http://127.0.0.1:8000/pressure_regulating", +# data=json.dumps({'network': 'bb_server', +# 'start_time': '2024-05-17T09:30:00Z', +# 'pump_control': {'1#': [0, 0], '2#': [1, 1], '3#': [1, 1], '4#': [1, 1], +# '5#': [45, 45], '6#': [0, 0], '7#': [0, 0]} +# 'tank_init_level': {'ZBBDTJSC000002': 2, 'ZBBDTJSC000001': 2}}), +# headers={'accept': 'application/json', 'Content-Type': 'application/json'}) +############################################################ + + +class PressureRegulation(BaseModel): + network: str + start_time: str + pump_control: dict + tank_init_level: Optional[dict] = None + + +@app.post("/pressure_regulation/") +async def fastapi_pressure_regulation(data: PressureRegulation) -> str: + item = data.dict() + filename = 
"c:/lock.simulation" + filename2 = "c:/lock.simulation2" + if os.path.exists(filename2): + print("file exists") + raise HTTPException(status_code=409, detail="is in simulation") + else: + print("file doesnt exists") + # os.rename(filename, filename2) + result = pressure_regulation( + prj_name=item["network"], + start_datetime=item["start_time"], + pump_control=item["pump_control"], + tank_initial_level_control=item["tank_init_level"], + ) + # os.rename(filename2, filename) + return result + + +############################################################ +# project_management api 45 +# example: +# response = requests.post("http://127.0.0.1:8000/project_management", +# data=json.dumps({'network': 'bb_server', +# 'start_time': '2024-05-17T00:00:00Z', +# 'pump_control': +# {'1#':(list:97), '2#':(list:97), '3#':(list:97), '4#':(list:97), +# '5#':(list:97), '6#':(list:97), '7#':(list:97)} +# 'tank_init_level': {'ZBBDTJSC000002': 2, 'ZBBDTJSC000001': 2} +# 'region_demand': {'hp': 150000, 'lp': 40000}}), +# headers={'accept': 'application/json', 'Content-Type': 'application/json'}) +############################################################ + + +class ProjectManagement(BaseModel): + network: str + start_time: str + pump_control: dict + tank_init_level: Optional[dict] = None + region_demand: Optional[dict] = None + + +@app.post("/project_management/") +async def fastapi_project_management(data: ProjectManagement) -> str: + item = data.dict() + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" + if os.path.exists(filename2): + print("file exists") + raise HTTPException(status_code=409, detail="is in simulation") + else: + print("file doesnt exists") + # os.rename(filename, filename2) + result = project_management( + prj_name=item["network"], + start_datetime=item["start_time"], + pump_control=item["pump_control"], + tank_initial_level_control=item["tank_init_level"], + region_demand_control=item["region_demand"], + ) + # os.rename(filename2, filename) + 
return result + + +############################################################ +# project_management api 46 +# example: +# with open('./inp/bb_temp.inp', 'rb') as file: +# response = requests.post("http://127.0.0.1:8000/network_project", +# files={'file': file}) +############################################################ + + +@app.post("/network_project/") +async def fastapi_network_project(file: UploadFile = File()) -> str: + temp_file_path = "./inp/" + if not os.path.exists(temp_file_path): + os.mkdir(temp_file_path) + temp_file_name = f'network_project_{datetime.now().strftime("%Y%m%d")}' + temp_file_path = f"{temp_file_path}{temp_file_name}.inp" + + with open(temp_file_path, "wb") as buffer: + shutil.copyfileobj(file.file, buffer) + buffer.close() + + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" + if os.path.exists(filename2): + print("file exists") + raise HTTPException(status_code=409, detail="is in simulation") + else: + print("file doesnt exists") + result = run_inp(temp_file_name) + # os.rename(filename2, filename) + return result + + +############################################################ +# daily scheduling analysis api 47 +############################################################ + + +class DailySchedulingAnalysis(BaseModel): + network: str + start_time: str + pump_control: dict + reservoir_id: str + tank_id: str + water_plant_output_id: str + time_delta: Optional[int] = 300 + + +@app.post("/daily_scheduling_analysis/") +async def fastapi_daily_scheduling_analysis(data: DailySchedulingAnalysis) -> str: + data = data.dict() + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" + if os.path.exists(filename2): + print("file exists") + raise HTTPException(status_code=409, detail="is in simulation") + else: + print("file doesnt exists") + # os.rename(filename, filename2) + result = daily_scheduling_simulation( + data["network"], + data["start_time"], + data["pump_control"], + data["reservoir_id"], + 
data["tank_id"], + data["water_plant_output_id"], + ) + # os.rename(filename2, filename) + return result + + +############################################################ +# network_update api 48 +############################################################ + + +@app.post("/network_update/") +async def fastapi_network_update(file: UploadFile = File()) -> str: + # 默认文件夹 + default_folder = "./" + + # 使用当前时间生成临时文件名 + temp_file_name = f'network_update_{datetime.now().strftime("%Y%m%d")}' + temp_file_path = os.path.join(default_folder, temp_file_name) + + # 保存上传的文件到服务器 + try: + with open(temp_file_path, "wb") as buffer: + shutil.copyfileobj(file.file, buffer) + buffer.close() + print(f"文件 {temp_file_name} 已成功保存。") + except Exception as e: + raise HTTPException(status_code=500, detail=f"文件保存失败: {e}") + + # 更新数据库 + try: + network_update(temp_file_path) + return json.dumps({"message": "管网更新成功"}) + except Exception as e: + raise HTTPException(status_code=500, detail=f"数据库操作失败: {e}") + + +############################################################ +# pump failure api 49 +############################################################ + + +class PumpFailureState(BaseModel): + time: str + pump_status: dict + + +@app.post("/pump_failure/") +async def fastapi_pump_failure(data: PumpFailureState) -> str: + item = data.dict() + + with open("./pump_failure_message.txt", "a", encoding="utf-8-sig") as f1: + f1.write( + "[{}] {}\n".format(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), item) + ) # save message + + status_info = item.copy() + with open("./pump_failure_status.txt", "r", encoding="utf-8-sig") as f2: + lines = f2.readlines() + first_stage_pump_status_dict = json.loads(json.dumps(eval(lines[0]))) + second_stage_pump_status_dict = json.loads( + json.dumps(eval(lines[-1])) + ) # read local file + pump_status_dict = { + "first": first_stage_pump_status_dict, # first-stage pump + "second": second_stage_pump_status_dict, + } # second-stage pump + for pump_type in 
status_info["pump_status"].keys(): # 'first' or 'second' + if pump_type in pump_status_dict.keys(): # the type of pumps exists + if all( + pump_id in pump_status_dict[pump_type].keys() + for pump_id in status_info["pump_status"][pump_type].keys() + ): # all pump IDs exist + for pump_id in status_info["pump_status"][pump_type].keys(): + pump_status_dict[pump_type][pump_id] = int( + status_info["pump_status"][pump_type][pump_id] + ) # modify status dict + else: + return json.dumps("ERROR: Wrong Pump ID") + else: + return json.dumps("ERROR: Wrong Pump Type") + + with open("./pump_failure_status.txt", "w", encoding="utf-8-sig") as f2_: + f2_.write( + "{}\n{}".format(pump_status_dict["first"], pump_status_dict["second"]) + ) # save local file + + return json.dumps("SUCCESS") + + +############################################################ +# pressure_sensor_placement_sensitivity api 50 +############################################################ +# 2025/05/17 +class Pressure_Sensor_Placement(BaseModel): + name: str + scheme_name: str + sensor_number: int + min_diameter: int = 0 + username: str + + +@app.post("/pressure_sensor_placement_sensitivity/") +async def fastapi_pressure_sensor_placement_sensitivity( + data: Pressure_Sensor_Placement, +) -> None: + item = data.dict() + pressure_sensor_placement_sensitivity( + name=item["name"], + scheme_name=item["scheme_name"], + sensor_number=item["sensor_number"], + min_diameter=item["min_diameter"], + username=item["username"], + ) + + +@app.post("/pressure_sensor_placement_kmeans/") +async def fastapi_pressure_sensor_placement_kmeans( + data: Pressure_Sensor_Placement, +) -> None: + item = data.dict() + pressure_sensor_placement_kmeans( + name=item["name"], + scheme_name=item["scheme_name"], + sensor_number=item["sensor_number"], + min_diameter=item["min_diameter"], + username=item["username"], + ) + + +# 后续改进:合并两个接口为一个,增加method、sensor_type参数选择方法 +@app.post("/sensorplacementscheme/create") +async def 
fastapi_pressure_sensor_placement( + network: str = Query(...), + scheme_name: str = Query(...), + sensor_type: str = Query(...), + method: str = Query(...), + sensor_count: int = Query(...), + min_diameter: int = Query(0), + user_name: str = Query(...), +) -> str: + item = { + "network": network, + "scheme_name": scheme_name, + "sensor_type": sensor_type, + "method": method, + "sensor_count": sensor_count, + "min_diameter": min_diameter, + "user_name": user_name, + } + + # 验证方法参数 + if item["method"] not in ["sensitivity", "kmeans"]: + raise HTTPException( + status_code=400, detail="Invalid method. Must be 'sensitivity' or 'kmeans'" + ) + + try: + if item["method"] == "sensitivity": + pressure_sensor_placement_sensitivity( + name=item["network"], + scheme_name=item["scheme_name"], + sensor_number=item["sensor_count"], + min_diameter=item["min_diameter"], + username=item["user_name"], + ) + elif item["method"] == "kmeans": + pressure_sensor_placement_kmeans( + name=item["network"], + scheme_name=item["scheme_name"], + sensor_number=item["sensor_count"], + min_diameter=item["min_diameter"], + username=item["user_name"], + ) + + return "success" + + except Exception as e: + raise HTTPException(status_code=500, detail=f"执行失败: {str(e)}") + + +# 新增 SCADA 设备清洗接口 +@app.post("/scadadevicedatacleaning/") +async def fastapi_scada_device_data_cleaning( + network: str = Query(...), + ids_list: List[str] = Query(...), + start_time: str = Query(...), + end_time: str = Query(...), + user_name: str = Query(...), +) -> str: + import pandas as pd # 假设可以使用 pandas 处理表格数据 + + item = { + "network": network, + "ids": ids_list, + "start_time": start_time, + "end_time": end_time, + "user_name": user_name, + } + query_ids_list = item["ids"][0].split(",") + # 先调用 query_SCADA_data_by_device_ID_and_timerange 获取原始数据 + scada_data = influxdb_api.query_SCADA_data_by_device_ID_and_timerange( + query_ids_list=query_ids_list, + start_time=item["start_time"], + end_time=item["end_time"], + ) + + # 
获取对应管网的所有 SCADA 设备信息 + scada_device_info = influxdb_api.query_pg_scada_info(item["network"]) + # 将列表转换为字典,以 device_id 为键 + scada_device_info_dict = {info["id"]: info for info in scada_device_info} + + # 按设备类型分组设备 + type_groups = {} + for device_id in query_ids_list: + device_info = scada_device_info_dict.get(device_id, {}) + device_type = device_info.get("type", "unknown") + if device_type not in type_groups: + type_groups[device_type] = [] + type_groups[device_type].append(device_id) + + # 批量处理每种类型的设备 + for device_type, device_ids in type_groups.items(): + if device_type not in ["pressure", "pipe_flow"]: + continue # 跳过未知类型 + + # 过滤该类型的设备数据 + type_scada_data = { + device_id: scada_data[device_id] + for device_id in device_ids + if device_id in scada_data + } + + if not type_scada_data: + continue + + # 假设所有设备的时间点相同,提取 time 列表 + time_list = [record["time"] for record in next(iter(type_scada_data.values()))] + + # 创建 DataFrame,第一列是 time,然后是每个设备的 value 列 + df = pd.DataFrame({"time": time_list}) + for device_id in device_ids: + if device_id in type_scada_data: + values = [record["value"] for record in type_scada_data[device_id]] + df[device_id] = values + + # 移除 time 列,准备输入给清洗方法(清洗方法期望 value 表格) + value_df = df.drop(columns=["time"]) + + # 调用清洗方法 + if device_type == "pressure": + cleaned_value_df = api_ex.Pdataclean.clean_pressure_data_dict_km(value_df) + elif device_type == "pipe_flow": + cleaned_value_df = api_ex.Fdataclean.clean_flow_data_dict(value_df) + + # 添加 time 列到首列 + cleaned_value_df = pd.DataFrame(cleaned_value_df) + # # 只选择以 '_cleaned' 结尾的清洗数据列 + # cleaned_columns = [ + # col for col in cleaned_value_df.columns if col.endswith("_cleaned") + # ] + # cleaned_value_df = cleaned_value_df[cleaned_columns] + # # 重命名列,移除 '_cleaned' 后缀 + # cleaned_value_df = cleaned_value_df.rename( + # columns={ + # col: col.replace("_cleaned", "") for col in cleaned_value_df.columns + # } + # ) + cleaned_df = pd.concat([df["time"], cleaned_value_df], axis=1) + + # 调试输出,确认列名 + 
print(f"清洗后的列名: {cleaned_df.columns.tolist()}") + + # 将清洗后的数据写回数据库 + influxdb_api.import_multicolumn_data_from_dict( + data_dict=cleaned_df.to_dict("list"), # 转换为 {column_name: [values]} 格式 + raw=False, + ) + + return "success" + + +class Item(BaseModel): + str_info: str + dict_info: Optional[dict] = None + + +@app.post("/test_dict/") +async def get_dict(item: Item): + print(item.dict()) + return item + + +if __name__ == "__main__": + # uvicorn.run(app, host="0.0.0.0", port=8000) + # url='http://127.0.0.1:8000/valve_close_analysis?network=beibeizone&start_time=2024-04-01T08:00:00Z&valve_IDs=GSD2307192058577780A3287D78&valve_IDs=GSD2307192058572E953B707226(S2)&duration=1800' + url = "http://127.0.0.1:8000/burst_analysis?network=beibeizone&start_time=2024-04-01T08:00:00Z&burst_ID=ZBBGXSZW000001&duration=1800" + # url = "http://192.168.1.36:8000/queryallschemeallrecords/?schemename=Fangan0817114448&querydate=2025-08-13&schemetype=burst_Analysis" + # response = Request.get(url) + + import requests + + response = requests.get(url) diff --git a/scripts/main_api_endpoints.md b/scripts/main_api_endpoints.md new file mode 100644 index 0000000..89e2132 --- /dev/null +++ b/scripts/main_api_endpoints.md @@ -0,0 +1,398 @@ +# API Endpoints (scripts/main.py) + +Non-commented FastAPI routes defined in `scripts/main.py`. 
+ +- `POST /login/` +- `GET /getallextensiondatakeys/` +- `GET /getallextensiondata/` +- `GET /getextensiondata/` +- `POST /setextensiondata` +- `GET /listprojects/` +- `GET /haveproject/` +- `POST /createproject/` +- `POST /deleteproject/` +- `GET /isprojectopen/` +- `POST /openproject/` +- `POST /closeproject/` +- `POST /copyproject/` +- `POST /importinp/` +- `GET /exportinp/` +- `POST /readinp/` +- `GET /dumpinp/` +- `GET /runproject/` +- `GET /runprojectreturndict/` +- `GET /runinp/` +- `GET /dumpoutput/` +- `GET /isprojectlocked/` +- `GET /isprojectlockedbyme/` +- `POST /lockproject/` +- `POST /unlockproject/` +- `GET /getcurrentoperationid/` +- `POST /undo/` +- `POST /redo/` +- `GET /getsnapshots/` +- `GET /havesnapshot/` +- `GET /havesnapshotforoperation/` +- `GET /havesnapshotforcurrentoperation/` +- `POST /takesnapshotforoperation/` +- `POST takenapshotforcurrentoperation` +- `POST /takesnapshot/` +- `POST /picksnapshot/` +- `POST /pickoperation/` +- `GET /syncwithserver/` +- `POST /batch/` +- `POST /compressedbatch/` +- `GET /getrestoreoperation/` +- `POST /setrestoreoperation/` +- `GET /isnode/` +- `GET /isjunction/` +- `GET /isreservoir/` +- `GET /istank/` +- `GET /islink/` +- `GET /ispipe/` +- `GET /ispump/` +- `GET /isvalve/` +- `GET /getnodetype/` +- `GET /getlinktype/` +- `GET /getelementtype/` +- `GET /getelementtypevalue/` +- `GET /iscurve/` +- `GET /ispattern/` +- `GET /getnodes/` +- `GET /getlinks/` +- `GET /getcurves/` +- `GET /getpatterns/` +- `GET /getnodelinks/` +- `GET /getnodeproperties/` +- `GET /getlinkproperties/` +- `GET /getscadaproperties/` +- `GET /getallscadaproperties/` +- `GET /getelementpropertieswithtype/` +- `GET /getelementproperties/` +- `GET /gettitleschema/` +- `GET /gettitle/` +- `GET /settitle/` +- `GET /getjunctionschema` +- `POST /addjunction/` +- `POST /deletejunction/` +- `GET /getjunctionelevation/` +- `GET /getjunctionx/` +- `GET /getjunctiony/` +- `GET /getjunctioncoord/` +- `GET /getjunctiondemand/` +- `GET 
/getjunctionpattern/` +- `POST /setjunctionelevation/` +- `POST /setjunctionx/` +- `POST /setjunctiony/` +- `POST /setjunctioncoord/` +- `POST /setjunctiondemand/` +- `POST /setjunctionpattern/` +- `GET /getjunctionproperties/` +- `GET /getalljunctionproperties/` +- `POST /setjunctionproperties/` +- `GET /getreservoirschema` +- `POST /addreservoir/` +- `POST /deletereservoir/` +- `GET /getreservoirhead/` +- `GET /getreservoirpattern/` +- `GET /getreservoirx/` +- `GET /getreservoiry/` +- `GET /getreservoircoord/` +- `POST /setreservoirhead/` +- `POST /setreservoirpattern/` +- `POST /setreservoirx/` +- `POST /setreservoirx/` +- `POST /setreservoircoord/` +- `GET /getreservoirproperties/` +- `GET /getallreservoirproperties/` +- `POST /setreservoirproperties/` +- `GET /gettankschema` +- `POST /addtank/` +- `POST /deletetank/` +- `GET /gettankelevation/` +- `GET /gettankinitlevel/` +- `GET /gettankminlevel/` +- `GET /gettankmaxlevel/` +- `GET /gettankdiameter/` +- `GET /gettankminvol/` +- `GET /gettankvolcurve/` +- `GET /gettankoverflow/` +- `GET /gettankx/` +- `GET /gettanky/` +- `GET /gettankcoord/` +- `POST /settankelevation/` +- `POST /settankinitlevel/` +- `POST /settankminlevel/` +- `POST /settankmaxlevel/` +- `POST settankdiameter//` +- `POST /settankminvol/` +- `POST /settankvolcurve/` +- `POST /settankoverflow/` +- `POST /settankx/` +- `POST /settanky/` +- `POST /settankcoord/` +- `GET /gettankproperties/` +- `GET /getalltankproperties/` +- `POST /settankproperties/` +- `GET /getpipeschema` +- `POST /addpipe/` +- `POST /deletepipe/` +- `GET /getpipenode1/` +- `GET /getpipenode2/` +- `GET /getpipelength/` +- `GET /getpipediameter/` +- `GET /getpiperoughness/` +- `GET /getpipeminorloss/` +- `GET /getpipestatus/` +- `POST /setpipenode1/` +- `POST /setpipenode2/` +- `POST /setpipelength/` +- `POST /setpipediameter/` +- `POST /setpiperoughness/` +- `POST /setpipeminorloss/` +- `POST /setpipestatus/` +- `GET /getpipeproperties/` +- `GET /getallpipeproperties/` +- 
`POST /setpipeproperties/` +- `GET /getpumpschema` +- `POST /addpump/` +- `POST /deletepump/` +- `GET /getpumpnode1/` +- `GET /getpumpnode2/` +- `POST /setpumpnode1/` +- `POST /setpumpnode2/` +- `GET /getpumpproperties/` +- `GET /getallpumpproperties/` +- `POST /setpumpproperties/` +- `GET /getvalveschema` +- `POST /addvalve/` +- `POST /deletevalve/` +- `GET /getvalvenode1/` +- `GET /getvalvenode2/` +- `GET /getvalvediameter/` +- `GET /getvalvetype/` +- `GET /getvalvesetting/` +- `GET /getvalveminorloss/` +- `POST /setvalvenode1/` +- `POST /setvalvenode2/` +- `POST /setvalvenodediameter/` +- `POST /setvalvetype/` +- `POST /setvalvesetting/` +- `GET /getvalveproperties/` +- `GET /getallvalveproperties/` +- `POST /setvalveproperties/` +- `POST /deletenode/` +- `POST /deletelink/` +- `GET /gettagschema/` +- `GET /gettag/` +- `GET /gettags/` +- `POST /settag/` +- `GET /getdemandschema` +- `GET /getdemandproperties/` +- `POST /setdemandproperties/` +- `GET /getstatusschema` +- `GET /getstatus/` +- `POST /setstatus/` +- `GET /getpatternschema` +- `POST /addpattern/` +- `POST /deletepattern/` +- `GET /getpatternproperties/` +- `POST /setpatternproperties/` +- `GET /getcurveschema` +- `POST /addcurve/` +- `POST /deletecurve/` +- `GET /getcurveproperties/` +- `POST /setcurveproperties/` +- `GET /getcontrolschema/` +- `GET /getcontrolproperties/` +- `POST /setcontrolproperties/` +- `GET /getruleschema/` +- `GET /getruleproperties/` +- `POST /setruleproperties/` +- `GET /getenergyschema/` +- `GET /getenergyproperties/` +- `POST /setenergyproperties/` +- `GET /getpumpenergyschema/` +- `GET /getpumpenergyproperties//` +- `GET /setpumpenergyproperties//` +- `GET /getemitterschema` +- `GET /getemitterproperties/` +- `POST /setemitterproperties/` +- `GET /getqualityschema/` +- `GET /getqualityproperties/` +- `POST /setqualityproperties/` +- `GET /getsourcechema/` +- `GET /getsource/` +- `POST /setsource/` +- `POST /addsource/` +- `POST /deletesource/` +- `GET /getreactionschema/` 
+- `GET /getreaction/` +- `POST /setreaction/` +- `GET /getpipereactionschema/` +- `GET /getpipereaction/` +- `POST /setpipereaction/` +- `GET /gettankreactionschema/` +- `GET /gettankreaction/` +- `POST /settankreaction/` +- `GET /getmixingschema/` +- `GET /getmixing/` +- `POST /setmixing/` +- `POST /addmixing/` +- `POST /deletemixing/` +- `GET /gettimeschema` +- `GET /gettimeproperties/` +- `POST /settimeproperties/` +- `GET /getoptionschema/` +- `GET /getoptionproperties/` +- `POST /setoptionproperties/` +- `GET /getnodecoord/` +- `GET /getnetworkgeometries/` +- `GET /getmajornodecoords/` +- `GET /getnetworkinextent/` +- `GET /getnetworklinknodes/` +- `GET /getmajorpipenodes/` +- `GET /getvertexschema/` +- `GET /getvertexproperties/` +- `POST /setvertexproperties/` +- `POST /addvertex/` +- `POST /deletevertex/` +- `GET /getallvertexlinks/` +- `GET /getallvertices/` +- `GET /getlabelschema/` +- `GET /getlabelproperties/` +- `POST /setlabelproperties/` +- `POST /addlabel/` +- `POST /deletelabel/` +- `GET /getbackdropschema/` +- `GET /getbackdropproperties/` +- `POST /setbackdropproperties/` +- `GET /getscadadeviceschema/` +- `GET /getscadadevice/` +- `POST /setscadadevice/` +- `POST /addscadadevice/` +- `POST /deletescadadevice/` +- `POST /cleanscadadevice/` +- `GET /getallscadadeviceids/` +- `GET /getallscadadevices/` +- `GET /getscadadevicedataschema/` +- `GET /getscadadevicedata/` +- `POST /setscadadevicedata/` +- `POST /addscadadevicedata/` +- `POST /deletescadadevicedata/` +- `POST /cleanscadadevicedata/` +- `GET /getscadaelementschema/` +- `GET /getscadaelements/` +- `GET /getscadaelement/` +- `POST /setscadaelement/` +- `POST /addscadaelement/` +- `POST /deletescadaelement/` +- `POST /cleanscadaelement/` +- `GET /getregionschema/` +- `GET /getregion/` +- `POST /setregion/` +- `POST /addregion/` +- `POST /deleteregion/` +- `GET /calculatedistrictmeteringareafornodes/` +- `GET /calculatedistrictmeteringareaforregion/` +- `GET 
/calculatedistrictmeteringareafornetwork/` +- `GET /getdistrictmeteringareaschema/` +- `GET /getdistrictmeteringarea/` +- `POST /setdistrictmeteringarea/` +- `POST /adddistrictmeteringarea/` +- `POST /deletedistrictmeteringarea/` +- `GET /getalldistrictmeteringareaids/` +- `GET /getalldistrictmeteringareas/` +- `POST /generatedistrictmeteringarea/` +- `POST /generatesubdistrictmeteringarea/` +- `GET /calculateservicearea/` +- `GET /getserviceareaschema/` +- `GET /getservicearea/` +- `POST /setservicearea/` +- `POST /addservicearea/` +- `POST /deleteservicearea/` +- `GET /getallserviceareas/` +- `POST /generateservicearea/` +- `GET /calculatevirtualdistrict/` +- `GET /getvirtualdistrictschema/` +- `GET /getvirtualdistrict/` +- `POST /setvirtualdistrict/` +- `POST /addvirtualdistrict/` +- `POST /deletevirtualdistrict/` +- `GET /getallvirtualdistrict/` +- `POST /generatevirtualdistrict/` +- `GET /calculatedemandtonodes/` +- `GET /calculatedemandtoregion/` +- `GET /calculatedemandtonetwork/` +- `GET /getscadainfoschema/` +- `GET /getscadainfo/` +- `GET /getallscadainfo/` +- `GET /getuserschema/` +- `GET /getuser/` +- `GET /getallusers/` +- `GET /getschemeschema/` +- `GET /getscheme/` +- `GET /getallschemes/` +- `GET /getpiperiskprobabilitynow/` +- `GET /getpiperiskprobability/` +- `GET /getpipesriskprobability/` +- `GET /getnetworkpiperiskprobabilitynow/` +- `GET /getpiperiskprobabilitygeometries/` +- `GET /getallsensorplacements/` +- `GET /getallburstlocateresults/` +- `POST /uploadinp/` +- `GET /downloadinp/` +- `GET /convertv3tov2/` +- `GET /getjson/` +- `GET /getrealtimedata/` +- `GET /getsimulationresult/` +- `GET /querynodelatestrecordbyid/` +- `GET /querylinklatestrecordbyid/` +- `GET /queryscadalatestrecordbyid/` +- `GET /queryallrecordsbytime/` +- `GET /queryallrecordsbytimeproperty/` +- `GET /queryallschemerecordsbytimeproperty/` +- `GET /querysimulationrecordsbyidtime/` +- `GET /queryschemesimulationrecordsbyidtime/` +- `GET /queryallrecordsbydate/` +- `GET 
/queryallrecordsbytimerange/` +- `GET /queryallrecordsbydatewithtype/` +- `GET /queryallrecordsbyidsdatetype/` +- `GET /queryallrecordsbydateproperty/` +- `GET /querynodecurvebyidpropertydaterange/` +- `GET /querylinkcurvebyidpropertydaterange/` +- `GET /queryscadadatabydeviceidandtime/` +- `GET /queryscadadatabydeviceidandtimerange/` +- `GET /queryfillingscadadatabydeviceidandtimerange/` +- `GET /querycleaningscadadatabydeviceidandtimerange/` +- `GET /querysimulationscadadatabydeviceidandtimerange/` +- `GET /querycleanedscadadatabydeviceidandtimerange/` +- `GET /queryscadadatabydeviceidanddate/` +- `GET /queryallscadarecordsbydate/` +- `GET /queryallschemeallrecords/` +- `GET /queryschemeallrecordsproperty/` +- `POST /clearrediskey/` +- `POST /clearrediskeys/` +- `POST /clearallredis/` +- `GET /queryredis/` +- `GET /queryinfluxdbbuckets/` +- `GET /queryinfluxdbbucketmeasurements/` +- `POST /download_history_data_manually/` +- `POST /runsimulationmanuallybydate/` +- `POST /burst_analysis/` +- `GET /valve_close_analysis/` +- `GET /flushing_analysis/` +- `GET /contaminant_simulation/` +- `GET /age_analysis/` +- `POST /scheduling_analysis/` +- `POST /pressure_regulation/` +- `POST /project_management/` +- `POST /network_project/` +- `POST /daily_scheduling_analysis/` +- `POST /network_update/` +- `POST /pump_failure/` +- `POST /pressure_sensor_placement_sensitivity/` +- `POST /pressure_sensor_placement_kmeans/` +- `POST /sensorplacementscheme/create` +- `POST /scadadevicedatacleaning/` +- `POST /test_dict/` diff --git a/scripts/missing_api_endpoints.md b/scripts/missing_api_endpoints.md new file mode 100644 index 0000000..d0410e4 --- /dev/null +++ b/scripts/missing_api_endpoints.md @@ -0,0 +1,26 @@ +# Missing API Endpoints + +- Legacy endpoints checked: 392 +- Current endpoints found: 401 +- Missing endpoints: 17 + +Note: Current endpoints are defined under app/api and are typically served with the /api/v1 prefix. 
+ +## Missing endpoints (legacy present, current missing) +- `GET /age_analysis/` +- `GET /contaminant_simulation/` +- `GET /flushing_analysis/` +- `GET /valve_close_analysis/` +- `POST /burst_analysis/` +- `POST /daily_scheduling_analysis/` +- `POST /network_project/` +- `POST /network_update/` +- `POST /pressure_regulation/` +- `POST /pressure_sensor_placement_kmeans/` +- `POST /pressure_sensor_placement_sensitivity/` +- `POST /project_management/` +- `POST /pump_failure/` +- `POST /runsimulationmanuallybydate/` +- `POST /scadadevicedatacleaning/` +- `POST /scheduling_analysis/` +- `POST /sensorplacementscheme/create` diff --git a/scripts/online_Analysis.py b/scripts/online_Analysis.py new file mode 100644 index 0000000..011bd48 --- /dev/null +++ b/scripts/online_Analysis.py @@ -0,0 +1,1568 @@ +import os +from app.services.tjnetwork import * +from app.native.api.project import copy_project +from app.algorithms.api_ex.run_simulation import run_simulation_ex, from_clock_to_seconds_2 +from math import sqrt, pi +from app.services.epanet.epanet import Output +import json +from datetime import datetime +import time +import pytz +import psycopg +from psycopg import sql +import pandas as pd +import csv +import chardet +import app.services.simulation as simulation +import geopandas as gpd +from sqlalchemy import create_engine +import ast +import app.services.project_info as project_info +import app.algorithms.api_ex.kmeans_sensor as kmeans_sensor +import app.algorithms.api_ex.Fdataclean as Fdataclean +import app.algorithms.api_ex.Pdataclean as Pdataclean +import app.algorithms.api_ex.sensitivity as sensitivity +from app.native.api.postgresql_info import get_pgconn_string + + +############################################################ +# burst analysis 01 +############################################################ +def convert_to_local_unit(proj: str, emitters: float) -> float: + open_project(proj) + proj_opt = get_option(proj) + str_unit = proj_opt.get("UNITS") + + if 
############################################################
# shared helpers
############################################################


def _log(message: str) -> None:
    """Print *message* prefixed with the current Asia/Shanghai timestamp."""
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- "
        + message
    )


def _prepare_scratch_project(base_name: str, new_name: str) -> None:
    """
    Drop any stale copy of *new_name*, clone ``<base_name>_template`` into it
    and open it.  Used by every analysis to get a throwaway working project.
    """
    if have_project(new_name):
        if is_project_open(new_name):
            close_project(new_name)
        delete_project(new_name)
    _log("Start Copying Database.")
    copy_project(base_name + "_template", new_name)
    _log("Start Opening Database.")
    open_project(new_name)


def _drop_scratch_project(new_name: str) -> None:
    """Close (if open) and delete the throwaway working project."""
    if is_project_open(new_name):
        close_project(new_name)
    delete_project(new_name)


############################################################
# burst analysis 01
############################################################
def convert_to_local_unit(proj: str, emitters: float) -> float:
    """
    Convert an emitter coefficient given in L/s into the project's flow unit.

    :param proj: project name in the database
    :param emitters: emitter coefficient expressed in L/s
    :return: coefficient expressed in the project's ``UNITS`` setting;
             an unknown unit is logged and the value returned unchanged
    """
    open_project(proj)
    proj_opt = get_option(proj)
    str_unit = proj_opt.get("UNITS")
    if str_unit == "CMH":
        return emitters * 3.6
    if str_unit == "LPS":
        return emitters
    if str_unit == "CMS":
        return emitters / 1000.0
    if str_unit == "MGD":
        return emitters * 0.0438126
    # Unknown unit: log it and fall back to the raw value.
    print(str_unit)
    return emitters


def burst_analysis(
    name: str,
    modify_pattern_start_time: str,
    burst_ID: list | str = None,
    burst_size: list | float | int = None,
    modify_total_duration: int = 900,
    modify_fixed_pump_pattern: dict[str, list] = None,
    modify_variable_pump_pattern: dict[str, list] = None,
    modify_valve_opening: dict[str, float] = None,
    scheme_Name: str = None,
) -> str | None:
    """
    Pipe-burst simulation.

    :param name: model name as stored in the database
    :param modify_pattern_start_time: simulation start time, e.g. '2024-11-25T09:00:00+08:00'
    :param burst_ID: id(s) of the burst pipe(s); a single str or a list of ids
    :param burst_size: orifice area(s) in cm^2, aligned position-wise with burst_ID;
        omitted/non-positive entries default to 1/8 of the pipe cross-section
    :param modify_total_duration: total simulation duration in seconds
    :param modify_fixed_pump_pattern: fixed-speed pump id -> replacement pattern
    :param modify_variable_pump_pattern: variable-speed pump id -> replacement pattern
    :param modify_valve_opening: valve id -> replacement opening
    :param scheme_Name: scheme name
    :return: a JSON error string on invalid arguments, otherwise None
    """
    scheme_detail: dict = {
        "burst_ID": burst_ID,
        "burst_size": burst_size,
        "modify_total_duration": modify_total_duration,
        "modify_fixed_pump_pattern": modify_fixed_pump_pattern,
        "modify_variable_pump_pattern": modify_variable_pump_pattern,
        "modify_valve_opening": modify_valve_opening,
    }
    _log("Start Analysis.")
    new_name = f"burst_Anal_{name}"
    _prepare_scratch_project(name, new_name)
    # Pre-load the model state at the requested start time.
    simulation.run_simulation(
        name=new_name,
        simulation_type="manually_temporary",
        modify_pattern_start_time=modify_pattern_start_time,
    )
    _log("Database Loading OK.")
    # step 1: normalise burst_ID / burst_size and set the emitter coefficient
    # of an end node of each burst pipe.
    if isinstance(burst_ID, list):
        if (burst_size is not None) and not isinstance(burst_size, list):
            return json.dumps("Type mismatch.")
    elif isinstance(burst_ID, str):
        burst_ID = [burst_ID]
        if burst_size is not None:
            if isinstance(burst_size, (float, int)):
                burst_size = [burst_size]
            else:
                return json.dumps("Type mismatch.")
    else:
        return json.dumps("Type mismatch.")
    if burst_size is None:
        burst_size = [-1] * len(burst_ID)
    elif len(burst_size) < len(burst_ID):
        # Pad with -1 so missing entries use the default orifice size.
        burst_size += [-1] * (len(burst_ID) - len(burst_size))
    elif len(burst_size) > len(burst_ID):
        return json.dumps("Length mismatch.")
    for burst_ID_, burst_size_ in zip(burst_ID, burst_size):
        pipe = get_pipe(new_name, burst_ID_)
        str_start_node = pipe["node1"]
        str_end_node = pipe["node2"]
        d_pipe = pipe["diameter"] / 1000.0
        if burst_size_ <= 0:
            # Default: 1/8 of the pipe cross-sectional area (m^2).
            burst_size_ = 3.14 * d_pipe * d_pipe / 4 / 8
        else:
            # cm^2 -> m^2
            burst_size_ = burst_size_ / 10000
        # Orifice equation: Cd * A * sqrt(2g) * 1000 -> L/s.
        emitter_coeff = 0.65 * burst_size_ * sqrt(19.6) * 1000
        emitter_coeff = convert_to_local_unit(new_name, emitter_coeff)
        emitter_node = ""
        if is_junction(new_name, str_end_node):
            emitter_node = str_end_node
        elif is_junction(new_name, str_start_node):
            emitter_node = str_start_node
        # NOTE(review): if neither endpoint is a junction, emitter_node stays ""
        # and get_emitter() is called with an empty id -- confirm intended.
        old_emitter = get_emitter(new_name, emitter_node)
        if old_emitter is not None:
            old_emitter["coefficient"] = emitter_coeff
        else:
            old_emitter = {"junction": emitter_node, "coefficient": emitter_coeff}
        new_emitter = ChangeSet()
        new_emitter.append(old_emitter)
        set_emitter(new_name, new_emitter)
    # step 2: run the simulation.  Valve closures can leave residual flow under
    # demand-driven analysis, so switch to pressure-driven (PDA).
    options = get_option(new_name)
    options["DEMAND MODEL"] = OPTION_DEMAND_MODEL_PDA
    options["REQUIRED PRESSURE"] = "10.0000"
    cs_options = ChangeSet()
    cs_options.append(options)
    set_option(new_name, cs_options)
    simulation.run_simulation(
        name=new_name,
        simulation_type="extended",
        modify_pattern_start_time=modify_pattern_start_time,
        modify_total_duration=modify_total_duration,
        modify_fixed_pump_pattern=modify_fixed_pump_pattern,
        modify_variable_pump_pattern=modify_variable_pump_pattern,
        modify_valve_opening=modify_valve_opening,
        scheme_Type="burst_Analysis",
        scheme_Name=scheme_Name,
    )
    # step 3: discard the scratch project and record the scheme.
    _drop_scratch_project(new_name)
    store_scheme_info(
        name=name,
        scheme_name=scheme_Name,
        scheme_type="burst_Analysis",
        username="admin",
        scheme_start_time=modify_pattern_start_time,
        scheme_detail=scheme_detail,
    )


############################################################
# valve closing analysis 02
############################################################
def valve_close_analysis(
    name: str,
    modify_pattern_start_time: str,
    modify_total_duration: int = 900,
    modify_valve_opening: dict[str, float] = None,
    scheme_Name: str = None,
) -> None:
    """
    Valve-closure simulation.

    :param name: model name as stored in the database
    :param modify_pattern_start_time: simulation start time, e.g. '2024-11-25T09:00:00+08:00'
    :param modify_total_duration: total simulation duration in seconds
    :param modify_valve_opening: valve id -> replacement opening
    :param scheme_Name: scheme name
    """
    _log("Start Analysis.")
    new_name = f"valve_close_Anal_{name}"
    _prepare_scratch_project(name, new_name)
    _log("Database Loading OK.")
    # Valve statuses are applied through modify_valve_opening inside
    # run_simulation; closures can leave residual flow under demand-driven
    # analysis, so switch to pressure-driven (PDA).
    options = get_option(new_name)
    options["DEMAND MODEL"] = OPTION_DEMAND_MODEL_PDA
    options["REQUIRED PRESSURE"] = "20.0000"
    cs_options = ChangeSet()
    cs_options.append(options)
    set_option(new_name, cs_options)
    simulation.run_simulation(
        name=new_name,
        simulation_type="extended",
        modify_pattern_start_time=modify_pattern_start_time,
        modify_total_duration=modify_total_duration,
        modify_valve_opening=modify_valve_opening,
        scheme_Type="valve_close_Analysis",
        scheme_Name=scheme_Name,
    )
    _drop_scratch_project(new_name)


############################################################
# flushing analysis 03
############################################################
def flushing_analysis(
    name: str,
    modify_pattern_start_time: str,
    modify_total_duration: int = 900,
    modify_valve_opening: dict[str, float] = None,
    drainage_node_ID: str = None,
    flushing_flow: float = 0,
    scheme_Name: str = None,
) -> str | None:
    """
    Pipe-flushing simulation.

    :param name: model name as stored in the database
    :param modify_pattern_start_time: simulation start time, e.g. '2024-11-25T09:00:00+08:00'
    :param modify_total_duration: total simulation duration in seconds
    :param modify_valve_opening: valve id -> replacement opening
    :param drainage_node_ID: junction id of the flushing outlet
    :param flushing_flow: flushing flow in m3/h; if <= 0 an emitter sized from
        the largest connected pipe is used instead of a fixed demand
    :return: an error string when drainage_node_ID is not a junction, else None
    """
    _log("Start Analysis.")
    new_name = f"flushing_Anal_{name}"
    _prepare_scratch_project(name, new_name)
    _log("Database Loading OK.")
    if not is_junction(new_name, drainage_node_ID):
        # NOTE(review): early return leaves the scratch project open -- confirm.
        return "Wrong Drainage node type"
    # BUGFIX: the original compared the whole options dict against "LPS"/"CMH",
    # so the demand adjustment below could never trigger; read UNITS instead
    # (same lookup as convert_to_local_unit).
    units = get_option(new_name).get("UNITS")
    # step 2: either add the flushing flow to the drainage node's demand, or
    # model a free discharge via an emitter.
    emitter_demand = get_demand(new_name, drainage_node_ID)
    if flushing_flow > 0:
        for r in emitter_demand["demands"]:
            if units == "LPS":
                r["demand"] += flushing_flow / 3.6
            elif units == "CMH":
                r["demand"] += flushing_flow
        cs = ChangeSet()
        cs.append(emitter_demand)
        set_demand(new_name, cs)
    else:
        # Emitter sized from the largest pipe connected to the node (min 50 mm).
        pipes = get_node_links(new_name, drainage_node_ID)
        flush_diameter = 50
        for pipe in pipes:
            d = get_pipe(new_name, pipe)["diameter"]
            if flush_diameter < d:
                flush_diameter = d
        flush_diameter /= 1000
        # Full opening area as the coefficient: Cd * A * sqrt(2g) * 1000.
        emitter_coeff = (
            0.65 * 3.14 * (flush_diameter * flush_diameter / 4) * sqrt(19.6) * 1000
        )
        old_emitter = get_emitter(new_name, drainage_node_ID)
        if old_emitter is not None:
            old_emitter["coefficient"] = emitter_coeff
        else:
            old_emitter = {"junction": drainage_node_ID, "coefficient": emitter_coeff}
        new_emitter = ChangeSet()
        new_emitter.append(old_emitter)
        set_emitter(new_name, new_emitter)
    # step 3: run the simulation under pressure-driven analysis (PDA).
    options = get_option(new_name)
    options["DEMAND MODEL"] = OPTION_DEMAND_MODEL_PDA
    options["REQUIRED PRESSURE"] = "20.0000"
    cs_options = ChangeSet()
    cs_options.append(options)
    set_option(new_name, cs_options)
    simulation.run_simulation(
        name=new_name,
        simulation_type="extended",
        modify_pattern_start_time=modify_pattern_start_time,
        modify_total_duration=modify_total_duration,
        modify_valve_opening=modify_valve_opening,
        scheme_Type="flushing_Analysis",
        scheme_Name=scheme_Name,
    )
    # step 4: discard the scratch project.
    _drop_scratch_project(new_name)


############################################################
# Contaminant simulation 04
############################################################
def contaminant_simulation(
    name: str,
    modify_pattern_start_time: str,
    modify_total_duration: int = 900,
    source: str = None,
    concentration: float = None,
    source_pattern: str = None,
    scheme_Name: str = None,
) -> str | None:
    """
    Contaminant-intrusion simulation.

    :param name: model name as stored in the database
    :param modify_pattern_start_time: simulation start time, e.g. '2024-11-25T09:00:00+08:00'
    :param modify_total_duration: total simulation duration in seconds
    :param source: node id of the contamination source
    :param concentration: source concentration in mg/L (source type CONCEN)
    :param source_pattern: name of an existing time pattern for the source;
        when omitted a constant-strength pattern spanning the duration is
        generated (one factor per hydraulic time step)
    :param scheme_Name: scheme name
    :return: an error string when source_pattern cannot be found, else None
    """
    _log("Start Analysis.")
    new_name = f"contaminant_Sim_{name}"
    _prepare_scratch_project(name, new_name)
    _log("Database Loading OK.")
    dic_time = get_time(new_name)
    dic_time["QUALITY TIMESTEP"] = "0:05:00"
    cs = ChangeSet()
    # NOTE(review): other call sites use cs.append(); kept verbatim here.
    cs.operations.append(dic_time)
    set_time(new_name, cs)  # set QUALITY TIMESTEP
    time_option = get_time(new_name)
    hydraulic_step = time_option["HYDRAULIC TIMESTEP"]
    secs = from_clock_to_seconds_2(hydraulic_step)
    operation_step = 0
    # step 1: default the duration to one hydraulic step.
    if modify_total_duration is None:
        modify_total_duration = secs
    # step 2: resolve or build the source pattern.
    if source_pattern is not None:
        pt = get_pattern(new_name, source_pattern)
        if pt is None:
            return "cant find source_pattern"
    else:
        # Constant-strength pattern: one 1.0 factor per hydraulic step.
        cs_pattern = ChangeSet()
        pt = {}
        factors = []
        tmp_duration = modify_total_duration
        while tmp_duration > 0:
            factors.append(1.0)
            tmp_duration = tmp_duration - secs
        pt["id"] = "contam_pt"
        pt["factors"] = factors
        cs_pattern.append(pt)
        add_pattern(new_name, cs_pattern)
        operation_step += 1
    # step 3: set the source quality and force the node to act as an inflow.
    cs_source = ChangeSet()
    source_schema = {
        "node": source,
        "s_type": SOURCE_TYPE_CONCEN,
        "strength": concentration,
        "pattern": pt["id"],
    }
    cs_source.append(source_schema)
    source_node = get_source(new_name, source)
    if len(source_node) == 0:
        add_source(new_name, cs_source)
    else:
        set_source(new_name, cs_source)
    dict_demand = get_demand(new_name, source)
    # BUGFIX: the original re-located each entry with list.index() (O(n^2) and
    # wrong with duplicate entries); mutate the entries directly instead.
    for demand_entry in dict_demand["demands"]:
        demand_entry["demand"] = -1
        demand_entry["pattern"] = None
    cs = ChangeSet()
    cs.append(dict_demand)
    set_demand(new_name, cs)  # set inflow node
    operation_step += 1
    # step 4: switch the quality option to chemical.
    opt = get_option(new_name)
    opt["QUALITY"] = OPTION_QUALITY_CHEMICAL
    cs_option = ChangeSet()
    cs_option.append(opt)
    set_option(new_name, cs_option)
    operation_step += 1
    # step 5: run the simulation, then discard the scratch project.
    simulation.run_simulation(
        name=new_name,
        simulation_type="extended",
        modify_pattern_start_time=modify_pattern_start_time,
        modify_total_duration=modify_total_duration,
        scheme_Type="contaminant_Analysis",
        scheme_Name=scheme_Name,
    )
    _drop_scratch_project(new_name)


############################################################
# age analysis 05
# *** not yet wired into realtime simulation -- do not use for now ***
############################################################
def age_analysis(
    name: str, modify_pattern_start_time: str, modify_total_duration: int = 900
) -> str:
    """
    Water-age simulation.

    :param name: model name as stored in the database
    :param modify_pattern_start_time: simulation start time, e.g. '2024-11-25T09:00:00+08:00'
    :param modify_total_duration: total simulation duration in seconds
    :return: JSON string {"nodes": [...], "links": [...]} with the final-step
             quality (age) of every node and link
    """
    _log("Start Analysis.")
    new_name = f"age_Anal_{name}"
    _prepare_scratch_project(name, new_name)
    _log("Database Loading OK.")
    # step 1: run the simulation.
    # NOTE(review): other run_simulation_ex call sites pass end_datetime as the
    # 4th positional argument; here the duration is passed there -- confirm.
    run_simulation_ex(
        new_name,
        "realtime",
        modify_pattern_start_time,
        modify_total_duration,
        downloading_prohibition=True,
    )
    # step 2: discard the scratch project and read the binary output.
    _drop_scratch_project(new_name)
    output = Output("./temp/{}.db.out".format(new_name))
    nodes_age = [node["result"][-1]["quality"] for node in output.node_results()]
    links_age = [link["result"][-1]["quality"] for link in output.link_results()]
    age_result = {"nodes": nodes_age, "links": links_age}
    return json.dumps(age_result)


############################################################
# pressure regulation 06
############################################################
def pressure_regulation(
    name: str,
    modify_pattern_start_time: str,
    modify_total_duration: int = 900,
    modify_tank_initial_level: dict[str, float] = None,
    modify_fixed_pump_pattern: dict[str, list] = None,
    modify_variable_pump_pattern: dict[str, list] = None,
    scheme_Name: str = None,
) -> None:
    """
    Regional pressure-regulation simulation: effect of switching pumps on
    regional pressure over the next ~15 minutes.

    :param name: model name as stored in the database
    :param modify_pattern_start_time: simulation start time, e.g. '2024-11-25T09:00:00+08:00'
    :param modify_total_duration: total simulation duration in seconds
    :param modify_tank_initial_level: tank id -> replacement initial level
    :param modify_fixed_pump_pattern: fixed-speed pump id -> replacement pattern
    :param modify_variable_pump_pattern: variable-speed pump id -> replacement pattern
    :param scheme_Name: scheme name
    """
    _log("Start Analysis.")
    new_name = f"pressure_regulation_{name}"
    _prepare_scratch_project(name, new_name)
    _log("Database Loading OK.")
    # With all pumps off, demand-driven pressures are unrealistic -> PDA.
    options = get_option(new_name)
    options["DEMAND MODEL"] = OPTION_DEMAND_MODEL_PDA
    options["REQUIRED PRESSURE"] = "15.0000"
    cs_options = ChangeSet()
    cs_options.append(options)
    set_option(new_name, cs_options)
    simulation.run_simulation(
        name=new_name,
        simulation_type="extended",
        modify_pattern_start_time=modify_pattern_start_time,
        modify_total_duration=modify_total_duration,
        modify_tank_initial_level=modify_tank_initial_level,
        modify_fixed_pump_pattern=modify_fixed_pump_pattern,
        modify_variable_pump_pattern=modify_variable_pump_pattern,
        scheme_Type="pressure_regulation",
        scheme_Name=scheme_Name,
    )
    _drop_scratch_project(new_name)


############################################################
# project management 07 *** unused for now; unrelated to business needs ***
# NOTE(review): the project_management() definition continues in the
# following patch lines.
############################################################
project_management( + prj_name, + start_datetime, + pump_control, + tank_initial_level_control=None, + region_demand_control=None, +) -> str: + print( + datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S") + + " -- Start Analysis." + ) + new_name = f"project_management_{prj_name}" + if have_project(new_name): + if is_project_open(new_name): + close_project(new_name) + delete_project(new_name) + # if is_project_open(prj_name): + # close_project(prj_name) + print( + datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S") + + " -- Start Copying Database." + ) + # CopyProjectEx()(prj_name, new_name, + # ['operation', 'current_operation', 'restore_operation', 'batch_operation', 'operation_table']) + copy_project(prj_name + "_template", new_name) + print( + datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S") + + " -- Start Opening Database." + ) + open_project(new_name) + print( + datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S") + + " -- Database Loading OK." + ) + result = run_simulation_ex( + name=new_name, + simulation_type="realtime", + start_datetime=start_datetime, + duration=86400, + pump_control=pump_control, + tank_initial_level_control=tank_initial_level_control, + region_demand_control=region_demand_control, + downloading_prohibition=True, + ) + if is_project_open(new_name): + close_project(new_name) + delete_project(new_name) + return result + + +############################################################ +# scheduling analysis 08 ***暂时不使用,与业务需求无关*** +############################################################ + + +def scheduling_simulation( + prj_name, start_time, pump_control, tank_id, water_plant_output_id, time_delta=300 +) -> str: + print( + datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S") + + " -- Start Analysis." 
+ ) + new_name = f"scheduling_{prj_name}" + + if have_project(new_name): + if is_project_open(new_name): + close_project(new_name) + delete_project(new_name) + # if is_project_open(prj_name): + # close_project(prj_name) + print( + datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S") + + " -- Start Copying Database." + ) + # CopyProjectEx()(prj_name, new_name, + # ['operation', 'current_operation', 'restore_operation', 'batch_operation', 'operation_table']) + copy_project(prj_name + "_template", new_name) + print( + datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S") + + " -- Start Opening Database." + ) + open_project(new_name) + print( + datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S") + + " -- Database Loading OK." + ) + + run_simulation_ex( + new_name, "realtime", start_time, duration=0, pump_control=pump_control + ) + + if not is_project_open(new_name): + open_project(new_name) + + tank = get_tank(new_name, tank_id) # 水塔信息 + tank_floor_space = pi * pow(tank["diameter"] / 2, 2) # 水塔底面积(m^2) + tank_init_level = tank["init_level"] # 水塔初始水位(m) + tank_pipes_id = tank["links"] # pipes list + + tank_pipe_flow_direction = ( + {} + ) # 管道流向修正系数, 水塔为下游节点时为1, 水塔为上游节点时为-1 + for pipe_id in tank_pipes_id: + if get_pipe(new_name, pipe_id)["node2"] == tank_id: # 水塔为下游节点 + tank_pipe_flow_direction[pipe_id] = 1 + else: + tank_pipe_flow_direction[pipe_id] = -1 + + output = Output("./temp/{}.db.out".format(new_name)) + + node_results = ( + output.node_results() + ) # [{'node': str, 'result': [{'pressure': float}]}] + water_plant_output_pressure = 0 + for node_result in node_results: + if node_result["node"] == water_plant_output_id: # 水厂出水压力(m) + water_plant_output_pressure = node_result["result"][-1]["pressure"] + water_plant_output_pressure /= 100 # 预计水厂出水压力(Mpa) + + pipe_results = output.link_results() # [{'link': str, 'result': [{'flow': float}]}] + tank_inflow = 0 + for pipe_result in pipe_results: + for 
pipe_id in tank_pipes_id: # 遍历与水塔相连的管道 + if pipe_result["link"] == pipe_id: # 水塔入流流量(L/s) + tank_inflow += ( + pipe_result["result"][-1]["flow"] + * tank_pipe_flow_direction[pipe_id] + ) + tank_inflow /= 1000 # 水塔入流流量(m^3/s) + tank_level_delta = tank_inflow * time_delta / tank_floor_space # 水塔水位改变值(m) + tank_level = tank_init_level + tank_level_delta # 预计水塔水位(m) + + simulation_results = { + "water_plant_output_pressure": water_plant_output_pressure, + "tank_init_level": tank_init_level, + "tank_level": tank_level, + } + + if is_project_open(new_name): + close_project(new_name) + delete_project(new_name) + + return json.dumps(simulation_results) + + +def daily_scheduling_simulation( + prj_name, start_time, pump_control, reservoir_id, tank_id, water_plant_output_id +) -> str: + print( + datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S") + + " -- Start Analysis." + ) + new_name = f"daily_scheduling_{prj_name}" + + if have_project(new_name): + if is_project_open(new_name): + close_project(new_name) + delete_project(new_name) + # if is_project_open(prj_name): + # close_project(prj_name) + print( + datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S") + + " -- Start Copying Database." + ) + # CopyProjectEx()(prj_name, new_name, + # ['operation', 'current_operation', 'restore_operation', 'batch_operation', 'operation_table']) + copy_project(prj_name + "_template", new_name) + print( + datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S") + + " -- Start Opening Database." + ) + open_project(new_name) + print( + datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S") + + " -- Database Loading OK." 
+ ) + + run_simulation_ex( + new_name, "realtime", start_time, duration=86400, pump_control=pump_control + ) + + if not is_project_open(new_name): + open_project(new_name) + + output = Output("./temp/{}.db.out".format(new_name)) + + node_results = ( + output.node_results() + ) # [{'node': str, 'result': [{'pressure': float, 'head': float}]}] + water_plant_output_pressure = [] + reservoir_level = [] + tank_level = [] + for node_result in node_results: + if node_result["node"] == water_plant_output_id: + for result in node_result["result"]: + water_plant_output_pressure.append( + result["pressure"] / 100 + ) # 水厂出水压力(Mpa) + elif node_result["node"] == reservoir_id: + for result in node_result["result"]: + reservoir_level.append(result["head"] - 250.35) # 清水池液位(m) + elif node_result["node"] == tank_id: + for result in node_result["result"]: + tank_level.append(result["pressure"]) # 调节池液位(m) + + simulation_results = { + "water_plant_output_pressure": water_plant_output_pressure, + "reservoir_level": reservoir_level, + "tank_level": tank_level, + } + + if is_project_open(new_name): + close_project(new_name) + delete_project(new_name) + + return json.dumps(simulation_results) + + +############################################################ +# network_update 10 +############################################################ + + +def network_update(file_path: str) -> None: + """ + 更新pg数据库中的inp文件 + :param file_path: inp文件 + :return: + """ + read_inp("szh", file_path) + + csv_path = "./history_pattern_flow.csv" + + # # 检查文件是否存在 + # if os.path.exists(csv_path): + # print(f"history_patterns_flows文件存在,开始处理...") + # + # # 读取 CSV 文件 + # df = pd.read_csv(csv_path) + # + # # 连接到 PostgreSQL 数据库(这里是数据库 "bb") + # with psycopg.connect("dbname=bb host=127.0.0.1") as conn: + # with conn.cursor() as cur: + # for index, row in df.iterrows(): + # # 直接将数据插入,不进行唯一性检查 + # insert_sql = sql.SQL(""" + # INSERT INTO history_patterns_flows (id, factor, flow) + # VALUES (%s, %s, %s); + # """) + # # 
将数据插入数据库 + # cur.execute(insert_sql, (row['id'], row['factor'], row['flow'])) + # conn.commit() + # print("数据成功导入到 'history_patterns_flows' 表格。") + # else: + # print(f"history_patterns_flows文件不存在。") + # 检查文件是否存在 + if os.path.exists(csv_path): + print(f"history_patterns_flows文件存在,开始处理...") + + # 连接到 PostgreSQL 数据库(这里是数据库 "bb") + with psycopg.connect(f"dbname={project_info.name} host=127.0.0.1") as conn: + with conn.cursor() as cur: + with open(csv_path, newline="", encoding="utf-8-sig") as csvfile: + reader = csv.DictReader(csvfile) + for row in reader: + # 直接将数据插入,不进行唯一性检查 + insert_sql = sql.SQL( + """ + INSERT INTO history_patterns_flows (id, factor, flow) + VALUES (%s, %s, %s); + """ + ) + # 将数据插入数据库 + cur.execute(insert_sql, (row["id"], row["factor"], row["flow"])) + conn.commit() + print("数据成功导入到 'history_patterns_flows' 表格。") + else: + print(f"history_patterns_flows文件不存在。") + + +def submit_scada_info(name: str, coord_id: str) -> None: + """ + 将scada信息表导入pg数据库 + :param name: 项目名称(数据库名称) + :param coord_id: 坐标系的id,如4326,根据原始坐标信息输入 + :return: + """ + scada_info_path = "./scada_info.csv" + # 检查文件是否存在 + if os.path.exists(scada_info_path): + print(f"scada_info文件存在,开始处理...") + + # 自动检测文件编码 + with open(scada_info_path, "rb") as file: + raw_data = file.read() + detected = chardet.detect(raw_data) + file_encoding = detected["encoding"] + print(f"检测到的文件编码:{file_encoding}") + try: + # 动态替换数据库名称 + conn_string = get_pgconn_string(db_name=name) + + # 连接到 PostgreSQL 数据库(这里是数据库 "bb") + with psycopg.connect(conn_string) as conn: + with conn.cursor() as cur: + # 检查 scada_info 表是否为空 + cur.execute("SELECT COUNT(*) FROM scada_info;") + count = cur.fetchone()[0] + + if count > 0: + print("scada_info表中已有数据,正在清空记录...") + cur.execute("DELETE FROM scada_info;") + print("表记录已清空。") + + with open( + scada_info_path, newline="", encoding=file_encoding + ) as csvfile: + reader = csv.DictReader(csvfile) + for row in reader: + # 将CSV单元格值为空的字段转换为 None + cleaned_row = { + key: (value if 
value.strip() else None) + for key, value in row.items() + } + + # 处理 associated_source_outflow_id 列动态变化 + associated_columns = [ + f"associated_source_outflow_id{i}" for i in range(1, 21) + ] + associated_values = [ + ( + cleaned_row.get(col).strip() + if cleaned_row.get(col) + and cleaned_row.get(col).strip() + else None + ) + for col in associated_columns + ] + + # 将 X_coor 和 Y_coor 转换为 geometry 类型 + x_coor = ( + float(cleaned_row["X_coor"]) + if cleaned_row["X_coor"] + else None + ) + y_coor = ( + float(cleaned_row["Y_coor"]) + if cleaned_row["Y_coor"] + else None + ) + coord = ( + f"SRID={coord_id};POINT({x_coor} {y_coor})" + if x_coor and y_coor + else None + ) + + # 准备插入 SQL 语句 + insert_sql = sql.SQL( + """ + INSERT INTO scada_info ( + id, type, associated_element_id, associated_pattern, + associated_pipe_flow_id, {associated_columns}, + API_query_id, transmission_mode, transmission_frequency, + reliability, X_coor, Y_coor, coord + ) + VALUES ( + %s, %s, %s, %s, %s, {associated_placeholders}, + %s, %s, %s, %s, %s, %s, %s + ); + """ + ).format( + associated_columns=sql.SQL(", ").join( + sql.Identifier(col) for col in associated_columns + ), + associated_placeholders=sql.SQL(", ").join( + sql.Placeholder() for _ in associated_columns + ), + ) + # 将数据插入数据库 + cur.execute( + insert_sql, + ( + cleaned_row["id"], + cleaned_row["type"], + cleaned_row["associated_element_id"], + cleaned_row.get("associated_pattern"), + cleaned_row.get("associated_pipe_flow_id"), + *associated_values, + cleaned_row.get("API_query_id"), + cleaned_row["transmission_mode"], + cleaned_row["transmission_frequency"], + cleaned_row["reliability"], + x_coor, + y_coor, + coord, + ), + ) + conn.commit() + print("数据成功导入到 'scada_info' 表格。") + except Exception as e: + print(f"导入时出错:{e}") + else: + print(f"scada_info文件不存在。") + + +# 2025/03/23 +def create_user(name: str, username: str, password: str): + """ + 创建用户 + :param name: 数据库名称 + :param username: 用户名 + :param password: 密码 + :return: + """ + 
try: + # 动态替换数据库名称 + conn_string = get_pgconn_string(db_name=name) + # 连接到 PostgreSQL 数据库(这里是数据库 "bb") + with psycopg.connect(conn_string) as conn: + with conn.cursor() as cur: + cur.execute( + "INSERT INTO users (username, password) VALUES (%s, %s)", + (username, password), + ) + # 提交事务 + conn.commit() + print("新用户创建成功!") + except Exception as e: + print(f"创建用户出错:{e}") + + +# 2025/03/23 +def delete_user(name: str, username: str): + """ + 删除用户 + :param name: 数据库名称 + :param username: 用户名 + :return: + """ + try: + # 动态替换数据库名称 + conn_string = get_pgconn_string(db_name=name) + # 连接到 PostgreSQL 数据库(这里是数据库 "bb") + with psycopg.connect(conn_string) as conn: + with conn.cursor() as cur: + cur.execute("DELETE FROM users WHERE username = %s", (username,)) + conn.commit() + print(f"用户 {username} 删除成功!") + except Exception as e: + print(f"删除用户出错:{e}") + + +# 2025/03/23 +def scheme_name_exists(name: str, scheme_name: str) -> bool: + """ + 判断传入的 scheme_name 是否已存在于 scheme_list 表中,用于输入框判断 + :param name: 数据库名称 + :param scheme_name: 需要判断的方案名称 + :return: 如果存在返回 True,否则返回 False + """ + try: + conn_string = get_pgconn_string(db_name=name) + with psycopg.connect(conn_string) as conn: + with conn.cursor() as cur: + cur.execute( + "SELECT COUNT(*) FROM scheme_list WHERE scheme_name = %s", + (scheme_name,), + ) + result = cur.fetchone() + if result is not None and result[0] > 0: + return True + else: + return False + except Exception as e: + print(f"查询 scheme_name 时出错:{e}") + return False + + +# 2025/03/23 +def store_scheme_info( + name: str, + scheme_name: str, + scheme_type: str, + username: str, + scheme_start_time: str, + scheme_detail: dict, +): + """ + 将一条方案记录插入 scheme_list 表中 + :param name: 数据库名称 + :param scheme_name: 方案名称 + :param scheme_type: 方案类型 + :param username: 用户名(需在 users 表中已存在) + :param scheme_start_time: 方案起始时间(字符串) + :param scheme_detail: 方案详情(字典,会转换为 JSON) + :return: + """ + try: + conn_string = get_pgconn_string(db_name=name) + with psycopg.connect(conn_string) as 
conn: + with conn.cursor() as cur: + sql = """ + INSERT INTO scheme_list (scheme_name, scheme_type, username, scheme_start_time, scheme_detail) + VALUES (%s, %s, %s, %s, %s) + """ + # 将字典转换为 JSON 字符串 + scheme_detail_json = json.dumps(scheme_detail) + cur.execute( + sql, + ( + scheme_name, + scheme_type, + username, + scheme_start_time, + scheme_detail_json, + ), + ) + conn.commit() + print("方案信息存储成功!") + except Exception as e: + print(f"存储方案信息时出错:{e}") + + +# 2025/03/23 +def delete_scheme_info(name: str, scheme_name: str) -> None: + """ + 从 scheme_list 表中删除指定的方案 + :param name: 数据库名称 + :param scheme_name: 要删除的方案名称 + """ + try: + conn_string = get_pgconn_string(db_name=name) + with psycopg.connect(conn_string) as conn: + with conn.cursor() as cur: + # 使用参数化查询删除方案记录 + cur.execute( + "DELETE FROM scheme_list WHERE scheme_name = %s", (scheme_name,) + ) + conn.commit() + print(f"方案 {scheme_name} 删除成功!") + except Exception as e: + print(f"删除方案时出错:{e}") + + +# 2025/03/23 +def query_scheme_list(name: str) -> list: + """ + 查询pg数据库中的scheme_list,按照 create_time 降序排列,离现在时间最近的记录排在最前面 + :param name: 项目名称(数据库名称) + :return: 返回查询结果的所有行 + """ + try: + # 动态替换数据库名称 + conn_string = get_pgconn_string(db_name=name) + # 连接到 PostgreSQL 数据库(这里是数据库 "bb") + with psycopg.connect(conn_string) as conn: + with conn.cursor() as cur: + # 按 create_time 降序排列 + cur.execute("SELECT * FROM scheme_list ORDER BY create_time DESC") + rows = cur.fetchall() + return rows + + except Exception as e: + print(f"查询错误:{e}") + + +# 2025/03/23 +def upload_shp_to_pg(name: str, table_name: str, role: str, shp_file_path: str): + """ + 将 Shapefile 文件上传到 PostgreSQL 数据库 + :param name: 项目名称(数据库名称) + :param table_name: 创建表的名字 + :param role: 数据库角色名,位于c盘user中查看 + :param shp_file_path: shp文件的路径 + :return: + """ + try: + # 动态连接到指定的数据库 + conn_string = get_pgconn_string(db_name=name) + with psycopg.connect(conn_string) as conn: + # 读取 Shapefile 文件 + gdf = gpd.read_file(shp_file_path) + + # 检查投影坐标系(CRS),并确保是 EPSG:4326 + if 
gdf.crs.to_string() != "EPSG:4490": + gdf = gdf.to_crs(epsg=4490) + + # 使用 GeoDataFrame 的 .to_postgis 方法将数据写入 PostgreSQL + # 需要在数据库中提前安装 PostGIS 扩展 + engine = create_engine(f"postgresql+psycopg2://{role}:@127.0.0.1/{name}") + gdf.to_postgis( + table_name, engine, if_exists="replace", index=True, index_label="id" + ) + + print( + f"Shapefile 文件成功上传到 PostgreSQL 数据库 '{name}' 的表 '{table_name}'." + ) + + except Exception as e: + print(f"上传 Shapefile 到 PostgreSQL 时出错:{e}") + + +def submit_risk_probability_result(name: str, result_file_path: str) -> None: + """ + 将管网风险评估结果导入pg数据库 + :param name: 项目名称(数据库名称) + :param result_file_path: 结果文件路径 + :return: + """ + # 自动检测文件编码 + # with open({result_file_path}, 'rb') as file: + # raw_data = file.read() + # detected = chardet.detect(raw_data) + # file_encoding = detected['encoding'] + # print(f"检测到的文件编码:{file_encoding}") + + try: + # 动态替换数据库名称 + conn_string = get_pgconn_string(db_name=name) + + # 连接到 PostgreSQL 数据库 + with psycopg.connect(conn_string) as conn: + with conn.cursor() as cur: + # 检查 scada_info 表是否为空 + cur.execute("SELECT COUNT(*) FROM pipe_risk_probability;") + count = cur.fetchone()[0] + + if count > 0: + print("pipe_risk_probability表中已有数据,正在清空记录...") + cur.execute("DELETE FROM pipe_risk_probability;") + print("表记录已清空。") + + # 读取Excel并转换x/y列为列表 + df = pd.read_excel(result_file_path, sheet_name="Sheet1") + df["x"] = df["x"].apply(ast.literal_eval) + df["y"] = df["y"].apply(ast.literal_eval) + + # 批量插入数据 + for index, row in df.iterrows(): + insert_query = """ + INSERT INTO pipe_risk_probability + (pipeID, pipeage, risk_probability_now, x, y) + VALUES (%s, %s, %s, %s, %s) + """ + cur.execute( + insert_query, + ( + row["pipeID"], + row["pipeage"], + row["risk_probability_now"], + row["x"], # 直接传递列表 + row["y"], # 同上 + ), + ) + + conn.commit() + print("风险评估结果导入成功") + + except Exception as e: + print(f"导入时出错:{e}") + + +def pressure_sensor_placement_sensitivity( + name: str, scheme_name: str, sensor_number: int, min_diameter: 
int, username: str +) -> None: + """ + 基于改进灵敏度法进行压力监测点优化布置 + :param name: 数据库名称 + :param scheme_name: 监测优化布置方案名称 + :param sensor_number: 传感器数目 + :param min_diameter: 最小管径 + :param username: 用户名 + :return: + """ + sensor_location = sensitivity.get_ID( + name=name, sensor_num=sensor_number, min_diameter=min_diameter + ) + try: + conn_string = get_pgconn_string(db_name=name) + with psycopg.connect(conn_string) as conn: + with conn.cursor() as cur: + sql = """ + INSERT INTO sensor_placement (scheme_name, sensor_number, min_diameter, username, sensor_location) + VALUES (%s, %s, %s, %s, %s) + """ + + cur.execute( + sql, + ( + scheme_name, + sensor_number, + min_diameter, + username, + sensor_location, + ), + ) + conn.commit() + print("方案信息存储成功!") + except Exception as e: + print(f"存储方案信息时出错:{e}") + + +# 2025/08/21 +# 基于kmeans聚类法进行压力监测点优化布置 +def pressure_sensor_placement_kmeans( + name: str, scheme_name: str, sensor_number: int, min_diameter: int, username: str +) -> None: + """ + 基于聚类法进行压力监测点优化布置 + :param name: 数据库名称(注意,此处数据库名称也是inp文件名称,inp文件与pg库名要一样) + :param scheme_name: 监测优化布置方案名称 + :param sensor_number: 传感器数目 + :param min_diameter: 最小管径 + :param username: 用户名 + :return: + """ + # dump_inp + inp_name = f"./db_inp/{name}.db.inp" + dump_inp(name, inp_name, "2") + sensor_location = kmeans_sensor.kmeans_sensor_placement( + name=name, sensor_num=sensor_number, min_diameter=min_diameter + ) + try: + conn_string = get_pgconn_string(db_name=name) + with psycopg.connect(conn_string) as conn: + with conn.cursor() as cur: + sql = """ + INSERT INTO sensor_placement (scheme_name, sensor_number, min_diameter, username, sensor_location) + VALUES (%s, %s, %s, %s, %s) + """ + + cur.execute( + sql, + ( + scheme_name, + sensor_number, + min_diameter, + username, + sensor_location, + ), + ) + conn.commit() + print("方案信息存储成功!") + except Exception as e: + print(f"存储方案信息时出错:{e}") + + +############################################################ +# 流量监测数据清洗 ***卡尔曼滤波法*** 
+############################################################ +# 2025/08/21 hxyan + + +def flow_data_clean(input_csv_file: str) -> str: + """ + 读取 input_csv_path 中的每列时间序列,使用一维 Kalman 滤波平滑并用预测值替换基于 3σ 检测出的异常点。 + 保存输出为:_cleaned.xlsx(与输入同目录),并返回输出文件的绝对路径。如有同名文件存在,则覆盖。 + :param: input_csv_file: 输入的 CSV 文件明或路径 + :return: 输出文件的绝对路径 + """ + + # 提供的 input_csv_path 绝对路径,以下为 默认脚本目录下同名 CSV 文件,构建绝对路径,可根据情况修改 + script_dir = os.path.dirname(os.path.abspath(__file__)) + input_csv_path = os.path.join(script_dir, input_csv_file) + + # 检查文件是否存在 + if not os.path.exists(input_csv_path): + raise FileNotFoundError(f"指定的文件不存在: {input_csv_path}") + # 调用 Fdataclean.clean_flow_data_kf 函数进行数据清洗 + out_xlsx_path = Fdataclean.clean_flow_data_kf(input_csv_path) + print("清洗后的数据已保存到:", out_xlsx_path) + + +############################################################ +# 压力监测数据清洗 ***kmean++法*** +############################################################ +# 2025/08/21 hxyan + + +def pressure_data_clean(input_csv_file: str) -> str: + """ + 读取 input_csv_path 中的每列时间序列,使用Kmean++清洗数据。 + 保存输出为:_cleaned.xlsx(与输入同目录),并返回输出文件的绝对路径。如有同名文件存在,则覆盖。 + 原始数据在 sheet 'raw_pressure_data',处理后数据在 sheet 'cleaned_pressusre_data'。 + :param input_csv_path: 输入的 CSV 文件路径 + :return: 输出文件的绝对路径 + """ + + # 提供的 input_csv_path 绝对路径,以下为 默认脚本目录下同名 CSV 文件,构建绝对路径,可根据情况修改 + script_dir = os.path.dirname(os.path.abspath(__file__)) + input_csv_path = os.path.join(script_dir, input_csv_file) + + # 检查文件是否存在 + if not os.path.exists(input_csv_path): + raise FileNotFoundError(f"指定的文件不存在: {input_csv_path}") + # 调用 Fdataclean.clean_flow_data_kf 函数进行数据清洗 + out_xlsx_path = Pdataclean.clean_pressure_data_km(input_csv_path) + print("清洗后的数据已保存到:", out_xlsx_path) + + +if __name__ == "__main__": + # contaminant_simulation('bb_model','2024-06-24T00:00:00Z','ZBBDTZDP009034',30,1800) + # flushing_analysis('bb_model','2024-04-01T08:00:00Z',{'GSD230719205857733F8F5214FF','GSD230719205857C0AF65B6A170'},'GSD2307192058570DEDF28E4F73',0,duration=900) + # 
flushing_analysis('bb_model', '2024-08-26T08:00:00Z', ['GSD2307192058572E5C0E14D83E'], [0.5], 'ZBBDTZDP009410', 0, + # duration=1800) + # valve_close_analysis('bb_model','2024-04-01T08:00:00Z',['GSD2307192058576122D929EE99(L)'],duration=1800) + + # burst_analysis('bb','2024-04-01T08:00:00Z','ZBBGXSZW000001',burst_size=200,duration=1800) + # run_simulation('beibeizone','2024-04-01T08:00:00Z') + # str_dump=dump_output('h:\\OneDrive\\tjwaterserver\\temp\\beibeizone.db_no_burst.out') + # with open("out_dump.txt", "w") as f: + # f.write(str_dump) + # str_dump=dump_output('h:\\OneDrive\\tjwaterserver\\temp\\beibeizone.db_busrtID(ZBBGXSZW000001).out') + # with open("burst_out_dump.txt", "w") as f: + # f.write(str_dump) + + # # 更新inp文件,并插入history_patterns_flows + # network_update('fx0217-mass injection.inp') + + # # 更新scada_info文件 + # submit_scada_info(project_info.name, '4490') + + # 示例:scheme_name_exists + # if scheme_name_exists(name='bb', scheme_name='burst_scheme'): + # print(f"方案名已存在,请更改!") + # else: + # print(f"方案名不存在,可以使用。") + + # 示例1:burst_analysis + # burst_analysis(name='bb', modify_pattern_start_time='2025-04-17T00:00:00+08:00', + # burst_ID='GSD230112144241FA18292A84CB', burst_size=400, modify_total_duration=1800, scheme_Name='GSD230112144241FA18292A84CB_400') + + # 示例:create_user + # create_user(name=project_info.name, username='tjwater dev', password='123456') + + # # 示例:delete_user + # delete_user(name=project_info.name, username='admin_test') + + # # 示例:query_scheme_list + # result = query_scheme_list(name=project_info.name) + # print(result) + + # # 示例:delete_scheme_info + # delete_scheme_info(name=project_info.name, scheme_name='burst_scheme') + + # # 示例:upload_shp_to_pg + # # 这里的role是 电脑的用户名,服务器上是 Administrator + # upload_shp_to_pg(name=project_info.name, table_name='GIS_pipe', role='Administrator', shp_file_path='市政管线.shp') + + # # 示例:submit_risk_probability_result + # submit_risk_probability_result(name=project_info.name, 
result_file_path='./北碚市政管线风险评价结果.xlsx') + + # # 示例:pressure_sensor_placement_sensitivity + # pressure_sensor_placement_sensitivity(name=project_info.name, scheme_name='20250517', sensor_number=10, min_diameter=300, username='admin') + + # 示例:pressure_sensor_placement_kmeans + # pressure_sensor_placement_kmeans(name=project_info.name, scheme_name='sensor_1103', sensor_number=35, min_diameter=300, username='admin') + + # 测试:convert emitters coefficients + convert_to_local_unit("szh", 100) diff --git a/tests/test_pipeline_health_analyzer.py b/tests/test_pipeline_health_analyzer.py index 489ff31..a6413a2 100644 --- a/tests/test_pipeline_health_analyzer.py +++ b/tests/test_pipeline_health_analyzer.py @@ -3,9 +3,7 @@ from api_ex.pipeline_health_analyzer import PipelineHealthAnalyzer def test_pipeline_health_analyzer(): # 初始化分析器,假设模型文件路径为'models/rsf_model.joblib' - analyzer = PipelineHealthAnalyzer( - model_path="api_ex/model/my_survival_forest_model_quxi.joblib" - ) + analyzer = PipelineHealthAnalyzer() # 创建示例输入数据(9个样本) import pandas as pd import time