Compare commits

...

10 Commits

419 changed files with 4864 additions and 6004 deletions

7
.gitignore vendored
View File

@@ -1,7 +1,8 @@
 db_inp/
 temp/
 data/
+build/
 *.pyc
 .env
 *.dump
-api_ex/model/my_survival_forest_model_quxi.joblib
+app/algorithms/api_ex/model/my_survival_forest_model_quxi.joblib

View File

@@ -0,0 +1,28 @@
from app.algorithms.data_cleaning import flow_data_clean, pressure_data_clean
from app.algorithms.sensors import (
pressure_sensor_placement_sensitivity,
pressure_sensor_placement_kmeans,
)
from app.algorithms.simulations import (
convert_to_local_unit,
burst_analysis,
valve_close_analysis,
flushing_analysis,
contaminant_simulation,
age_analysis,
pressure_regulation,
)
# Explicit public API of the algorithms package: star-imports and API
# documentation see exactly these re-exported names.
__all__ = [
    "flow_data_clean",
    "pressure_data_clean",
    "pressure_sensor_placement_sensitivity",
    "pressure_sensor_placement_kmeans",
    "convert_to_local_unit",
    "burst_analysis",
    "valve_close_analysis",
    "flushing_analysis",
    "contaminant_simulation",
    "age_analysis",
    "pressure_regulation",
]

View File

@@ -1,5 +1,5 @@
 import numpy as np
-from tjnetwork import *
+from app.services.tjnetwork import *
 from api.s36_wda_cal import *
 # from get_real_status import *
 from datetime import datetime,timedelta
@@ -8,7 +8,7 @@ import json
 import pytz
 import requests
 import time
-import project_info
+import app.services.project_info as project_info
 url_path = 'http://10.101.15.16:9000/loong' # 内网
 # url_path = 'http://183.64.62.100:9057/loong' # 外网

View File

@@ -11,7 +11,7 @@ from sklearn.cluster import KMeans
 from wntr.epanet.toolkit import EpanetException
 from numpy.linalg import slogdet
 import random
-from tjnetwork import *
+from app.services.tjnetwork import *
 from matplotlib.lines import Line2D
 from sklearn.cluster import SpectralClustering
 import libpysal as ps
@@ -19,7 +19,7 @@ from spopt.region import Skater
 from shapely.geometry import Point
 import geopandas as gpd
 from sklearn.metrics import pairwise_distances
-import project_info
+import app.services.project_info as project_info
 # 2025/03/12
 # Step1: 获取节点坐标

View File

@@ -11,8 +11,8 @@ from sklearn.cluster import KMeans
 from wntr.epanet.toolkit import EpanetException
 from numpy.linalg import slogdet
 import random
-from tjnetwork import *
+from app.services.tjnetwork import *
-import project_info
+import app.services.project_info as project_info
 # 2025/03/12
 # Step1: 获取节点坐标

View File

@@ -0,0 +1,57 @@
import os
import app.algorithms.api_ex.Fdataclean as Fdataclean
import app.algorithms.api_ex.Pdataclean as Pdataclean
############################################################
# 流量监测数据清洗 ***卡尔曼滤波法***
############################################################
# 2025/08/21 hxyan
def flow_data_clean(input_csv_file: str) -> str:
    """
    Clean flow-monitoring data with a 1-D Kalman filter.

    Each time-series column of the input CSV is smoothed; points flagged as
    outliers by a 3-sigma test are replaced with the filter's predicted
    values. The result is written to ``<input_filename>_cleaned.xlsx`` next
    to the input file, overwriting any existing file of the same name.

    :param input_csv_file: input CSV file name or path; a relative name is
        resolved against this script's directory
    :return: absolute path of the output file
    :raises FileNotFoundError: if the resolved input file does not exist
    """
    # Resolve relative names against the script directory (os.path.join
    # leaves absolute paths untouched).
    script_dir = os.path.dirname(os.path.abspath(__file__))
    input_csv_path = os.path.join(script_dir, input_csv_file)
    if not os.path.exists(input_csv_path):
        raise FileNotFoundError(f"指定的文件不存在: {input_csv_path}")
    # Delegate the actual Kalman-filter cleaning to the api_ex helper.
    out_xlsx_path = Fdataclean.clean_flow_data_kf(input_csv_path)
    print("清洗后的数据已保存到:", out_xlsx_path)
    # Bug fix: the function is annotated and documented to return the output
    # path, but previously fell off the end and returned None.
    return out_xlsx_path
############################################################
# 压力监测数据清洗 ***kmean++法***
############################################################
# 2025/08/21 hxyan
def pressure_data_clean(input_csv_file: str) -> str:
    """
    Clean pressure-monitoring data with k-means++.

    Each time-series column of the input CSV is cleaned; the result is
    written to ``<input_filename>_cleaned.xlsx`` next to the input file,
    overwriting any existing file of the same name. The raw data is stored
    in sheet 'raw_pressure_data' and the processed data in sheet
    'cleaned_pressusre_data'.

    :param input_csv_file: input CSV file name or path; a relative name is
        resolved against this script's directory
    :return: absolute path of the output file
    :raises FileNotFoundError: if the resolved input file does not exist
    """
    # Resolve relative names against the script directory (os.path.join
    # leaves absolute paths untouched).
    script_dir = os.path.dirname(os.path.abspath(__file__))
    input_csv_path = os.path.join(script_dir, input_csv_file)
    if not os.path.exists(input_csv_path):
        raise FileNotFoundError(f"指定的文件不存在: {input_csv_path}")
    # Delegate the k-means++ cleaning to the api_ex helper.
    out_xlsx_path = Pdataclean.clean_pressure_data_km(input_csv_path)
    print("清洗后的数据已保存到:", out_xlsx_path)
    # Bug fix: the function is annotated and documented to return the output
    # path, but previously fell off the end and returned None.
    return out_xlsx_path

File diff suppressed because it is too large Load Diff

91
app/algorithms/sensors.py Normal file
View File

@@ -0,0 +1,91 @@
import psycopg
import app.algorithms.api_ex.kmeans_sensor as kmeans_sensor
import app.algorithms.api_ex.sensitivity as sensitivity
from app.native.api.postgresql_info import get_pgconn_string
from app.services.tjnetwork import dump_inp
def pressure_sensor_placement_sensitivity(
    name: str, scheme_name: str, sensor_number: int, min_diameter: int, username: str
) -> None:
    """
    Optimal pressure-sensor placement via the improved sensitivity method.

    Computes the sensor locations with ``sensitivity.get_ID`` and persists
    the scheme into the ``sensor_placement`` table of the project database.
    Database failures are reported on stdout only; nothing is raised.

    :param name: database name
    :param scheme_name: name of the placement scheme
    :param sensor_number: number of sensors to place
    :param min_diameter: minimum pipe diameter considered
    :param username: user that owns the scheme
    :return: None
    """
    locations = sensitivity.get_ID(
        name=name, sensor_num=sensor_number, min_diameter=min_diameter
    )
    # One row per scheme; locations are stored as returned by the solver.
    row = (scheme_name, sensor_number, min_diameter, username, locations)
    try:
        dsn = get_pgconn_string(db_name=name)
        with psycopg.connect(dsn) as conn:
            with conn.cursor() as cursor:
                insert_sql = """
                INSERT INTO sensor_placement (scheme_name, sensor_number, min_diameter, username, sensor_location)
                VALUES (%s, %s, %s, %s, %s)
                """
                cursor.execute(insert_sql, row)
                conn.commit()
        print("方案信息存储成功!")
    except Exception as e:
        print(f"存储方案信息时出错:{e}")
# 2025/08/21
# 基于kmeans聚类法进行压力监测点优化布置
def pressure_sensor_placement_kmeans(
    name: str, scheme_name: str, sensor_number: int, min_diameter: int, username: str
) -> None:
    """
    Optimal pressure-sensor placement via k-means clustering.

    Dumps the network to an INP file (the INP file and the PG database must
    share the same name), computes the sensor locations with
    ``kmeans_sensor.kmeans_sensor_placement`` and persists the scheme into
    the ``sensor_placement`` table. Database failures are reported on stdout
    only; nothing is raised.

    :param name: database name (also the INP file base name)
    :param scheme_name: name of the placement scheme
    :param sensor_number: number of sensors to place
    :param min_diameter: minimum pipe diameter considered
    :param username: user that owns the scheme
    :return: None
    """
    # Export the current network so the clustering helper can read it.
    inp_name = f"./db_inp/{name}.db.inp"
    dump_inp(name, inp_name, "2")
    locations = kmeans_sensor.kmeans_sensor_placement(
        name=name, sensor_num=sensor_number, min_diameter=min_diameter
    )
    # One row per scheme; locations are stored as returned by the solver.
    row = (scheme_name, sensor_number, min_diameter, username, locations)
    try:
        dsn = get_pgconn_string(db_name=name)
        with psycopg.connect(dsn) as conn:
            with conn.cursor() as cursor:
                insert_sql = """
                INSERT INTO sensor_placement (scheme_name, sensor_number, min_diameter, username, sensor_location)
                VALUES (%s, %s, %s, %s, %s)
                """
                cursor.execute(insert_sql, row)
                conn.commit()
        print("方案信息存储成功!")
    except Exception as e:
        print(f"存储方案信息时出错:{e}")

View File

@@ -0,0 +1,688 @@
import json
from datetime import datetime
from math import pi, sqrt
import pytz
import app.services.simulation as simulation
from app.algorithms.api_ex.run_simulation import run_simulation_ex, from_clock_to_seconds_2
from app.native.api.project import copy_project
from app.services.epanet.epanet import Output
from app.services.scheme_management import store_scheme_info
from app.services.tjnetwork import *
############################################################
# burst analysis 01
############################################################
def convert_to_local_unit(proj: str, emitters: float) -> float:
    """
    Convert an emitter value (in L/s) to the project's configured flow unit.

    The unit is read from the project's "UNITS" option. Unknown units are
    printed and the value is returned unchanged.

    :param proj: project name
    :param emitters: emitter value, presumably in L/s
    :return: value expressed in the project's flow unit
    """
    open_project(proj)
    unit = get_option(proj).get("UNITS")
    # NOTE(review): 0.0438126 equals 43.8126/1000, while converting L/s to
    # US MGD would divide by 43.8126 — confirm the intended factor.
    converters = {
        "CMH": lambda v: v * 3.6,
        "LPS": lambda v: v,
        "CMS": lambda v: v / 1000.0,
        "MGD": lambda v: v * 0.0438126,
    }
    convert = converters.get(unit)
    if convert is None:
        # Unknown unit: log it and hand the value back untouched.
        print(unit)
        return emitters
    return convert(emitters)
def burst_analysis(
    name: str,
    modify_pattern_start_time: str,
    burst_ID: list | str = None,
    burst_size: list | float | int = None,
    modify_total_duration: int = 900,
    modify_fixed_pump_pattern: dict[str, list] = None,
    modify_variable_pump_pattern: dict[str, list] = None,
    modify_valve_opening: dict[str, float] = None,
    scheme_Name: str = None,
) -> None:
    """
    Pipe-burst simulation.

    Copies "<name>_template" into a scratch project ("burst_Anal_<name>"),
    models each burst as an emitter on a junction adjacent to the burst
    pipe, runs an extended-period simulation under pressure-driven analysis
    (PDA), deletes the scratch project and stores the scheme metadata.

    :param name: model name, as registered in the database
    :param modify_pattern_start_time: simulation start time, formatted like
        '2024-11-25T09:00:00+08:00'
    :param burst_ID: ID(s) of the burst pipe(s); one pipe may be passed as a
        str, several pipes as a list
    :param burst_size: burst orifice area(s) in cm*cm, matched to burst_ID
        by position; missing or non-positive entries default to 1/8 of the
        pipe cross-section
    :param modify_total_duration: total simulation duration, seconds
    :param modify_fixed_pump_pattern: fixed-speed pump patterns; key is the
        pump id, value is the replacement pattern list
    :param modify_variable_pump_pattern: variable-speed pump patterns; key
        is the pump id, value is the replacement pattern list
    :param modify_valve_opening: valve openings; key is the valve id, value
        is the replacement opening
    :param scheme_Name: scheme name
    :return: None normally; a JSON-encoded error string on argument
        mismatch (NOTE(review): inconsistent with the ``-> None`` hint)
    """
    # Snapshot of the inputs, persisted via store_scheme_info at the end.
    scheme_detail: dict = {
        "burst_ID": burst_ID,
        "burst_size": burst_size,
        "modify_total_duration": modify_total_duration,
        "modify_fixed_pump_pattern": modify_fixed_pump_pattern,
        "modify_variable_pump_pattern": modify_variable_pump_pattern,
        "modify_valve_opening": modify_valve_opening,
    }
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Analysis."
    )
    # Work on a disposable copy so the base model is never modified.
    new_name = f"burst_Anal_{name}"
    if have_project(new_name):
        if is_project_open(new_name):
            close_project(new_name)
        delete_project(new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Copying Database."
    )
    # CopyProjectEx()(name, new_name,
    #                 ['operation', 'current_operation', 'restore_operation', 'batch_operation', 'operation_table'])
    copy_project(name + "_template", new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Opening Database."
    )
    open_project(new_name)
    # Bring the copy to the requested start time before injecting the burst.
    simulation.run_simulation(
        name=new_name,
        simulation_type="manually_temporary",
        modify_pattern_start_time=modify_pattern_start_time,
    )
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Database Loading OK."
    )
    # Step 1: normalise burst_ID/burst_size into two equally long lists and
    # set the emitter coefficient on an end node of each burst pipe.
    if isinstance(burst_ID, list):
        if (burst_size is not None) and (type(burst_size) is not list):
            return json.dumps("Type mismatch.")
    elif isinstance(burst_ID, str):
        burst_ID = [burst_ID]
        if burst_size is not None:
            if isinstance(burst_size, float) or isinstance(burst_size, int):
                burst_size = [burst_size]
            else:
                return json.dumps("Type mismatch.")
    else:
        return json.dumps("Type mismatch.")
    if burst_size is None:
        burst_size = [-1] * len(burst_ID)
    elif len(burst_size) < len(burst_ID):
        # Pad with -1 so missing sizes fall back to the default below.
        burst_size += [-1] * (len(burst_ID) - len(burst_size))
    elif len(burst_size) > len(burst_ID):
        # burst_size = burst_size[:len(burst_ID)]
        return json.dumps("Length mismatch.")
    for burst_ID_, burst_size_ in zip(burst_ID, burst_size):
        pipe = get_pipe(new_name, burst_ID_)
        str_start_node = pipe["node1"]
        str_end_node = pipe["node2"]
        d_pipe = pipe["diameter"] / 1000.0  # mm -> m
        if burst_size_ <= 0:
            # Default opening: 1/8 of the pipe cross-section area.
            burst_size_ = 3.14 * d_pipe * d_pipe / 4 / 8
        else:
            burst_size_ = burst_size_ / 10000  # cm^2 -> m^2
        emitter_coeff = (
            0.65 * burst_size_ * sqrt(19.6) * 1000
        )  # coefficient from 1/8 of the opening area, in L/s
        emitter_coeff = convert_to_local_unit(new_name, emitter_coeff)
        # Attach the emitter to whichever pipe end is a junction (node2 first).
        emitter_node = ""
        if is_junction(new_name, str_end_node):
            emitter_node = str_end_node
        elif is_junction(new_name, str_start_node):
            emitter_node = str_start_node
        # NOTE(review): if neither end is a junction, emitter_node stays ""
        # — confirm get_emitter/set_emitter tolerate an empty id.
        old_emitter = get_emitter(new_name, emitter_node)
        if old_emitter != None:
            old_emitter["coefficient"] = emitter_coeff  # emitter coefficient of the burst
        else:
            old_emitter = {"junction": emitter_node, "coefficient": emitter_coeff}
        new_emitter = ChangeSet()
        new_emitter.append(old_emitter)
        set_emitter(new_name, new_emitter)
    # Step 2: run the simulation.
    # Valve closing can leave residual flow under demand-driven analysis, so
    # switch to pressure-driven analysis (PDA).
    options = get_option(new_name)
    options["DEMAND MODEL"] = OPTION_DEMAND_MODEL_PDA
    options["REQUIRED PRESSURE"] = "10.0000"
    cs_options = ChangeSet()
    cs_options.append(options)
    set_option(new_name, cs_options)
    # valve_control = None
    # if modify_valve_opening is not None:
    #     valve_control = {}
    #     for valve in modify_valve_opening:
    #         valve_control[valve] = {'status': 'CLOSED'}
    # result = run_simulation_ex(new_name,'realtime', modify_pattern_start_time,
    #                            end_datetime=modify_pattern_start_time,
    #                            modify_total_duration=modify_total_duration,
    #                            modify_pump_pattern=modify_pump_pattern,
    #                            valve_control=valve_control,
    #                            downloading_prohibition=True)
    simulation.run_simulation(
        name=new_name,
        simulation_type="extended",
        modify_pattern_start_time=modify_pattern_start_time,
        modify_total_duration=modify_total_duration,
        modify_fixed_pump_pattern=modify_fixed_pump_pattern,
        modify_variable_pump_pattern=modify_variable_pump_pattern,
        modify_valve_opening=modify_valve_opening,
        scheme_Type="burst_Analysis",
        scheme_Name=scheme_Name,
    )
    # Step 3: drop the scratch project (base model untouched).
    # execute_undo(name)  # (left disabled; uncertain)
    if is_project_open(new_name):
        close_project(new_name)
    delete_project(new_name)
    # return result
    store_scheme_info(
        name=name,
        scheme_name=scheme_Name,
        scheme_type="burst_Analysis",
        username="admin",
        scheme_start_time=modify_pattern_start_time,
        scheme_detail=scheme_detail,
    )
############################################################
# valve closing analysis 02
############################################################
def valve_close_analysis(
    name: str,
    modify_pattern_start_time: str,
    modify_total_duration: int = 900,
    modify_valve_opening: dict[str, float] = None,
    scheme_Name: str = None,
) -> None:
    """
    Valve-closing simulation.

    Copies "<name>_template" into a scratch project
    ("valve_close_Anal_<name>"), runs an extended-period simulation with the
    requested valve openings under pressure-driven analysis (PDA), then
    deletes the scratch project.

    :param name: model name, as registered in the database
    :param modify_pattern_start_time: simulation start time, formatted like
        '2024-11-25T09:00:00+08:00'
    :param modify_total_duration: total simulation duration, seconds
    :param modify_valve_opening: valve openings; key is the valve id, value
        is the replacement opening
    :param scheme_Name: scheme name
    :return: None
    """
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Analysis."
    )
    # Work on a disposable copy so the base model is never modified.
    new_name = f"valve_close_Anal_{name}"
    if have_project(new_name):
        if is_project_open(new_name):
            close_project(new_name)
        delete_project(new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Copying Database."
    )
    # CopyProjectEx()(name, new_name,
    #                 ['operation', 'current_operation', 'restore_operation', 'batch_operation', 'operation_table'])
    copy_project(name + "_template", new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Opening Database."
    )
    open_project(new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Database Loading OK."
    )
    # Step 1 (now delegated to run_simulation via modify_valve_opening):
    # for valve in valves:
    #     if not is_valve(new_name,valve):
    #         result='ID:{}is not a valve type'.format(valve)
    #         return result
    #     cs=ChangeSet()
    #     status=get_status(new_name,valve)
    #     status['status']='CLOSED'
    #     cs.append(status)
    #     set_status(new_name,cs)
    # Step 2: run the simulation.
    # Valve closing can leave residual flow under demand-driven analysis, so
    # switch to pressure-driven analysis (PDA).
    options = get_option(new_name)
    options["DEMAND MODEL"] = OPTION_DEMAND_MODEL_PDA
    options["REQUIRED PRESSURE"] = "20.0000"
    cs_options = ChangeSet()
    cs_options.append(options)
    set_option(new_name, cs_options)
    # result = run_simulation_ex(new_name,'realtime', modify_pattern_start_time, modify_pattern_start_time, modify_total_duration,
    #                            downloading_prohibition=True)
    simulation.run_simulation(
        name=new_name,
        simulation_type="extended",
        modify_pattern_start_time=modify_pattern_start_time,
        modify_total_duration=modify_total_duration,
        modify_valve_opening=modify_valve_opening,
        scheme_Type="valve_close_Analysis",
        scheme_Name=scheme_Name,
    )
    # Step 3: drop the scratch project (base model untouched).
    # for valve in valves:
    #     execute_undo(name)
    if is_project_open(new_name):
        close_project(new_name)
    delete_project(new_name)
    # return result
# return result
############################################################
# flushing analysis 03
# Pipe_Flushing_Analysis(prj_name,date_time, Valve_id_list, Drainage_Node_Id, Flushing_flow[opt], Flushing_duration[opt])->out_file:string
############################################################
def flushing_analysis(
    name: str,
    modify_pattern_start_time: str,
    modify_total_duration: int = 900,
    modify_valve_opening: dict[str, float] = None,
    drainage_node_ID: str = None,
    flushing_flow: float = 0,
    scheme_Name: str = None,
) -> None:
    """
    Pipe-flushing simulation.

    Copies "<name>_template" into a scratch project ("flushing_Anal_<name>"),
    adds the flushing discharge at the drainage node — either as extra demand
    (when ``flushing_flow`` > 0) or as an emitter sized from the largest
    connected pipe — runs an extended-period simulation under
    pressure-driven analysis (PDA), then deletes the scratch project.

    :param name: model name, as registered in the database
    :param modify_pattern_start_time: simulation start time, formatted like
        '2024-11-25T09:00:00+08:00'
    :param modify_total_duration: total simulation duration, seconds
    :param modify_valve_opening: valve openings; key is the valve id, value
        is the replacement opening
    :param drainage_node_ID: node ID of the flushing discharge point
    :param flushing_flow: flushing flow rate, in m3/h
    :param scheme_Name: scheme name
    :return: None normally; an error string when drainage_node_ID is not a
        junction (NOTE(review): inconsistent with the ``-> None`` hint)
    """
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Analysis."
    )
    # Work on a disposable copy so the base model is never modified.
    new_name = f"flushing_Anal_{name}"
    if have_project(new_name):
        if is_project_open(new_name):
            close_project(new_name)
        delete_project(new_name)
    # if is_project_open(name):
    #     close_project(name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Copying Database."
    )
    # CopyProjectEx()(name, new_name,
    #                 ['operation', 'current_operation', 'restore_operation', 'batch_operation', 'operation_table'])
    copy_project(name + "_template", new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Opening Database."
    )
    open_project(new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Database Loading OK."
    )
    if not is_junction(new_name, drainage_node_ID):
        return "Wrong Drainage node type"
    # Step 1 (now delegated to run_simulation via modify_valve_opening):
    # for valve, valve_k in zip(valves, valves_k):
    #     cs=ChangeSet()
    #     status=get_status(new_name,valve)
    #     # status['status']='CLOSED'
    #     if valve_k == 0:
    #         status['status'] = 'CLOSED'
    #     elif valve_k < 1:
    #         status['status'] = 'OPEN'
    #         status['setting'] = 0.1036 * pow(valve_k, -3.105)
    #     cs.append(status)
    #     set_status(new_name,cs)
    # Bug fix: get_option returns the options mapping (see
    # convert_to_local_unit), so read the "UNITS" entry instead of comparing
    # the whole dict to "LPS"/"CMH" (which was always false, so the flushing
    # flow was never added).
    units = get_option(new_name).get("UNITS")
    # Step 2: add the flushing discharge at the drainage node, either as
    # extra demand or as a fully-open emitter.
    emitter_demand = get_demand(new_name, drainage_node_ID)
    cs = ChangeSet()
    if flushing_flow > 0:
        for r in emitter_demand["demands"]:
            if units == "LPS":
                r["demand"] += flushing_flow / 3.6  # m3/h -> L/s
            elif units == "CMH":
                r["demand"] += flushing_flow
        cs.append(emitter_demand)
        set_demand(new_name, cs)
    else:
        # No explicit flow given: size an emitter from the largest pipe
        # connected to the drainage node (at least DN50).
        pipes = get_node_links(new_name, drainage_node_ID)
        flush_diameter = 50
        for pipe in pipes:
            d = get_pipe(new_name, pipe)["diameter"]
            if flush_diameter < d:
                flush_diameter = d
        flush_diameter /= 1000  # mm -> m
        emitter_coeff = (
            0.65 * 3.14 * (flush_diameter * flush_diameter / 4) * sqrt(19.6) * 1000
        )  # coefficient from the fully-open orifice area
        old_emitter = get_emitter(new_name, drainage_node_ID)
        if old_emitter is not None:
            old_emitter["coefficient"] = emitter_coeff
        else:
            old_emitter = {"junction": drainage_node_ID, "coefficient": emitter_coeff}
        new_emitter = ChangeSet()
        new_emitter.append(old_emitter)
        set_emitter(new_name, new_emitter)
    # Step 3: run the simulation.
    # Valve closing can leave residual flow under demand-driven analysis, so
    # switch to pressure-driven analysis (PDA).
    options = get_option(new_name)
    options["DEMAND MODEL"] = OPTION_DEMAND_MODEL_PDA
    options["REQUIRED PRESSURE"] = "20.0000"
    cs_options = ChangeSet()
    cs_options.append(options)
    set_option(new_name, cs_options)
    # result = run_simulation_ex(new_name,'realtime', modify_pattern_start_time, modify_pattern_start_time, modify_total_duration,
    #                            downloading_prohibition=True)
    simulation.run_simulation(
        name=new_name,
        simulation_type="extended",
        modify_pattern_start_time=modify_pattern_start_time,
        modify_total_duration=modify_total_duration,
        modify_valve_opening=modify_valve_opening,
        scheme_Type="flushing_Analysis",
        scheme_Name=scheme_Name,
    )
    # Step 4: drop the scratch project (base model untouched).
    if is_project_open(new_name):
        close_project(new_name)
    delete_project(new_name)
    # return result
# return result
############################################################
# Contaminant simulation 04
#
############################################################
def contaminant_simulation(
    name: str,
    modify_pattern_start_time: str,  # start time, e.g. '2024-11-25T09:00:00+08:00'
    modify_total_duration: int = 900,  # total simulation duration, seconds
    source: str = None,  # contaminant source node ID
    concentration: float = None,  # source concentration, mg/L
    source_pattern: str = None,  # name of the source's time pattern
    scheme_Name: str = None,
) -> None:
    """
    Contaminant-injection simulation.

    Copies "<name>_template" into a scratch project ("contaminant_Sim_<name>"),
    configures a concentration source at ``source`` (with either an existing
    pattern or a generated constant pattern covering the duration), switches
    the quality option to CHEMICAL, runs an extended-period simulation, then
    deletes the scratch project.

    :param name: model name, as registered in the database
    :param modify_pattern_start_time: simulation start time, formatted like
        '2024-11-25T09:00:00+08:00'
    :param modify_total_duration: total simulation duration, seconds
    :param source: node ID of the contaminant source
    :param concentration: source concentration in mg/L (the default source
        type is concentration; should be changed to Set point booster)
    :param source_pattern: name of the source's time pattern; if omitted a
        constant pattern ({1.0, ...}) spanning the duration is created, with
        a pattern step equal to the model's hydraulic time step
    :param scheme_Name: scheme name
    :return: None normally; an error string when source_pattern cannot be
        found (NOTE(review): inconsistent with the ``-> None`` hint)
    """
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Analysis."
    )
    # Work on a disposable copy so the base model is never modified.
    new_name = f"contaminant_Sim_{name}"
    if have_project(new_name):
        if is_project_open(new_name):
            close_project(new_name)
        delete_project(new_name)
    # if is_project_open(name):
    #     close_project(name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Copying Database."
    )
    # CopyProjectEx()(name, new_name,
    #                 ['operation', 'current_operation', 'restore_operation', 'batch_operation', 'operation_table'])
    copy_project(name + "_template", new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Opening Database."
    )
    open_project(new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Database Loading OK."
    )
    # Use a 5-minute water-quality timestep.
    dic_time = get_time(new_name)
    dic_time["QUALITY TIMESTEP"] = "0:05:00"
    cs = ChangeSet()
    # NOTE(review): other ChangeSet uses in this file call cs.append(...);
    # confirm cs.operations.append is equivalent here.
    cs.operations.append(dic_time)
    set_time(new_name, cs)  # set QUALITY TIMESTEP
    time_option = get_time(new_name)
    hydraulic_step = time_option["HYDRAULIC TIMESTEP"]
    secs = from_clock_to_seconds_2(hydraulic_step)
    # Counts applied modifications (would drive execute_undo, now disabled).
    operation_step = 0
    # Step 1: default the duration to one hydraulic step.
    if modify_total_duration == None:
        modify_total_duration = secs
    # Step 2: resolve or build the source pattern.
    if source_pattern != None:
        pt = get_pattern(new_name, source_pattern)
        if pt == None:
            str_response = str("cant find source_pattern")
            return str_response
    else:
        # Build a constant (all-1.0) pattern spanning the whole duration.
        cs_pattern = ChangeSet()
        pt = {}
        factors = []
        tmp_duration = modify_total_duration
        while tmp_duration > 0:
            factors.append(1.0)
            tmp_duration = tmp_duration - secs
        pt["id"] = "contam_pt"
        pt["factors"] = factors
        cs_pattern.append(pt)
        add_pattern(new_name, cs_pattern)
    operation_step += 1
    # Step 3: set the source (and turn the node into an inflow node).
    # source quality
    cs_source = ChangeSet()
    source_schema = {
        "node": source,
        "s_type": SOURCE_TYPE_CONCEN,
        "strength": concentration,
        "pattern": pt["id"],
    }
    cs_source.append(source_schema)
    source_node = get_source(new_name, source)
    if len(source_node) == 0:
        add_source(new_name, cs_source)
    else:
        set_source(new_name, cs_source)
    # Force every demand at the source node to -1 (inflow) with no pattern.
    dict_demand = get_demand(new_name, source)
    for demands in dict_demand["demands"]:
        dict_demand["demands"][dict_demand["demands"].index(demands)]["demand"] = -1
        dict_demand["demands"][dict_demand["demands"].index(demands)]["pattern"] = None
    cs = ChangeSet()
    cs.append(dict_demand)
    set_demand(new_name, cs)  # set inflow node
    # # initial quality
    # dict_quality = get_quality(new_name, source)
    # dict_quality['quality'] = concentration
    # cs = ChangeSet()
    # cs.append(dict_quality)
    # set_quality(new_name, cs)
    operation_step += 1
    # Step 4: switch the quality option to CHEMICAL.
    opt = get_option(new_name)
    opt["QUALITY"] = OPTION_QUALITY_CHEMICAL
    cs_option = ChangeSet()
    cs_option.append(opt)
    set_option(new_name, cs_option)
    operation_step += 1
    # Step 5: run the simulation.
    # result = run_simulation_ex(new_name,'realtime', modify_pattern_start_time, modify_pattern_start_time, modify_total_duration,
    #                            downloading_prohibition=True)
    simulation.run_simulation(
        name=new_name,
        simulation_type="extended",
        modify_pattern_start_time=modify_pattern_start_time,
        modify_total_duration=modify_total_duration,
        scheme_Type="contaminant_Analysis",
        scheme_Name=scheme_Name,
    )
    # for i in range(1,operation_step):
    #     execute_undo(name)
    if is_project_open(new_name):
        close_project(new_name)
    delete_project(new_name)
    # return result
# return result
############################################################
# age analysis 05 ***水龄模拟目前还没和实时模拟打通,不确定是否需要,先不要使用***
############################################################
def age_analysis(
    name: str, modify_pattern_start_time: str, modify_total_duration: int = 900
) -> None:
    """
    Water-age simulation. (Not yet integrated with the real-time simulation;
    marked "do not use" by the original author.)

    Copies "<name>_template" into a scratch project ("age_Anal_<name>"), runs
    the simulation, then reads the final node/link quality (age) values from
    the binary output file and returns them as JSON.

    :param name: model name, as registered in the database
    :param modify_pattern_start_time: simulation start time, formatted like
        '2024-11-25T09:00:00+08:00'
    :param modify_total_duration: total simulation duration, seconds
    :return: JSON string {"nodes": [...], "links": [...]} of final age
        values (NOTE(review): inconsistent with the ``-> None`` hint)
    """
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Analysis."
    )
    # Work on a disposable copy so the base model is never modified.
    new_name = f"age_Anal_{name}"
    if have_project(new_name):
        if is_project_open(new_name):
            close_project(new_name)
        delete_project(new_name)
    # if is_project_open(name):
    #     close_project(name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Copying Database."
    )
    # CopyProjectEx()(name, new_name,
    #                 ['operation', 'current_operation', 'restore_operation', 'batch_operation', 'operation_table'])
    copy_project(name + "_template", new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Opening Database."
    )
    open_project(new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Database Loading OK."
    )
    # Step 1: run the simulation.
    # NOTE(review): the commented-out calls elsewhere in this file pass
    # end_datetime as the third positional argument; here
    # modify_total_duration is passed in that position — confirm the
    # run_simulation_ex signature.
    result = run_simulation_ex(
        new_name,
        "realtime",
        modify_pattern_start_time,
        modify_total_duration,
        downloading_prohibition=True,
    )
    # Step 2: drop the scratch project (base model untouched).
    # execute_undo(name)  # (left disabled; uncertain)
    if is_project_open(new_name):
        close_project(new_name)
    delete_project(new_name)
    # Read the last timestep's quality (age) for every node and link.
    output = Output("./temp/{}.db.out".format(new_name))
    # element_name = output.element_name()
    # node_name = element_name['nodes']
    # link_name = element_name['links']
    nodes_age = []
    node_result = output.node_results()
    for node in node_result:
        nodes_age.append(node["result"][-1]["quality"])
    links_age = []
    link_result = output.link_results()
    for link in link_result:
        links_age.append(link["result"][-1]["quality"])
    age_result = {"nodes": nodes_age, "links": links_age}
    # age_result = {'nodes': nodes_age, 'links': links_age, 'nodeIDs': node_name, 'linkIDs': link_name}
    return json.dumps(age_result)
############################################################
# pressure regulation 06
############################################################
def pressure_regulation(
    name: str,
    modify_pattern_start_time: str,
    modify_total_duration: int = 900,
    modify_tank_initial_level: dict[str, float] = None,
    modify_fixed_pump_pattern: dict[str, list] = None,
    modify_variable_pump_pattern: dict[str, list] = None,
    scheme_Name: str = None,
) -> None:
    """
    Zone pressure-regulation simulation: assesses the effect of pump
    switching on zone pressures over the next ~15 minutes.

    Copies "<name>_template" into a scratch project
    ("pressure_regulation_<name>"), runs an extended-period simulation with
    the requested pump/tank modifications under pressure-driven analysis
    (PDA), then deletes the scratch project.

    :param name: model name, as registered in the database
    :param modify_pattern_start_time: simulation start time, formatted like
        '2024-11-25T09:00:00+08:00'
    :param modify_total_duration: total simulation duration, seconds
    :param modify_tank_initial_level: tank levels; key is the tank id, value
        is the replacement initial_level
    :param modify_fixed_pump_pattern: fixed-speed pump patterns; key is the
        pump id, value is the replacement pattern list
    :param modify_variable_pump_pattern: variable-speed pump patterns; key
        is the pump id, value is the replacement pattern list
    :param scheme_Name: simulation scheme name
    :return: None
    """
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Analysis."
    )
    # Work on a disposable copy so the base model is never modified.
    new_name = f"pressure_regulation_{name}"
    if have_project(new_name):
        if is_project_open(new_name):
            close_project(new_name)
        delete_project(new_name)
    # if is_project_open(name):
    #     close_project(name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Copying Database."
    )
    # CopyProjectEx()(name, new_name,
    #                 ['operation', 'current_operation', 'restore_operation', 'batch_operation', 'operation_table'])
    copy_project(name + "_template", new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Opening Database."
    )
    open_project(new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Database Loading OK."
    )
    # Demand-driven results are unreasonable with all pumps off, so switch
    # to pressure-driven analysis (PDA).
    options = get_option(new_name)
    options["DEMAND MODEL"] = OPTION_DEMAND_MODEL_PDA
    options["REQUIRED PRESSURE"] = "15.0000"
    cs_options = ChangeSet()
    cs_options.append(options)
    set_option(new_name, cs_options)
    # result = run_simulation_ex(name=new_name,
    #                            simulation_type='realtime',
    #                            start_datetime=start_datetime,
    #                            duration=900,
    #                            pump_control=pump_control,
    #                            tank_initial_level_control=tank_initial_level_control,
    #                            downloading_prohibition=True)
    simulation.run_simulation(
        name=new_name,
        simulation_type="extended",
        modify_pattern_start_time=modify_pattern_start_time,
        modify_total_duration=modify_total_duration,
        modify_tank_initial_level=modify_tank_initial_level,
        modify_fixed_pump_pattern=modify_fixed_pump_pattern,
        modify_variable_pump_pattern=modify_variable_pump_pattern,
        scheme_Type="pressure_regulation",
        scheme_Name=scheme_Name,
    )
    # Drop the scratch project (base model untouched).
    if is_project_open(new_name):
        close_project(new_name)
    delete_project(new_name)
    # return result
# return result

View File

@@ -0,0 +1,52 @@
from typing import Annotated, List, Optional
from fastapi import APIRouter, Depends, HTTPException, Header, status
from pydantic import BaseModel
router = APIRouter()
# 简易令牌验证(实际项目中应替换为 JWT/OAuth2 等)
AUTH_TOKEN = "567e33c876a2" # 预设的有效令牌
WHITE_LIST = ["/docs", "/openapi.json", "/redoc", "/api/v1/auth/login/"]
async def verify_token(authorization: Annotated[str, Header()] = None):
    """
    FastAPI dependency validating a ``Bearer`` token from the Authorization
    header against the statically configured AUTH_TOKEN.

    :param authorization: raw Authorization header value ("Bearer <token>")
    :raises HTTPException: 401 if the header is missing or malformed,
        403 if the token does not match
    :return: True when the token is valid
    """
    # Header must be present.
    if not authorization:
        raise HTTPException(status_code=401, detail="Authorization header missing")
    # Expected format "Bearer <token>"; a missing space makes the tuple
    # unpacking raise ValueError, handled below.
    try:
        token_type, token = authorization.split(" ", 1)
        if token_type.lower() != "bearer":
            raise ValueError
    except ValueError:
        raise HTTPException(
            status_code=401, detail="Invalid authorization format. Use: Bearer <token>"
        )
    # Compare against the preset token (no JWT validation yet).
    if token != AUTH_TOKEN:
        raise HTTPException(status_code=403, detail="Invalid authentication token")
    return True
def generate_access_token(username: str, password: str) -> str:
    """
    Exchange credentials for the (currently static) API access token.

    :param username: user name
    :param password: password
    :return: the access token string
    :raises ValueError: when the credentials are not the expected pair
    """
    # Only a single hard-coded account exists at the moment.
    if username == "tjwater" and password == "tjwater@123":
        # Static token; matches the AUTH_TOKEN checked by verify_token.
        return "567e33c876a2"
    raise ValueError("用户名或密码错误")
@router.post("/login/")
async def login(username: str, password: str) -> str:
    """Exchange username/password for an access token; invalid credentials
    raise ValueError (see generate_access_token)."""
    return generate_access_token(username, password)

View File

@@ -0,0 +1,37 @@
from fastapi import APIRouter
from app.infra.cache.redis_client import redis_client
router = APIRouter()
@router.post("/clearrediskey/")
async def fastapi_clear_redis_key(key: str):
    """Delete a single Redis key; always returns True."""
    redis_client.delete(key)
    return True
@router.post("/clearrediskeys/")
async def fastapi_clear_redis_keys(keys: str):
    """Delete every Redis key whose name contains the given substring."""
    # KEYS scans the whole keyspace — acceptable for this admin endpoint.
    matched_keys = redis_client.keys(f"*{keys}*")
    if matched_keys:
        redis_client.delete(*matched_keys)
    return True
@router.post("/clearallredis/")
async def fastapi_clear_all_redis():
    """Flush the entire current Redis database; always returns True."""
    redis_client.flushdb()
    return True
@router.get("/queryredis/")
async def fastapi_query_redis():
    """List all keys in the current Redis database, decoded to str."""
    keys = redis_client.keys("*")
    # redis-py returns bytes unless the client was created with
    # decode_responses=True; normalise to str for the JSON response.
    return [k.decode('utf-8') if isinstance(k, bytes) else k for k in keys]

View File

@@ -0,0 +1,31 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()
@router.get("/getcontrolschema/")
async def fastapi_get_control_schema(network: str) -> dict[str, dict[str, Any]]:
    """Return the property schema for simple controls of the given network."""
    return get_control_schema(network)
@router.get("/getcontrolproperties/")
async def fastapi_get_control_properties(network: str) -> dict[str, Any]:
    """Return the simple-control properties of the given network."""
    return get_control(network)
@router.post("/setcontrolproperties/", response_model=None)
async def fastapi_set_control_properties(network: str, req: Request) -> ChangeSet:
    """Apply the JSON request body as simple-control properties; returns the
    resulting ChangeSet."""
    props = await req.json()
    return set_control(network, ChangeSet(props))
@router.get("/getruleschema/")
async def fastapi_get_rule_schema(network: str) -> dict[str, dict[str, Any]]:
return get_rule_schema(network)
@router.get("/getruleproperties/")
async def fastapi_get_rule_properties(network: str) -> dict[str, Any]:
return get_rule(network)
@router.post("/setruleproperties/", response_model=None)
async def fastapi_set_rule_properties(network: str, req: Request) -> ChangeSet:
props = await req.json()
return set_rule(network, ChangeSet(props))

View File

@@ -0,0 +1,42 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()


@router.get("/getcurveschema")
async def fastapi_get_curve_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the fields of a [CURVES] entry."""
    return get_curve_schema(network)


@router.post("/addcurve/", response_model=None)
async def fastapi_add_curve(network: str, curve: str, req: Request) -> ChangeSet:
    """Create curve *curve* with the properties supplied in the JSON body."""
    payload = await req.json()
    merged = {"id": curve, **payload}
    return add_curve(network, ChangeSet(merged))


@router.post("/deletecurve/", response_model=None)
async def fastapi_delete_curve(network: str, curve: str) -> ChangeSet:
    """Remove curve *curve* from *network*."""
    return delete_curve(network, ChangeSet({"id": curve}))


@router.get("/getcurveproperties/")
async def fastapi_get_curve_properties(network: str, curve: str) -> dict[str, Any]:
    """Fetch the properties of one curve."""
    return get_curve(network, curve)


@router.post("/setcurveproperties/", response_model=None)
async def fastapi_set_curve_properties(
    network: str, curve: str, req: Request
) -> ChangeSet:
    """Update the properties of curve *curve* from the JSON body."""
    payload = await req.json()
    merged = {"id": curve, **payload}
    return set_curve(network, ChangeSet(merged))


@router.get("/getcurves/")
async def fastapi_get_curves(network: str) -> list[str]:
    """List the IDs of every curve in *network*."""
    return get_curves(network)


@router.get("/iscurve/")
async def fastapi_is_curve(network: str, curve: str) -> bool:
    """True when *curve* names an existing curve."""
    return is_curve(network, curve)

View File

@@ -0,0 +1,60 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()


@router.get("/gettimeschema")
async def fastapi_get_time_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the fields of the [TIMES] section."""
    return get_time_schema(network)


@router.get("/gettimeproperties/")
async def fastapi_get_time_properties(network: str) -> dict[str, Any]:
    """Fetch the [TIMES] settings of *network*."""
    return get_time(network)


@router.post("/settimeproperties/", response_model=None)
async def fastapi_set_time_properties(network: str, req: Request) -> ChangeSet:
    """Update the [TIMES] settings from the JSON body."""
    payload = await req.json()
    return set_time(network, ChangeSet(payload))


@router.get("/getenergyschema/")
async def fastapi_get_energy_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the fields of the global [ENERGY] section."""
    return get_energy_schema(network)


@router.get("/getenergyproperties/")
async def fastapi_get_energy_properties(network: str) -> dict[str, Any]:
    """Fetch the global [ENERGY] settings."""
    return get_energy(network)


@router.post("/setenergyproperties/", response_model=None)
async def fastapi_set_energy_properties(network: str, req: Request) -> ChangeSet:
    """Update the global [ENERGY] settings from the JSON body."""
    payload = await req.json()
    return set_energy(network, ChangeSet(payload))
@router.get("/getpumpenergyschema/")
async def fastapi_get_pump_energy_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the per-pump energy fields."""
    return get_pump_energy_schema(network)


# NOTE(review): the double slash in the two routes below looks like a lost
# "{pump}" path parameter — confirm against API clients before changing the URL.
@router.get("/getpumpenergyproperties//")
async def fastapi_get_pump_energy_properties(network: str, pump: str) -> dict[str, Any]:
    """Fetch the energy settings of *pump* (typo 'proeprties' in the old name fixed)."""
    return get_pump_energy(network, pump)


# Bug fix: this mutating endpoint was registered with @router.get even though
# it reads a JSON request body; every sibling "set*" endpoint uses POST.
@router.post("/setpumpenergyproperties//", response_model=None)
async def fastapi_set_pump_energy_properties(
    network: str, pump: str, req: Request
) -> ChangeSet:
    """Update the energy settings of *pump* from the JSON body."""
    props = await req.json()
    ps = {"id": pump} | props
    return set_pump_energy(network, ChangeSet(ps))
@router.get("/getoptionschema/")
async def fastapi_get_option_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the fields of the [OPTIONS] section (v3 schema)."""
    return get_option_v3_schema(network)


@router.get("/getoptionproperties/")
async def fastapi_get_option_properties(network: str) -> dict[str, Any]:
    """Fetch the [OPTIONS] settings of *network* (v3)."""
    return get_option_v3(network)


@router.post("/setoptionproperties/", response_model=None)
async def fastapi_set_option_properties(network: str, req: Request) -> ChangeSet:
    """Update the [OPTIONS] settings from the JSON body (v3)."""
    payload = await req.json()
    return set_option_v3(network, ChangeSet(payload))

View File

@@ -0,0 +1,42 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()


@router.get("/getpatternschema")
async def fastapi_get_pattern_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the fields of a [PATTERNS] entry."""
    return get_pattern_schema(network)


@router.post("/addpattern/", response_model=None)
async def fastapi_add_pattern(network: str, pattern: str, req: Request) -> ChangeSet:
    """Create pattern *pattern* with the properties from the JSON body."""
    payload = await req.json()
    merged = {"id": pattern, **payload}
    return add_pattern(network, ChangeSet(merged))


@router.post("/deletepattern/", response_model=None)
async def fastapi_delete_pattern(network: str, pattern: str) -> ChangeSet:
    """Remove pattern *pattern* from *network*."""
    return delete_pattern(network, ChangeSet({"id": pattern}))


@router.get("/getpatternproperties/")
async def fastapi_get_pattern_properties(network: str, pattern: str) -> dict[str, Any]:
    """Fetch the properties of one pattern."""
    return get_pattern(network, pattern)


@router.post("/setpatternproperties/", response_model=None)
async def fastapi_set_pattern_properties(
    network: str, pattern: str, req: Request
) -> ChangeSet:
    """Update the properties of pattern *pattern* from the JSON body."""
    payload = await req.json()
    merged = {"id": pattern, **payload}
    return set_pattern(network, ChangeSet(merged))


@router.get("/ispattern/")
async def fastapi_is_pattern(network: str, pattern: str) -> bool:
    """True when *pattern* names an existing pattern."""
    return is_pattern(network, pattern)


@router.get("/getpatterns/")
async def fastapi_get_patterns(network: str) -> list[str]:
    """List the IDs of every pattern in *network*."""
    return get_patterns(network)

View File

@@ -0,0 +1,119 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()


@router.get("/getqualityschema/")
async def fastapi_get_quality_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the fields of a [QUALITY] entry."""
    return get_quality_schema(network)


@router.get("/getqualityproperties/")
async def fastapi_get_quality_properties(network: str, node: str) -> dict[str, Any]:
    """Fetch the initial-quality settings of *node*."""
    return get_quality(network, node)


@router.post("/setqualityproperties/", response_model=None)
async def fastapi_set_quality_properties(network: str, req: Request) -> ChangeSet:
    """Update initial-quality settings from the JSON body."""
    payload = await req.json()
    return set_quality(network, ChangeSet(payload))


@router.get("/getemitterschema")
async def fastapi_get_emitter_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the fields of an [EMITTERS] entry."""
    return get_emitter_schema(network)


@router.get("/getemitterproperties/")
async def fastapi_get_emitter_properties(network: str, junction: str) -> dict[str, Any]:
    """Fetch the emitter settings of *junction*."""
    return get_emitter(network, junction)


@router.post("/setemitterproperties/", response_model=None)
async def fastapi_set_emitter_properties(
    network: str, junction: str, req: Request
) -> ChangeSet:
    """Update the emitter settings of *junction* from the JSON body."""
    payload = await req.json()
    merged = {"junction": junction, **payload}
    return set_emitter(network, ChangeSet(merged))
# NOTE(review): the path below reads "getsourcechema", probably a typo for
# "getsourceschema"; left unchanged because the URL is part of the public API.
@router.get("/getsourcechema/")
async def fastapi_get_source_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the fields of a [SOURCES] entry."""
    return get_source_schema(network)


@router.get("/getsource/")
async def fastapi_get_source(network: str, node: str) -> dict[str, Any]:
    """Fetch the quality-source settings of *node*."""
    return get_source(network, node)


@router.post("/setsource/", response_model=None)
async def fastapi_set_source(network: str, req: Request) -> ChangeSet:
    """Update a quality source from the JSON body."""
    payload = await req.json()
    return set_source(network, ChangeSet(payload))


@router.post("/addsource/", response_model=None)
async def fastapi_add_source(network: str, req: Request) -> ChangeSet:
    """Create a quality source from the JSON body."""
    payload = await req.json()
    return add_source(network, ChangeSet(payload))


@router.post("/deletesource/", response_model=None)
async def fastapi_delete_source(network: str, node: str) -> ChangeSet:
    """Remove the quality source attached to *node*."""
    return delete_source(network, ChangeSet({"node": node}))
@router.get("/getreactionschema/")
async def fastapi_get_reaction_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the fields of the global [REACTIONS] section."""
    return get_reaction_schema(network)


@router.get("/getreaction/")
async def fastapi_get_reaction(network: str) -> dict[str, Any]:
    """Fetch the global reaction settings of *network*."""
    return get_reaction(network)


@router.post("/setreaction/", response_model=None)
async def fastapi_set_reaction(network: str, req: Request) -> ChangeSet:
    """Update the global reaction settings from the JSON body."""
    payload = await req.json()
    return set_reaction(network, ChangeSet(payload))


@router.get("/getpipereactionschema/")
async def fastapi_get_pipe_reaction_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the per-pipe reaction fields."""
    return get_pipe_reaction_schema(network)


@router.get("/getpipereaction/")
async def fastapi_get_pipe_reaction(network: str, pipe: str) -> dict[str, Any]:
    """Fetch the reaction coefficients of *pipe*."""
    return get_pipe_reaction(network, pipe)


@router.post("/setpipereaction/", response_model=None)
async def fastapi_set_pipe_reaction(network: str, req: Request) -> ChangeSet:
    """Update a pipe's reaction coefficients from the JSON body."""
    payload = await req.json()
    return set_pipe_reaction(network, ChangeSet(payload))


@router.get("/gettankreactionschema/")
async def fastapi_get_tank_reaction_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the per-tank reaction fields."""
    return get_tank_reaction_schema(network)


@router.get("/gettankreaction/")
async def fastapi_get_tank_reaction(network: str, tank: str) -> dict[str, Any]:
    """Fetch the reaction coefficients of *tank*."""
    return get_tank_reaction(network, tank)


@router.post("/settankreaction/", response_model=None)
async def fastapi_set_tank_reaction(network: str, req: Request) -> ChangeSet:
    """Update a tank's reaction coefficients from the JSON body."""
    payload = await req.json()
    return set_tank_reaction(network, ChangeSet(payload))
@router.get("/getmixingschema/")
async def fastapi_get_mixing_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the fields of a [MIXING] entry."""
    return get_mixing_schema(network)


@router.get("/getmixing/")
async def fastapi_get_mixing(network: str, tank: str) -> dict[str, Any]:
    """Fetch the mixing-model settings of *tank*."""
    return get_mixing(network, tank)


@router.post("/setmixing/", response_model=None)
async def fastapi_set_mixing(network: str, req: Request) -> ChangeSet:
    """Update a tank's mixing settings from the JSON body.

    Bug fix: the original called ``api.set_mixing`` but this module imports no
    ``api`` name, so the endpoint raised NameError; call the star-imported
    ``set_mixing`` like every sibling route.
    """
    props = await req.json()
    return set_mixing(network, ChangeSet(props))
@router.post("/addmixing/", response_model=None)
async def fastapi_add_mixing(network: str, req: Request) -> ChangeSet:
    """Create a tank mixing entry from the JSON body."""
    payload = await req.json()
    return add_mixing(network, ChangeSet(payload))


@router.post("/deletemixing/", response_model=None)
async def fastapi_delete_mixing(network: str, req: Request) -> ChangeSet:
    """Remove a tank mixing entry identified by the JSON body."""
    payload = await req.json()
    return delete_mixing(network, ChangeSet(payload))

View File

@@ -0,0 +1,76 @@
from fastapi import APIRouter, Request, Response
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
from fastapi.responses import PlainTextResponse
import json
router = APIRouter()


@router.get("/getvertexschema/")
async def fastapi_get_vertex_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the fields of a [VERTICES] entry."""
    return get_vertex_schema(network)


@router.get("/getvertexproperties/")
async def fastapi_get_vertex_properties(network: str, link: str) -> dict[str, Any]:
    """Fetch the vertices of *link*."""
    return get_vertex(network, link)


@router.post("/setvertexproperties/", response_model=None)
async def fastapi_set_vertex_properties(network: str, req: Request) -> ChangeSet:
    """Update link vertices from the JSON body."""
    payload = await req.json()
    return set_vertex(network, ChangeSet(payload))


@router.post("/addvertex/", response_model=None)
async def fastapi_add_vertex(network: str, req: Request) -> ChangeSet:
    """Add a vertex described by the JSON body."""
    payload = await req.json()
    return add_vertex(network, ChangeSet(payload))


@router.post("/deletevertex/", response_model=None)
async def fastapi_delete_vertex(network: str, req: Request) -> ChangeSet:
    """Remove a vertex described by the JSON body."""
    payload = await req.json()
    return delete_vertex(network, ChangeSet(payload))
@router.get("/getallvertexlinks/", response_class=PlainTextResponse)
async def fastapi_get_all_vertex_links(network: str) -> str:
    """Return all links carrying vertices as a JSON-encoded string.

    Fix: the handler serialises manually and replies as plain text, so the
    return annotation must be ``str`` — the previous ``list[str]`` annotation
    made FastAPI infer a list response model and fail validating the string.
    """
    return json.dumps(get_all_vertex_links(network))


@router.get("/getallvertices/", response_class=PlainTextResponse)
async def fastapi_get_all_vertices(network: str) -> str:
    """Return every vertex of *network* as a JSON-encoded string (see above fix)."""
    return json.dumps(get_all_vertices(network))
@router.get("/getlabelschema/")
async def fastapi_get_label_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the fields of a [LABELS] entry."""
    return get_label_schema(network)


@router.get("/getlabelproperties/")
async def fastapi_get_label_properties(
    network: str, x: float, y: float
) -> dict[str, Any]:
    """Fetch the label anchored at coordinates (*x*, *y*)."""
    return get_label(network, x, y)


@router.post("/setlabelproperties/", response_model=None)
async def fastapi_set_label_properties(network: str, req: Request) -> ChangeSet:
    """Update a map label from the JSON body."""
    payload = await req.json()
    return set_label(network, ChangeSet(payload))


@router.post("/addlabel/", response_model=None)
async def fastapi_add_label(network: str, req: Request) -> ChangeSet:
    """Create a map label from the JSON body."""
    payload = await req.json()
    return add_label(network, ChangeSet(payload))


@router.post("/deletelabel/", response_model=None)
async def fastapi_delete_label(network: str, req: Request) -> ChangeSet:
    """Remove a map label identified by the JSON body."""
    payload = await req.json()
    return delete_label(network, ChangeSet(payload))


@router.get("/getbackdropschema/")
async def fastapi_get_backdrop_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the fields of the [BACKDROP] section."""
    return get_backdrop_schema(network)


@router.get("/getbackdropproperties/")
async def fastapi_get_backdrop_properties(network: str) -> dict[str, Any]:
    """Fetch the backdrop settings of *network*."""
    return get_backdrop(network)


@router.post("/setbackdropproperties/", response_model=None)
async def fastapi_set_backdrop_properties(network: str, req: Request) -> ChangeSet:
    """Update the backdrop settings from the JSON body."""
    payload = await req.json()
    return set_backdrop(network, ChangeSet(payload))

View File

@@ -0,0 +1,388 @@
from typing import Any, List, Dict, Optional
import logging
from datetime import datetime, timedelta, timezone, time as dt_time
import msgpack
from fastapi import APIRouter
from pydantic import BaseModel
from py_linq import Enumerable
import app.infra.db.influxdb.api as influxdb_api
import app.services.time_api as time_api
from app.infra.cache.redis_client import redis_client, encode_datetime, decode_datetime
router = APIRouter()
logger = logging.getLogger(__name__)


# Basic Node/Link Latest Record Queries
@router.get("/querynodelatestrecordbyid/")
async def fastapi_query_node_latest_record_by_id(id: str) -> Any:
    """Latest stored record for one node."""
    return influxdb_api.query_latest_record_by_ID(id, type="node")


@router.get("/querylinklatestrecordbyid/")
async def fastapi_query_link_latest_record_by_id(id: str) -> Any:
    """Latest stored record for one link."""
    return influxdb_api.query_latest_record_by_ID(id, type="link")


@router.get("/queryscadalatestrecordbyid/")
async def fastapi_query_scada_latest_record_by_id(id: str) -> Any:
    """Latest stored record for one SCADA device."""
    return influxdb_api.query_latest_record_by_ID(id, type="scada")
# Time-based Queries
@router.get("/queryallrecordsbytime/")
async def fastapi_query_all_records_by_time(querytime: str) -> dict[str, list]:
    """All node and link records at one instant."""
    nodes, links = influxdb_api.query_all_records_by_time(query_time=querytime)
    return {"nodes": nodes, "links": links}


@router.get("/queryallrecordsbytimeproperty/")
async def fastapi_query_all_record_by_time_property(
    querytime: str, type: str, property: str, bucket: str = "realtime_simulation_result"
) -> dict[str, list]:
    """One property of every node/link of the given *type* at one instant."""
    records = influxdb_api.query_all_record_by_time_property(
        query_time=querytime, type=type, property=property, bucket=bucket
    )
    return {"results": records}
@router.get("/queryallschemerecordsbytimeproperty/")
async def fastapi_query_all_scheme_record_by_time_property(
    querytime: str,
    type: str,
    property: str,
    schemename: str,
    bucket: str = "scheme_simulation_result",
) -> dict[str, list]:
    """Query one property of every 'node' or 'link' record of a scheme
    at the given instant."""
    records = influxdb_api.query_all_scheme_record_by_time_property(
        query_time=querytime,
        type=type,
        property=property,
        scheme_name=schemename,
        bucket=bucket,
    )
    return {"results": records}


@router.get("/querysimulationrecordsbyidtime/")
async def fastapi_query_simulation_record_by_ids_time(
    id: str, querytime: str, type: str, bucket: str = "realtime_simulation_result"
) -> dict[str, list]:
    """Simulation result for one element at one instant."""
    records = influxdb_api.query_simulation_result_by_ID_time(
        ID=id, type=type, query_time=querytime, bucket=bucket
    )
    return {"results": records}


@router.get("/queryschemesimulationrecordsbyidtime/")
async def fastapi_query_scheme_simulation_record_by_ids_time(
    scheme_name: str,
    id: str,
    querytime: str,
    type: str,
    bucket: str = "scheme_simulation_result",
) -> dict[str, list]:
    """Scheme simulation result for one element at one instant."""
    records = influxdb_api.query_scheme_simulation_result_by_ID_time(
        scheme_name=scheme_name, ID=id, type=type, query_time=querytime, bucket=bucket
    )
    return {"results": records}
# Date-based Queries with Caching
@router.get("/queryallrecordsbydate/")
async def fastapi_query_all_records_by_date(querydate: str) -> dict:
    """All node/link records for one day; past days are served from Redis."""
    is_today_or_future = time_api.is_today_or_future(querydate)
    logger.info(f"isToday or future: {is_today_or_future}")
    cache_key = f"queryallrecordsbydate_{querydate}"
    cacheable = not is_today_or_future
    if cacheable:
        cached = redis_client.get(cache_key)
        if cached:
            results = msgpack.unpackb(cached, object_hook=decode_datetime)
            logger.info("return from cache redis")
            return results
    logger.info("query from influxdb")
    nodes, links = influxdb_api.query_all_records_by_date(query_date=querydate)
    results = {"nodes": nodes, "links": links}
    if cacheable:
        # Only completed days are immutable enough to cache.
        logger.info("save to cache redis")
        redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime))
    logger.info("return results")
    return results
@router.get("/queryallrecordsbytimerange/")
async def fastapi_query_all_records_by_time_range(
    starttime: str, endtime: str
) -> dict[str, list]:
    """All node/link records inside [starttime, endtime]; cached when the
    range starts in the past."""
    cache_key = f"queryallrecordsbytimerange_{starttime}_{endtime}"
    cacheable = not time_api.is_today_or_future(starttime)
    if cacheable:
        cached = redis_client.get(cache_key)
        if cached:
            return msgpack.unpackb(cached, object_hook=decode_datetime)
    nodes, links = influxdb_api.query_all_records_by_time_range(
        starttime=starttime, endtime=endtime
    )
    results = {"nodes": nodes, "links": links}
    if cacheable:
        redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime))
    return results
@router.get("/queryallrecordsbydatewithtype/")
async def fastapi_query_all_records_by_date_with_type(
    querydate: str, querytype: str
) -> list:
    """All records of one element type for one day, cached in Redis."""
    cache_key = f"queryallrecordsbydatewithtype_{querydate}_{querytype}"
    cached = redis_client.get(cache_key)
    if cached:
        return msgpack.unpackb(cached, object_hook=decode_datetime)
    results = influxdb_api.query_all_records_by_date_with_type(
        query_date=querydate, query_type=querytype
    )
    redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime))
    return results
@router.get("/queryallrecordsbyidsdatetype/")
async def fastapi_query_all_records_by_ids_date_type(
    ids: str, querydate: str, querytype: str
) -> list:
    """Records of one day/type filtered to a comma-separated ID list.

    Deliberately shares its cache entry with /queryallrecordsbydatewithtype/:
    the full day/type result is cached once and filtered per request.
    """
    cache_key = f"queryallrecordsbydatewithtype_{querydate}_{querytype}"
    cached = redis_client.get(cache_key)
    if cached:
        records = msgpack.unpackb(cached, object_hook=decode_datetime)
    else:
        records = influxdb_api.query_all_records_by_date_with_type(
            query_date=querydate, query_type=querytype
        )
        redis_client.set(cache_key, msgpack.packb(records, default=encode_datetime))
    wanted = ids.split(",")
    # Plain filter; equivalent to the former py_linq Enumerable.where(...).to_list().
    return [record for record in records if record["ID"] in wanted]
@router.get("/queryallrecordsbydateproperty/")
async def fastapi_query_all_records_by_date_property(
    querydate: str, querytype: str, property: str
) -> list[dict]:
    """One property of every element of *querytype* for one day, cached."""
    cache_key = f"queryallrecordsbydateproperty_{querydate}_{querytype}_{property}"
    cached = redis_client.get(cache_key)
    if cached:
        return msgpack.unpackb(cached, object_hook=decode_datetime)
    records = influxdb_api.query_all_record_by_date_property(
        query_date=querydate, type=querytype, property=property
    )
    redis_client.set(cache_key, msgpack.packb(records, default=encode_datetime))
    return records
# Curve Queries
@router.get("/querynodecurvebyidpropertydaterange/")
async def fastapi_query_node_curve_by_id_property_daterange(
    id: str, prop: str, startdate: str, enddate: str
):
    """Time-series curve of one node property over a date range."""
    return influxdb_api.query_curve_by_ID_property_daterange(
        id, type="node", property=prop, start_date=startdate, end_date=enddate
    )


@router.get("/querylinkcurvebyidpropertydaterange/")
async def fastapi_query_link_curve_by_id_property_daterange(
    id: str, prop: str, startdate: str, enddate: str
):
    """Time-series curve of one link property over a date range."""
    return influxdb_api.query_curve_by_ID_property_daterange(
        id, type="link", property=prop, start_date=startdate, end_date=enddate
    )
# SCADA Data Queries
@router.get("/queryscadadatabydeviceidandtime/")
async def fastapi_query_scada_data_by_device_id_and_time(ids: str, querytime: str):
    """SCADA readings for a comma-separated device list at one instant."""
    query_ids = ids.split(",")
    logger.info(querytime)
    return influxdb_api.query_SCADA_data_by_device_ID_and_time(
        query_ids_list=query_ids, query_time=querytime
    )


# Consistency fix for the five range endpoints below: they used bare print()
# for diagnostics although this module defines and uses a logger; switched to
# logger.info with lazy %-style arguments.
@router.get("/queryscadadatabydeviceidandtimerange/")
async def fastapi_query_scada_data_by_device_id_and_time_range(
    ids: str, starttime: str, endtime: str
):
    """Raw SCADA readings for the devices over [starttime, endtime]."""
    logger.info("query_ids: %s, starttime: %s, endtime: %s", ids, starttime, endtime)
    query_ids = ids.split(",")
    return influxdb_api.query_SCADA_data_by_device_ID_and_timerange(
        query_ids_list=query_ids, start_time=starttime, end_time=endtime
    )


@router.get("/queryfillingscadadatabydeviceidandtimerange/")
async def fastapi_query_filling_scada_data_by_device_id_and_time_range(
    ids: str, starttime: str, endtime: str
):
    """Gap-filled SCADA readings for the devices over the time range."""
    logger.info("query_ids: %s, starttime: %s, endtime: %s", ids, starttime, endtime)
    query_ids = ids.split(",")
    return influxdb_api.query_filling_SCADA_data_by_device_ID_and_timerange(
        query_ids_list=query_ids, start_time=starttime, end_time=endtime
    )


@router.get("/querycleaningscadadatabydeviceidandtimerange/")
async def fastapi_query_cleaning_scada_data_by_device_id_and_time_range(
    ids: str, starttime: str, endtime: str
):
    """Cleaning-stage SCADA readings for the devices over the time range."""
    logger.info("query_ids: %s, starttime: %s, endtime: %s", ids, starttime, endtime)
    query_ids = ids.split(",")
    return influxdb_api.query_cleaning_SCADA_data_by_device_ID_and_timerange(
        query_ids_list=query_ids, start_time=starttime, end_time=endtime
    )


@router.get("/querysimulationscadadatabydeviceidandtimerange/")
async def fastapi_query_simulation_scada_data_by_device_id_and_time_range(
    ids: str, starttime: str, endtime: str
):
    """Simulated SCADA readings for the devices over the time range."""
    logger.info("query_ids: %s, starttime: %s, endtime: %s", ids, starttime, endtime)
    query_ids = ids.split(",")
    return influxdb_api.query_simulation_SCADA_data_by_device_ID_and_timerange(
        query_ids_list=query_ids, start_time=starttime, end_time=endtime
    )


@router.get("/querycleanedscadadatabydeviceidandtimerange/")
async def fastapi_query_cleaned_scada_data_by_device_id_and_time_range(
    ids: str, starttime: str, endtime: str
):
    """Cleaned SCADA readings for the devices over the time range."""
    logger.info("query_ids: %s, starttime: %s, endtime: %s", ids, starttime, endtime)
    query_ids = ids.split(",")
    return influxdb_api.query_cleaned_SCADA_data_by_device_ID_and_timerange(
        query_ids_list=query_ids, start_time=starttime, end_time=endtime
    )
@router.get("/queryscadadatabydeviceidanddate/")
async def fastapi_query_scada_data_by_device_id_and_date(ids: str, querydate: str):
    """SCADA readings for a comma-separated device list over one day."""
    query_ids = ids.split(",")
    return influxdb_api.query_SCADA_data_by_device_ID_and_date(
        query_ids_list=query_ids, query_date=querydate
    )


@router.get("/queryallscadarecordsbydate/")
async def fastapi_query_all_scada_records_by_date(querydate: str):
    """All SCADA records for one day; completed days are cached in Redis."""
    is_today_or_future = time_api.is_today_or_future(querydate)
    logger.info(f"isToday or future: {is_today_or_future}")
    cache_key = f"queryallscadarecordsbydate_{querydate}"
    cacheable = not is_today_or_future
    if cacheable:
        cached = redis_client.get(cache_key)
        if cached:
            records = msgpack.unpackb(cached, object_hook=decode_datetime)
            logger.info("return from cache redis")
            return records
    logger.info("query from influxdb")
    records = influxdb_api.query_all_SCADA_records_by_date(query_date=querydate)
    if cacheable:
        logger.info("save to cache redis")
        redis_client.set(cache_key, msgpack.packb(records, default=encode_datetime))
    logger.info("return results")
    return records
@router.get("/queryallschemeallrecords/")
async def fastapi_query_all_scheme_all_records(
    schemetype: str, schemename: str, querydate: str
) -> tuple:
    """Every record produced by one scheme for one day, cached in Redis."""
    cache_key = f"queryallschemeallrecords_{schemetype}_{schemename}_{querydate}"
    cached = redis_client.get(cache_key)
    if cached:
        return msgpack.unpackb(cached, object_hook=decode_datetime)
    records = influxdb_api.query_scheme_all_record(
        scheme_Type=schemetype, scheme_Name=schemename, query_date=querydate
    )
    redis_client.set(cache_key, msgpack.packb(records, default=encode_datetime))
    return records


@router.get("/queryschemeallrecordsproperty/")
async def fastapi_query_all_scheme_all_records_property(
    schemetype: str, schemename: str, querydate: str, querytype: str, queryproperty: str
) -> Optional[List]:
    """Node or link slice of a scheme's records for one day.

    Intentionally shares the cache entry of /queryallschemeallrecords/ and
    slices it: index 0 holds node records, index 1 link records.
    """
    cache_key = f"queryallschemeallrecords_{schemetype}_{schemename}_{querydate}"
    cached = redis_client.get(cache_key)
    if cached:
        all_records = msgpack.unpackb(cached, object_hook=decode_datetime)
    else:
        all_records = influxdb_api.query_scheme_all_record(
            scheme_Type=schemetype, scheme_Name=schemename, query_date=querydate
        )
        redis_client.set(cache_key, msgpack.packb(all_records, default=encode_datetime))
    if querytype == "node":
        return all_records[0]
    if querytype == "link":
        return all_records[1]
    return None
@router.get("/queryinfluxdbbuckets/")
async def fastapi_query_influxdb_buckets():
    """List the InfluxDB buckets visible to the service."""
    return influxdb_api.query_buckets()


@router.get("/queryinfluxdbbucketmeasurements/")
async def fastapi_query_influxdb_bucket_measurements(bucket: str):
    """List the measurements stored in *bucket*."""
    return influxdb_api.query_measurements(bucket=bucket)
############################################################
# download history data
############################################################
class Download_History_Data_Manually(BaseModel):
    """Request body: the local date to re-download, e.g. datetime(2025, 5, 4)."""

    # Only the .date() part is used; the day is expanded to a full-day range.
    download_date: datetime


@router.post("/download_history_data_manually/")
async def fastapi_download_history_data_manually(
    data: Download_History_Data_Manually,
) -> None:
    """Re-download one day of history data, bounded in UTC+8 local time."""
    day = data.dict()["download_date"].date()
    tz = timezone(timedelta(hours=8))
    begin_dt = datetime.combine(day, dt_time.min).replace(tzinfo=tz)
    end_dt = datetime.combine(day, dt_time(23, 59, 59)).replace(tzinfo=tz)
    influxdb_api.download_history_data_manually(
        begin_time=begin_dt.isoformat(), end_time=end_dt.isoformat()
    )

View File

@@ -0,0 +1,31 @@
from typing import List, Any
from fastapi import APIRouter, Request, HTTPException
from app.native.api import ChangeSet
from app.services.tjnetwork import (
get_all_extension_data_keys,
get_all_extension_data,
get_extension_data,
set_extension_data
)
router = APIRouter()


@router.get("/getallextensiondatakeys/")
async def get_all_extension_data_keys_endpoint(network: str) -> list[str]:
    """List every extension-data key stored on *network*."""
    return get_all_extension_data_keys(network)


@router.get("/getallextensiondata/")
async def get_all_extension_data_endpoint(network: str) -> dict[str, Any]:
    """Fetch the whole extension-data mapping of *network*."""
    return get_all_extension_data(network)


@router.get("/getextensiondata/")
async def get_extension_data_endpoint(network: str, key: str) -> str | None:
    """Fetch one extension-data value, or None when *key* is absent."""
    return get_extension_data(network, key)
@router.post("/setextensiondata", response_model=None)
async def set_extension_data_endpoint(network: str, req: Request) -> ChangeSet:
    """Store extension key/value data on *network* and return the change set.

    Debug prints removed: ``print(cs.operations[0])`` raised IndexError
    whenever the returned change set carried no operations, and raw
    ``print(props)`` leaked request bodies to stdout.
    """
    props = await req.json()
    cs = set_extension_data(network, ChangeSet(props))
    return cs

View File

@@ -0,0 +1,55 @@
from typing import Any
import random
from fastapi import APIRouter
from fastapi.responses import JSONResponse
from fastapi import status
from pydantic import BaseModel
from app.services.tjnetwork import (
get_all_sensor_placements,
get_all_burst_locate_results,
)
router = APIRouter()


@router.get("/getjson/")
async def fastapi_get_json():
    """Demo endpoint: always answers with an HTTP 400 JSON payload."""
    body = {
        "code": 400,
        "message": "this is message",
        "data": 123,
    }
    return JSONResponse(status_code=status.HTTP_400_BAD_REQUEST, content=body)


@router.get("/getallsensorplacements/")
async def fastapi_get_all_sensor_placements(network: str) -> list[dict[Any, Any]]:
    """List every stored sensor-placement result for *network*."""
    return get_all_sensor_placements(network)


@router.get("/getallburstlocateresults/")
async def fastapi_get_all_burst_locate_results(network: str) -> list[dict[Any, Any]]:
    """List every stored burst-location result for *network*."""
    return get_all_burst_locate_results(network)
class Item(BaseModel):
    """Body schema for the echo test endpoint."""

    str_info: str


@router.post("/test_dict/")
async def fastapi_test_dict(data: Item) -> dict[str, str]:
    """Echo the parsed request body back as a plain dict."""
    return data.dict()
@router.get("/getrealtimedata/")
async def fastapi_get_realtimedata():
    """Stand-in endpoint: 100 random integers in [0, 100] as realtime data."""
    return [random.randint(0, 100) for _ in range(100)]


@router.get("/getsimulationresult/")
async def fastapi_get_simulationresult():
    """Stand-in endpoint: 100 random integers in [0, 100] as a simulation result."""
    return [random.randint(0, 100) for _ in range(100)]

View File

@@ -0,0 +1,55 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()


############################################################
# demand 9.[DEMANDS]
############################################################
@router.get("/getdemandschema")
async def fastapi_get_demand_schema(network: str) -> dict[str, dict[str, Any]]:
    """Describe the fields of a [DEMANDS] entry."""
    return get_demand_schema(network)


@router.get("/getdemandproperties/")
async def fastapi_get_demand_properties(network: str, junction: str) -> dict[str, Any]:
    """Fetch the demand categories of *junction*."""
    return get_demand(network, junction)


# example: set_demand(p, ChangeSet({'junction': 'j1', 'demands': [{'demand': 10.0, 'pattern': None, 'category': 'x'}, {'demand': 20.0, 'pattern': None, 'category': None}]}))
@router.post("/setdemandproperties/", response_model=None)
async def fastapi_set_demand_properties(
    network: str, junction: str, req: Request
) -> ChangeSet:
    """Replace the demand categories of *junction* from the JSON body."""
    payload = await req.json()
    merged = {"junction": junction, **payload}
    return set_demand(network, ChangeSet(merged))
############################################################
# water distribution 36.[Water Distribution]
############################################################
# Bug fix: the two body-reading endpoints below were registered with
# @router.get although they call req.json(); many HTTP clients and proxies
# drop bodies on GET, so they are POST now, matching the other set endpoints.
@router.post("/calculatedemandtonodes/")
async def fastapi_calculate_demand_to_nodes(
    network: str, req: Request
) -> dict[str, float]:
    """Distribute *demand* (JSON body) over an explicit node list."""
    props = await req.json()
    demand = props["demand"]
    nodes = props["nodes"]
    return calculate_demand_to_nodes(network, demand, nodes)


@router.post("/calculatedemandtoregion/")
async def fastapi_calculate_demand_to_region(
    network: str, req: Request
) -> dict[str, float]:
    """Distribute *demand* (JSON body) over the junctions inside a region."""
    props = await req.json()
    demand = props["demand"]
    region = props["region"]
    return calculate_demand_to_region(network, demand, region)


@router.get("/calculatedemandtonetwork/")
async def fastapi_calculate_demand_to_network(
    network: str, demand: float
) -> dict[str, float]:
    """Distribute *demand* (query parameter) across the whole network."""
    return calculate_demand_to_network(network, demand)

View File

@@ -0,0 +1,162 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()


############################################################
# type
############################################################
@router.get("/isnode/")
async def fastapi_is_node(network: str, node: str) -> bool:
    """True when *node* names any node in *network*."""
    return is_node(network, node)


@router.get("/isjunction/")
async def fastapi_is_junction(network: str, node: str) -> bool:
    """True when *node* is a junction."""
    return is_junction(network, node)


@router.get("/isreservoir/")
async def fastapi_is_reservoir(network: str, node: str) -> bool:
    """True when *node* is a reservoir."""
    return is_reservoir(network, node)


@router.get("/istank/")
async def fastapi_is_tank(network: str, node: str) -> bool:
    """True when *node* is a tank."""
    return is_tank(network, node)


@router.get("/islink/")
async def fastapi_is_link(network: str, link: str) -> bool:
    """True when *link* names any link in *network*."""
    return is_link(network, link)


@router.get("/ispipe/")
async def fastapi_is_pipe(network: str, link: str) -> bool:
    """True when *link* is a pipe."""
    return is_pipe(network, link)


@router.get("/ispump/")
async def fastapi_is_pump(network: str, link: str) -> bool:
    """True when *link* is a pump."""
    return is_pump(network, link)


@router.get("/isvalve/")
async def fastapi_is_valve(network: str, link: str) -> bool:
    """True when *link* is a valve."""
    return is_valve(network, link)
@router.get("/getnodetype/")
async def fastapi_get_node_type(network: str, node: str) -> str:
return get_node_type(network, node)
@router.get("/getlinktype/")
async def fastapi_get_link_type(network: str, link: str) -> str:
return get_link_type(network, link)
@router.get("/getelementtype/")
async def fastapi_get_element_type(network: str, element: str) -> str:
return get_element_type(network, element)
@router.get("/getelementtypevalue/")
async def fastapi_get_element_type_value(network: str, element: str) -> int:
return get_element_type_value(network, element)
@router.get("/getnodes/")
async def fastapi_get_nodes(network: str) -> list[str]:
return get_nodes(network)
@router.get("/getlinks/")
async def fastapi_get_links(network: str) -> list[str]:
return get_links(network)
@router.get("/getnodelinks/")
def get_node_links_endpoint(network: str, node: str) -> list[str]:
    """Return the ids of all links incident to *node* in *network*.

    NOTE(review): declared ``def`` (sync) unlike the ``async def`` handlers
    around it, so FastAPI runs it in a worker thread. Harmless, but consider
    ``async def`` for consistency with the rest of this router.
    """
    return get_node_links(network, node)
############################################################
# Node & Link properties
############################################################
@router.get("/getnodeproperties/")
async def fast_get_node_properties(network: str, node: str) -> dict[str, Any]:
return get_node_properties(network, node)
@router.get("/getlinkproperties/")
async def fast_get_link_properties(network: str, link: str) -> dict[str, Any]:
return get_link_properties(network, link)
@router.get("/getscadaproperties/")
async def fast_get_scada_properties(network: str, scada: str) -> dict[str, Any]:
return get_scada_info(network, scada)
@router.get("/getallscadaproperties/")
async def fast_get_all_scada_properties(network: str) -> list[dict[str, Any]]:
return get_all_scada_info(network)
@router.get("/getelementpropertieswithtype/")
async def fast_get_element_properties_with_type(
network: str, elementtype: str, element: str
) -> dict[str, Any]:
return get_element_properties_with_type(network, elementtype, element)
@router.get("/getelementproperties/")
async def fast_get_element_properties(network: str, element: str) -> dict[str, Any]:
return get_element_properties(network, element)
############################################################
# title 1.[TITLE]
############################################################
@router.get("/gettitleschema/")
async def fast_get_title_schema(network: str) -> dict[str, dict[str, Any]]:
return get_title_schema(network)
@router.get("/gettitle/")
async def fast_get_title(network: str) -> dict[str, Any]:
return get_title(network)
@router.get("/settitle/", response_model=None)
async def fastapi_set_title(network: str, req: Request) -> ChangeSet:
props = await req.json()
return set_title(network, ChangeSet(props))
############################################################
# status 10.[STATUS]
############################################################
@router.get("/getstatusschema")
async def fastapi_get_status_schema(network: str) -> dict[str, dict[str, Any]]:
return get_status_schema(network)
@router.get("/getstatus/")
async def fastapi_get_status(network: str, link: str) -> dict[str, Any]:
return get_status(network, link)
@router.post("/setstatus/", response_model=None)
async def fastapi_set_status_properties(
network: str, link: str, req: Request
) -> ChangeSet:
props = await req.json()
ps = {"link": link} | props
return set_status(network, ChangeSet(ps))
############################################################
# General Deletion
############################################################
@router.post("/deletenode/", response_model=None)
async def fastapi_delete_node(network: str, node: str) -> ChangeSet:
    """Delete *node* from *network*, dispatching on its concrete node type.

    Returns the resulting ChangeSet, or an empty ChangeSet when the id
    matches no known node type (junction, reservoir or tank).
    """
    change = ChangeSet({"id": node})
    # Try each node kind in turn; the first predicate that matches wins.
    dispatch = (
        (is_junction, delete_junction),
        (is_reservoir, delete_reservoir),
        (is_tank, delete_tank),
    )
    for matches, remove in dispatch:
        if matches(network, node):
            return remove(network, change)
    # Unknown node id: report "nothing changed" rather than failing.
    return ChangeSet()
@router.post("/deletelink/", response_model=None)
async def fastapi_delete_link(network: str, link: str) -> ChangeSet:
    """Delete *link* from *network*, dispatching on its concrete link type.

    Returns the resulting ChangeSet, or an empty ChangeSet when the id
    matches no known link type (pipe, pump or valve).
    """
    change = ChangeSet({"id": link})
    # Try each link kind in turn; the first predicate that matches wins.
    dispatch = (
        (is_pipe, delete_pipe),
        (is_pump, delete_pump),
        (is_valve, delete_valve),
    )
    for matches, remove in dispatch:
        if matches(network, link):
            return remove(network, change)
    # Unknown link id: report "nothing changed" rather than failing.
    return ChangeSet()

View File

@@ -0,0 +1,80 @@
from fastapi import APIRouter, Request, Depends
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
from app.api.v1.endpoints.auth import verify_token
from app.infra.cache.redis_client import redis_client, encode_datetime, decode_datetime
import msgpack
router = APIRouter()
############################################################
# coord 24.[COORDINATES]
############################################################
@router.get("/getcoordschema/")
async def fastapi_get_coord_schema(network: str) -> dict[str, dict[str, Any]]:
return get_coord_schema(network)
@router.get("/getcoord/")
async def fastapi_get_coord(network: str, node: str) -> dict[str, Any]:
return get_coord(network, node)
# example: set_coord(p, ChangeSet({'node': 'j1', 'x': 1.0, 'y': 2.0}))
@router.post("/setcoord/", response_model=None)
async def fastapi_set_coord(network: str, req: Request) -> ChangeSet:
props = await req.json()
return set_coord(network, ChangeSet(props))
@router.get("/getnodecoord/")
async def fastapi_get_node_coord(network: str, node: str) -> dict[str, float] | None:
return get_node_coord(network, node)
# Additional geometry queries found in main.py logic (implicit or explicit)
@router.get("/getnetworkinextent/")
async def fastapi_get_network_in_extent(
network: str, x1: float, y1: float, x2: float, y2: float
) -> dict[str, Any]:
return get_network_in_extent(network, x1, y1, x2, y2)
@router.get("/getnetworkgeometries/", dependencies=[Depends(verify_token)])
async def fastapi_get_network_geometries(network: str) -> dict[str, Any] | None:
    """Return the network's node/link/scada geometry bundle, Redis-cached.

    Nodes are serialized compactly as ``"id:type:x:y"`` strings; the whole
    payload is msgpack-encoded in the cache keyed by network name.
    """
    key = f"getnetworkgeometries_{network}"
    cached = redis_client.get(key)
    if cached:
        # Cache hit: decode the msgpack blob (datetimes restored via hook).
        return msgpack.unpackb(cached, object_hook=decode_datetime)
    # Cache miss: build the payload from the live network model.
    node_coords = get_network_node_coords(network)
    encoded_nodes = [
        f"{nid}:{c['type']}:{c['x']}:{c['y']}" for nid, c in node_coords.items()
    ]
    payload = {
        "nodes": encoded_nodes,
        "links": get_network_link_nodes(network),
        "scadas": get_all_scada_info(network),
    }
    redis_client.set(key, msgpack.packb(payload, default=encode_datetime))
    return payload
@router.get("/getmajornodecoords/")
async def fastapi_get_majornode_coords(
network: str, diameter: int
) -> dict[str, dict[str, float]]:
return get_major_node_coords(network, diameter)
@router.get("/getmajorpipenodes/")
async def fastapi_get_major_pipe_nodes(network: str, diameter: int) -> list[str] | None:
return get_major_pipe_nodes(network, diameter)
@router.get("/getnetworklinknodes/")
async def fastapi_get_network_link_nodes(network: str) -> list[str] | None:
return get_network_link_nodes(network)
@router.get("/getallcoords/")
async def fastapi_get_all_coords(network: str) -> list[Any]:
return get_all_coords(network)
@router.get("/projectcoordinates/")
async def fastapi_project_coordinates(
network: str, from_epsg: int, to_epsg: int
) -> ChangeSet:
return project_coordinates(network, from_epsg, to_epsg)

View File

@@ -0,0 +1,111 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()
@router.get("/getjunctionschema")
async def fast_get_junction_schema(network: str) -> dict[str, dict[str, Any]]:
return get_junction_schema(network)
@router.post("/addjunction/", response_model=None)
async def fastapi_add_junction(
network: str, junction: str, x: float, y: float, z: float
) -> ChangeSet:
ps = {"id": junction, "x": x, "y": y, "elevation": z}
return add_junction(network, ChangeSet(ps))
@router.post("/deletejunction/", response_model=None)
async def fastapi_delete_junction(network: str, junction: str) -> ChangeSet:
ps = {"id": junction}
return delete_junction(network, ChangeSet(ps))
@router.get("/getjunctionelevation/")
async def fastapi_get_junction_elevation(network: str, junction: str) -> float:
ps = get_junction(network, junction)
return ps["elevation"]
@router.get("/getjunctionx/")
async def fastapi_get_junction_x(network: str, junction: str) -> float:
ps = get_junction(network, junction)
return ps["x"]
@router.get("/getjunctiony/")
async def fastapi_get_junction_y(network: str, junction: str) -> float:
ps = get_junction(network, junction)
return ps["y"]
@router.get("/getjunctioncoord/")
async def fastapi_get_junction_coord(network: str, junction: str) -> dict[str, float]:
    """Return the x/y coordinates of *junction* in *network*."""
    props = get_junction(network, junction)
    return {"x": props["x"], "y": props["y"]}
@router.get("/getjunctiondemand/")
async def fastapi_get_junction_demand(network: str, junction: str) -> float:
ps = get_junction(network, junction)
return ps["demand"]
@router.get("/getjunctionpattern/")
async def fastapi_get_junction_pattern(network: str, junction: str) -> str:
ps = get_junction(network, junction)
return ps["pattern"]
@router.post("/setjunctionelevation/", response_model=None)
async def fastapi_set_junction_elevation(
network: str, junction: str, elevation: float
) -> ChangeSet:
ps = {"id": junction, "elevation": elevation}
return set_junction(network, ChangeSet(ps))
@router.post("/setjunctionx/", response_model=None)
async def fastapi_set_junction_x(network: str, junction: str, x: float) -> ChangeSet:
ps = {"id": junction, "x": x}
return set_junction(network, ChangeSet(ps))
@router.post("/setjunctiony/", response_model=None)
async def fastapi_set_junction_y(network: str, junction: str, y: float) -> ChangeSet:
ps = {"id": junction, "y": y}
return set_junction(network, ChangeSet(ps))
@router.post("/setjunctioncoord/", response_model=None)
async def fastapi_set_junction_coord(
network: str, junction: str, x: float, y: float
) -> ChangeSet:
ps = {"id": junction, "x": x, "y": y}
return set_junction(network, ChangeSet(ps))
@router.post("/setjunctiondemand/", response_model=None)
async def fastapi_set_junction_demand(
network: str, junction: str, demand: float
) -> ChangeSet:
ps = {"id": junction, "demand": demand}
return set_junction(network, ChangeSet(ps))
@router.post("/setjunctionpattern/", response_model=None)
async def fastapi_set_junction_pattern(
network: str, junction: str, pattern: str
) -> ChangeSet:
ps = {"id": junction, "pattern": pattern}
return set_junction(network, ChangeSet(ps))
@router.get("/getjunctionproperties/")
async def fastapi_get_junction_properties(
network: str, junction: str
) -> dict[str, Any]:
return get_junction(network, junction)
@router.get("/getalljunctionproperties/")
async def fastapi_get_all_junction_properties(network: str) -> list[dict[str, Any]]:
# 缓存查询结果提高性能
# global redis_client # Redis logic removed for clean split, can be re-added if needed or imported
results = get_all_junctions(network)
return results
@router.post("/setjunctionproperties/", response_model=None)
async def fastapi_set_junction_properties(
network: str, junction: str, req: Request
) -> ChangeSet:
props = await req.json()
ps = {"id": junction} | props
return set_junction(network, ChangeSet(ps))

View File

@@ -0,0 +1,133 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()
@router.get("/getpipeschema")
async def fastapi_get_pipe_schema(network: str) -> dict[str, dict[str, Any]]:
return get_pipe_schema(network)
@router.post("/addpipe/", response_model=None)
async def fastapi_add_pipe(
    network: str,
    pipe: str,
    node1: str,
    node2: str,
    length: float = 0,
    diameter: float = 0,
    roughness: float = 0,
    minor_loss: float = 0,
    status: str = PIPE_STATUS_OPEN,
) -> ChangeSet:
    """Add a pipe between *node1* and *node2* and return the ChangeSet.

    All hydraulic attributes default to 0; status defaults to open.
    """
    attrs = dict(
        id=pipe,
        node1=node1,
        node2=node2,
        length=length,
        diameter=diameter,
        roughness=roughness,
        minor_loss=minor_loss,
        status=status,
    )
    return add_pipe(network, ChangeSet(attrs))
@router.post("/deletepipe/", response_model=None)
async def fastapi_delete_pipe(network: str, pipe: str) -> ChangeSet:
ps = {"id": pipe}
return delete_pipe(network, ChangeSet(ps))
@router.get("/getpipenode1/")
async def fastapi_get_pipe_node1(network: str, pipe: str) -> str | None:
ps = get_pipe(network, pipe)
return ps["node1"]
@router.get("/getpipenode2/")
async def fastapi_get_pipe_node2(network: str, pipe: str) -> str | None:
ps = get_pipe(network, pipe)
return ps["node2"]
@router.get("/getpipelength/")
async def fastapi_get_pipe_length(network: str, pipe: str) -> float | None:
ps = get_pipe(network, pipe)
return ps["length"]
@router.get("/getpipediameter/")
async def fastapi_get_pipe_diameter(network: str, pipe: str) -> float | None:
ps = get_pipe(network, pipe)
return ps["diameter"]
@router.get("/getpiperoughness/")
async def fastapi_get_pipe_roughness(network: str, pipe: str) -> float | None:
ps = get_pipe(network, pipe)
return ps["roughness"]
@router.get("/getpipeminorloss/")
async def fastapi_get_pipe_minor_loss(network: str, pipe: str) -> float | None:
ps = get_pipe(network, pipe)
return ps["minor_loss"]
@router.get("/getpipestatus/")
async def fastapi_get_pipe_status(network: str, pipe: str) -> str | None:
ps = get_pipe(network, pipe)
return ps["status"]
@router.post("/setpipenode1/", response_model=None)
async def fastapi_set_pipe_node1(network: str, pipe: str, node1: str) -> ChangeSet:
ps = {"id": pipe, "node1": node1}
return set_pipe(network, ChangeSet(ps))
@router.post("/setpipenode2/", response_model=None)
async def fastapi_set_pipe_node2(network: str, pipe: str, node2: str) -> ChangeSet:
ps = {"id": pipe, "node2": node2}
return set_pipe(network, ChangeSet(ps))
@router.post("/setpipelength/", response_model=None)
async def fastapi_set_pipe_length(network: str, pipe: str, length: float) -> ChangeSet:
ps = {"id": pipe, "length": length}
return set_pipe(network, ChangeSet(ps))
@router.post("/setpipediameter/", response_model=None)
async def fastapi_set_pipe_diameter(
network: str, pipe: str, diameter: float
) -> ChangeSet:
ps = {"id": pipe, "diameter": diameter}
return set_pipe(network, ChangeSet(ps))
@router.post("/setpiperoughness/", response_model=None)
async def fastapi_set_pipe_roughness(
network: str, pipe: str, roughness: float
) -> ChangeSet:
ps = {"id": pipe, "roughness": roughness}
return set_pipe(network, ChangeSet(ps))
@router.post("/setpipeminorloss/", response_model=None)
async def fastapi_set_pipe_minor_loss(
network: str, pipe: str, minor_loss: float
) -> ChangeSet:
ps = {"id": pipe, "minor_loss": minor_loss}
return set_pipe(network, ChangeSet(ps))
@router.post("/setpipestatus/", response_model=None)
async def fastapi_set_pipe_status(network: str, pipe: str, status: str) -> ChangeSet:
ps = {"id": pipe, "status": status}
return set_pipe(network, ChangeSet(ps))
@router.get("/getpipeproperties/")
async def fastapi_get_pipe_properties(network: str, pipe: str) -> dict[str, Any]:
return get_pipe(network, pipe)
@router.get("/getallpipeproperties/")
async def fastapi_get_all_pipe_properties(network: str) -> list[dict[str, Any]]:
# 缓存查询结果提高性能
# global redis_client
results = get_all_pipes(network)
return results
@router.post("/setpipeproperties/", response_model=None)
async def fastapi_set_pipe_properties(
network: str, pipe: str, req: Request
) -> ChangeSet:
props = await req.json()
ps = {"id": pipe} | props
return set_pipe(network, ChangeSet(ps))

View File

@@ -0,0 +1,60 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()
@router.get("/getpumpschema")
async def fastapi_get_pump_schema(network: str) -> dict[str, dict[str, Any]]:
return get_pump_schema(network)
@router.post("/addpump/", response_model=None)
async def fastapi_add_pump(
network: str, pump: str, node1: str, node2: str, power: float = 0.0
) -> ChangeSet:
ps = {"id": pump, "node1": node1, "node2": node2, "power": power}
return add_pump(network, ChangeSet(ps))
@router.post("/deletepump/", response_model=None)
async def fastapi_delete_pump(network: str, pump: str) -> ChangeSet:
ps = {"id": pump}
return delete_pump(network, ChangeSet(ps))
@router.get("/getpumpnode1/")
async def fastapi_get_pump_node1(network: str, pump: str) -> str | None:
ps = get_pump(network, pump)
return ps["node1"]
@router.get("/getpumpnode2/")
async def fastapi_get_pump_node2(network: str, pump: str) -> str | None:
ps = get_pump(network, pump)
return ps["node2"]
@router.post("/setpumpnode1/", response_model=None)
async def fastapi_set_pump_node1(network: str, pump: str, node1: str) -> ChangeSet:
ps = {"id": pump, "node1": node1}
return set_pump(network, ChangeSet(ps))
@router.post("/setpumpnode2/", response_model=None)
async def fastapi_set_pump_node2(network: str, pump: str, node2: str) -> ChangeSet:
ps = {"id": pump, "node2": node2}
return set_pump(network, ChangeSet(ps))
@router.get("/getpumpproperties/")
async def fastapi_get_pump_properties(network: str, pump: str) -> dict[str, Any]:
return get_pump(network, pump)
@router.get("/getallpumpproperties/")
async def fastapi_get_all_pump_properties(network: str) -> list[dict[str, Any]]:
# 缓存查询结果提高性能
# global redis_client
results = get_all_pumps(network)
return results
@router.post("/setpumpproperties/", response_model=None)
async def fastapi_set_pump_properties(
network: str, pump: str, req: Request
) -> ChangeSet:
props = await req.json()
ps = {"id": pump} | props
return set_pump(network, ChangeSet(ps))

View File

@@ -0,0 +1,245 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()
############################################################
# region 32
############################################################
@router.get("/calculateregion/")
async def fastapi_calculate_region(network: str, time_index: int) -> dict[str, Any]:
return calculate_region(network, time_index)
@router.get("/getregionschema/")
async def fastapi_get_region_schema(network: str) -> dict[str, dict[str, Any]]:
return get_region_schema(network)
@router.get("/getregion/")
async def fastapi_get_region(network: str, id: str) -> dict[str, Any]:
return get_region(network, id)
@router.post("/setregion/", response_model=None)
async def fastapi_set_region(network: str, req: Request) -> ChangeSet:
props = await req.json()
return set_region(network, ChangeSet(props))
@router.post("/addregion/", response_model=None)
async def fastapi_add_region(network: str, req: Request) -> ChangeSet:
props = await req.json()
return add_region(network, ChangeSet(props))
@router.post("/deleteregion/", response_model=None)
async def fastapi_delete_region(network: str, req: Request) -> ChangeSet:
props = await req.json()
return delete_region(network, ChangeSet(props))
@router.get("/getallregions/")
async def fastapi_get_all_regions(network: str) -> list[dict[str, Any]]:
return get_all_regions(network)
@router.post("/generateregion/", response_model=None)
async def fastapi_generate_region(
network: str, inflate_delta: float
) -> ChangeSet:
return generate_region(network, inflate_delta)
############################################################
# district_metering_area 33
############################################################
@router.get("/calculatedistrictmeteringarea/")
async def fastapi_calculate_district_metering_area(
    network: str, req: Request
) -> list[list[str]]:
    """Partition the given nodes of *network* into district metering areas.

    Expects a JSON body with keys "nodes", "part_count" and "part_type".

    NOTE(review): this handler reads a JSON request body on a GET route;
    many HTTP clients and proxies drop GET bodies — consider POST. Confirm
    with the API's consumers before changing the method.
    """
    props = await req.json()
    nodes = props["nodes"]
    part_count = props["part_count"]
    part_type = props["part_type"]
    return calculate_district_metering_area(
        network, nodes, part_count, part_type
    )
@router.get("/calculatedistrictmeteringareaforregion/")
async def fastapi_calculate_district_metering_area_for_region(
    network: str, req: Request
) -> list[list[str]]:
    """Partition one region of *network* into district metering areas.

    Expects a JSON body with keys "region", "part_count" and "part_type".

    NOTE(review): reads a JSON request body on a GET route; many HTTP
    clients and proxies drop GET bodies — consider POST. Confirm with the
    API's consumers before changing the method.
    """
    props = await req.json()
    region = props["region"]
    part_count = props["part_count"]
    part_type = props["part_type"]
    return calculate_district_metering_area_for_region(
        network, region, part_count, part_type
    )
@router.get("/calculatedistrictmeteringareafornetwork/")
async def fastapi_calculate_district_metering_area_for_network(
    network: str, req: Request
) -> list[list[str]]:
    """Partition the whole *network* into district metering areas.

    Expects a JSON body with keys "part_count" and "part_type".

    NOTE(review): reads a JSON request body on a GET route; many HTTP
    clients and proxies drop GET bodies — consider POST. Confirm with the
    API's consumers before changing the method.
    """
    props = await req.json()
    part_count = props["part_count"]
    part_type = props["part_type"]
    return calculate_district_metering_area_for_network(network, part_count, part_type)
@router.get("/getdistrictmeteringareaschema/")
async def fastapi_get_district_metering_area_schema(
network: str,
) -> dict[str, dict[str, Any]]:
return get_district_metering_area_schema(network)
@router.get("/getdistrictmeteringarea/")
async def fastapi_get_district_metering_area(network: str, id: str) -> dict[str, Any]:
return get_district_metering_area(network, id)
@router.post("/setdistrictmeteringarea/", response_model=None)
async def fastapi_set_district_metering_area(network: str, req: Request) -> ChangeSet:
props = await req.json()
return set_district_metering_area(network, ChangeSet(props))
@router.post("/adddistrictmeteringarea/", response_model=None)
async def fastapi_add_district_metering_area(network: str, req: Request) -> ChangeSet:
    """Create a district metering area from the JSON body's properties.

    The body's "boundary" is a list of point lists; it is normalized to
    (x, y) tuples, silently dropping malformed points with fewer than two
    components.
    """
    props = await req.json()
    raw_boundary = props.get("boundary", [])
    props["boundary"] = [(pt[0], pt[1]) for pt in raw_boundary if len(pt) >= 2]
    return add_district_metering_area(network, ChangeSet(props))
@router.post("/deletedistrictmeteringarea/", response_model=None)
async def fastapi_delete_district_metering_area(
network: str, req: Request
) -> ChangeSet:
props = await req.json()
return delete_district_metering_area(network, ChangeSet(props))
@router.get("/getalldistrictmeteringareaids/")
async def fastapi_get_all_district_metering_area_ids(network: str) -> list[str]:
return get_all_district_metering_area_ids(network)
@router.get("/getalldistrictmeteringareas/")
async def fastapi_get_all_district_metering_areas(network: str) -> list[dict[str, Any]]:
    """Return the properties of every district metering area in *network*."""
    # Renamed from ``getalldistrictmeteringareas`` for consistency with the
    # ``fastapi_``-prefixed handlers throughout this module; the route path
    # is unchanged, so HTTP clients are unaffected.
    return get_all_district_metering_areas(network)
@router.post("/generatedistrictmeteringarea/", response_model=None)
async def fastapi_generate_district_metering_area(
network: str, part_count: int, part_type: int, inflate_delta: float
) -> ChangeSet:
return generate_district_metering_area(
network, part_count, part_type, inflate_delta
)
@router.post("/generatesubdistrictmeteringarea/", response_model=None)
async def fastapi_generate_sub_district_metering_area(
network: str, dma: str, part_count: int, part_type: int, inflate_delta: float
) -> ChangeSet:
return generate_sub_district_metering_area(
network, dma, part_count, part_type, inflate_delta
)
############################################################
# service_area 34
############################################################
@router.get("/calculateservicearea/")
async def fastapi_calculate_service_area(
network: str, time_index: int
) -> dict[str, Any]:
return calculate_service_area(network, time_index)
@router.get("/getserviceareaschema/")
async def fastapi_get_service_area_schema(network: str) -> dict[str, dict[str, Any]]:
return get_service_area_schema(network)
@router.get("/getservicearea/")
async def fastapi_get_service_area(network: str, id: str) -> dict[str, Any]:
return get_service_area(network, id)
@router.post("/setservicearea/", response_model=None)
async def fastapi_set_service_area(network: str, req: Request) -> ChangeSet:
props = await req.json()
return set_service_area(network, ChangeSet(props))
@router.post("/addservicearea/", response_model=None)
async def fastapi_add_service_area(network: str, req: Request) -> ChangeSet:
props = await req.json()
return add_service_area(network, ChangeSet(props))
@router.post("/deleteservicearea/", response_model=None)
async def fastapi_delete_service_area(network: str, req: Request) -> ChangeSet:
props = await req.json()
return delete_service_area(network, ChangeSet(props))
@router.get("/getallserviceareas/")
async def fastapi_get_all_service_areas(network: str) -> list[dict[str, Any]]:
return get_all_service_areas(network)
@router.post("/generateservicearea/", response_model=None)
async def fastapi_generate_service_area(
network: str, inflate_delta: float
) -> ChangeSet:
return generate_service_area(network, inflate_delta)
############################################################
# virtual_district 35
############################################################
@router.get("/calculatevirtualdistrict/")
async def fastapi_calculate_virtual_district(
network: str, centers: list[str]
) -> dict[str, list[Any]]:
return calculate_virtual_district(network, centers)
@router.get("/getvirtualdistrictschema/")
async def fastapi_get_virtual_district_schema(
network: str,
) -> dict[str, dict[str, Any]]:
return get_virtual_district_schema(network)
@router.get("/getvirtualdistrict/")
async def fastapi_get_virtual_district(network: str, id: str) -> dict[str, Any]:
return get_virtual_district(network, id)
@router.post("/setvirtualdistrict/", response_model=None)
async def fastapi_set_virtual_district(network: str, req: Request) -> ChangeSet:
props = await req.json()
return set_virtual_district(network, ChangeSet(props))
@router.post("/addvirtualdistrict/", response_model=None)
async def fastapi_add_virtual_district(network: str, req: Request) -> ChangeSet:
props = await req.json()
return add_virtual_district(network, ChangeSet(props))
@router.post("/deletevirtualdistrict/", response_model=None)
async def fastapi_delete_virtual_district(network: str, req: Request) -> ChangeSet:
props = await req.json()
return delete_virtual_district(network, ChangeSet(props))
@router.get("/getallvirtualdistrict/")
async def fastapi_get_all_virtual_district(network: str) -> list[dict[str, Any]]:
return get_all_virtual_districts(network)
@router.post("/generatevirtualdistrict/", response_model=None)
async def fastapi_generate_virtual_district(
network: str, inflate_delta: float, req: Request
) -> ChangeSet:
props = await req.json()
return generate_virtual_district(network, props["centers"], inflate_delta)
@router.get("/calculatedistrictmeteringareafornodes/")
async def fastapi_calculate_district_metering_area_for_nodes(
    network: str, req: Request
) -> list[list[str]]:
    """Partition the listed nodes of *network* into district metering areas.

    Expects a JSON body with keys "nodes", "part_count" and "part_type".

    NOTE(review): reads a JSON request body on a GET route; many HTTP
    clients and proxies drop GET bodies — consider POST. Confirm with the
    API's consumers before changing the method.
    """
    props = await req.json()
    nodes = props["nodes"]
    part_count = props["part_count"]
    part_type = props["part_type"]
    return calculate_district_metering_area_for_nodes(
        network, nodes, part_count, part_type
    )

View File

@@ -0,0 +1,105 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()
@router.get("/getreservoirschema")
async def fast_get_reservoir_schema(network: str) -> dict[str, dict[str, Any]]:
return get_reservoir_schema(network)
@router.post("/addreservoir/", response_model=None)
async def fastapi_add_reservoir(
network: str, reservoir: str, x: float, y: float, head: float
) -> ChangeSet:
ps = {"id": reservoir, "x": x, "y": y, "head": head}
return add_reservoir(network, ChangeSet(ps))
@router.post("/deletereservoir/", response_model=None)
async def fastapi_delete_reservoir(network: str, reservoir: str) -> ChangeSet:
ps = {"id": reservoir}
return delete_reservoir(network, ChangeSet(ps))
@router.get("/getreservoirhead/")
async def fastapi_get_reservoir_head(network: str, reservoir: str) -> float | None:
ps = get_reservoir(network, reservoir)
return ps["head"]
@router.get("/getreservoirpattern/")
async def fastapi_get_reservoir_pattern(network: str, reservoir: str) -> str | None:
ps = get_reservoir(network, reservoir)
return ps["pattern"]
@router.get("/getreservoirx/")
async def fastapi_get_reservoir_x(
    network: str, reservoir: str
) -> float | None:
    """Return the x coordinate of *reservoir* in *network*.

    BUG FIX: the return annotation was ``dict[str, float] | None`` while a
    plain float is returned; FastAPI derives the response model from the
    annotation, so response validation rejected the value. Annotate the
    scalar that is actually returned.
    """
    ps = get_reservoir(network, reservoir)
    return ps["x"]
@router.get("/getreservoiry/")
async def fastapi_get_reservoir_y(
    network: str, reservoir: str
) -> float | None:
    """Return the y coordinate of *reservoir* in *network*.

    BUG FIX: the return annotation was ``dict[str, float] | None`` while a
    plain float is returned; FastAPI derives the response model from the
    annotation, so response validation rejected the value. Annotate the
    scalar that is actually returned.
    """
    ps = get_reservoir(network, reservoir)
    return ps["y"]
@router.get("/getreservoircoord/")
async def fastapi_get_reservoir_coord(
    network: str, reservoir: str
) -> dict[str, Any] | None:
    """Return the reservoir's id together with its x/y coordinates.

    BUG FIX: the payload mixes a str value ("id") with floats ("x"/"y"),
    so the old ``dict[str, float] | None`` annotation made FastAPI's
    response validation fail on the "id" value; annotate ``dict[str, Any]``
    instead. The payload itself is unchanged.
    """
    ps = get_reservoir(network, reservoir)
    coord = {"id": reservoir, "x": ps["x"], "y": ps["y"]}
    return coord
@router.post("/setreservoirhead/", response_model=None)
async def fastapi_set_reservoir_head(
network: str, reservoir: str, head: float
) -> ChangeSet:
ps = {"id": reservoir, "head": head}
return set_reservoir(network, ChangeSet(ps))
@router.post("/setreservoirpattern/", response_model=None)
async def fastapi_set_reservoir_pattern(
network: str, reservoir: str, pattern: str
) -> ChangeSet:
ps = {"id": reservoir, "pattern": pattern}
return set_reservoir(network, ChangeSet(ps))
@router.post("/setreservoirx/", response_model=None)
async def fastapi_set_reservoir_x(network: str, reservoir: str, x: float) -> ChangeSet:
ps = {"id": reservoir, "x": x}
return set_reservoir(network, ChangeSet(ps))
@router.post("/setreservoiry/", response_model=None)
async def fastapi_set_reservoir_y(network: str, reservoir: str, y: float) -> ChangeSet:
ps = {"id": reservoir, "y": y}
return set_reservoir(network, ChangeSet(ps))
@router.post("/setreservoircoord/", response_model=None)
async def fastapi_set_reservoir_coord(
network: str, reservoir: str, x: float, y: float
) -> ChangeSet:
ps = {"id": reservoir, "x": x, "y": y}
return set_reservoir(network, ChangeSet(ps))
@router.get("/getreservoirproperties/")
async def fastapi_get_reservoir_properties(
network: str, reservoir: str
) -> dict[str, Any]:
return get_reservoir(network, reservoir)
@router.get("/getallreservoirproperties/")
async def fastapi_get_all_reservoir_properties(network: str) -> list[dict[str, Any]]:
# 缓存查询结果提高性能
# global redis_client
results = get_all_reservoirs(network)
return results
@router.post("/setreservoirproperties/", response_model=None)
async def fastapi_set_reservoir_properties(
network: str, reservoir: str, req: Request
) -> ChangeSet:
props = await req.json()
ps = {"id": reservoir} | props
return set_reservoir(network, ChangeSet(ps))

View File

@@ -0,0 +1,27 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()
############################################################
# tag 8.[TAGS]
############################################################
@router.get("/gettagschema/")
async def fastapi_get_tag_schema(network: str) -> dict[str, dict[str, Any]]:
return get_tag_schema(network)
@router.get("/gettag/")
async def fastapi_get_tag(network: str, t_type: str, id: str) -> dict[str, Any]:
return get_tag(network, t_type, id)
@router.get("/gettags/")
async def fastapi_get_tags(network: str) -> list[dict[str, Any]]:
tags = get_tags(network)
return tags
@router.post("/settag/", response_model=None)
async def fastapi_set_tag(network: str, req: Request) -> ChangeSet:
props = await req.json()
return set_tag(network, ChangeSet(props))

View File

@@ -0,0 +1,188 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()
@router.get("/gettankschema")
async def fast_get_tank_schema(network: str) -> dict[str, dict[str, Any]]:
return get_tank_schema(network)
@router.post("/addtank/", response_model=None)
async def fastapi_add_tank(
    network: str,
    tank: str,
    x: float,
    y: float,
    elevation: float,
    init_level: float = 0,
    min_level: float = 0,
    max_level: float = 0,
    diameter: float = 0,
    min_vol: float = 0,
) -> ChangeSet:
    """Add a tank at (x, y) with the given levels and return the ChangeSet.

    All level/size attributes default to 0.
    """
    attrs = dict(
        id=tank,
        x=x,
        y=y,
        elevation=elevation,
        init_level=init_level,
        min_level=min_level,
        max_level=max_level,
        diameter=diameter,
        min_vol=min_vol,
    )
    return add_tank(network, ChangeSet(attrs))
@router.post("/deletetank/", response_model=None)
async def fastapi_delete_tank(network: str, tank: str) -> ChangeSet:
ps = {"id": tank}
return delete_tank(network, ChangeSet(ps))
@router.get("/gettankelevation/")
async def fastapi_get_tank_elevation(network: str, tank: str) -> float | None:
ps = get_tank(network, tank)
return ps["elevation"]
@router.get("/gettankinitlevel/")
async def fastapi_get_tank_init_level(network: str, tank: str) -> float | None:
ps = get_tank(network, tank)
return ps["init_level"]
@router.get("/gettankminlevel/")
async def fastapi_get_tank_min_level(network: str, tank: str) -> float | None:
ps = get_tank(network, tank)
return ps["min_level"]
@router.get("/gettankmaxlevel/")
async def fastapi_get_tank_max_level(network: str, tank: str) -> float | None:
ps = get_tank(network, tank)
return ps["max_level"]
@router.get("/gettankdiameter/")
async def fastapi_get_tank_diameter(network: str, tank: str) -> float | None:
ps = get_tank(network, tank)
return ps["diameter"]
@router.get("/gettankminvol/")
async def fastapi_get_tank_min_vol(network: str, tank: str) -> float | None:
ps = get_tank(network, tank)
return ps["min_vol"]
@router.get("/gettankvolcurve/")
async def fastapi_get_tank_vol_curve(network: str, tank: str) -> str | None:
ps = get_tank(network, tank)
return ps["vol_curve"]
@router.get("/gettankoverflow/")
async def fastapi_get_tank_overflow(network: str, tank: str) -> str | None:
ps = get_tank(network, tank)
return ps["overflow"]
@router.get("/gettankx/")
async def fastapi_get_tank_x(network: str, tank: str) -> float:
ps = get_tank(network, tank)
return ps["x"]
@router.get("/gettanky/")
async def fastapi_get_tank_y(network: str, tank: str) -> float:
ps = get_tank(network, tank)
return ps["y"]
@router.get("/gettankcoord/")
async def fastapi_get_tank_coord(network: str, tank: str) -> dict[str, float]:
    """Return the coordinates of *tank* as ``{"x": ..., "y": ...}``."""
    props = get_tank(network, tank)
    return {"x": props["x"], "y": props["y"]}
@router.post("/settankelevation/", response_model=None)
async def fastapi_set_tank_elevation(
network: str, tank: str, elevation: float
) -> ChangeSet:
ps = {"id": tank, "elevation": elevation}
return set_tank(network, ChangeSet(ps))
@router.post("/settankinitlevel/", response_model=None)
async def fastapi_set_tank_init_level(
network: str, tank: str, init_level: float
) -> ChangeSet:
ps = {"id": tank, "init_level": init_level}
return set_tank(network, ChangeSet(ps))
@router.post("/settankminlevel/", response_model=None)
async def fastapi_set_tank_min_level(
network: str, tank: str, min_level: float
) -> ChangeSet:
ps = {"id": tank, "min_level": min_level}
return set_tank(network, ChangeSet(ps))
@router.post("/settankmaxlevel/", response_model=None)
async def fastapi_set_tank_max_level(
network: str, tank: str, max_level: float
) -> ChangeSet:
ps = {"id": tank, "max_level": max_level}
return set_tank(network, ChangeSet(ps))
@router.post("/settankdiameter/", response_model=None)
async def fastapi_set_tank_diameter(
    network: str, tank: str, diameter: float
) -> ChangeSet:
    """Set the diameter of *tank* in *network*.

    Fix: the route was registered as ``"settankdiameter//"`` (no leading
    slash, doubled trailing slash), making the endpoint unreachable at the
    path every sibling setter uses.  Registered as ``/settankdiameter/`` to
    match ``/settankminvol/`` etc.
    """
    ps = {"id": tank, "diameter": diameter}
    return set_tank(network, ChangeSet(ps))
@router.post("/settankminvol/", response_model=None)
async def fastapi_set_tank_min_vol(
network: str, tank: str, min_vol: float
) -> ChangeSet:
ps = {"id": tank, "min_vol": min_vol}
return set_tank(network, ChangeSet(ps))
@router.post("/settankvolcurve/", response_model=None)
async def fastapi_set_tank_vol_curve(
network: str, tank: str, vol_curve: str
) -> ChangeSet:
ps = {"id": tank, "vol_curve": vol_curve}
return set_tank(network, ChangeSet(ps))
@router.post("/settankoverflow/", response_model=None)
async def fastapi_set_tank_overflow(
network: str, tank: str, overflow: str
) -> ChangeSet:
ps = {"id": tank, "overflow": overflow}
return set_tank(network, ChangeSet(ps))
@router.post("/settankx/", response_model=None)
async def fastapi_set_tank_x(network: str, tank: str, x: float) -> ChangeSet:
ps = {"id": tank, "x": x}
return set_tank(network, ChangeSet(ps))
@router.post("/settanky/", response_model=None)
async def fastapi_set_tank_y(network: str, tank: str, y: float) -> ChangeSet:
ps = {"id": tank, "y": y}
return set_tank(network, ChangeSet(ps))
@router.post("/settankcoord/", response_model=None)
async def fastapi_set_tank_coord(
network: str, tank: str, x: float, y: float
) -> ChangeSet:
ps = {"id": tank, "x": x, "y": y}
return set_tank(network, ChangeSet(ps))
@router.get("/gettankproperties/")
async def fastapi_get_tank_properties(network: str, tank: str) -> dict[str, Any]:
return get_tank(network, tank)
@router.get("/getalltankproperties/")
async def fastapi_get_all_tank_properties(network: str) -> list[dict[str, Any]]:
# 缓存查询结果提高性能
# global redis_client
results = get_all_tanks(network)
return results
@router.post("/settankproperties/", response_model=None)
async def fastapi_set_tank_properties(
    network: str, tank: str, req: Request
) -> ChangeSet:
    """Apply a JSON body of arbitrary property updates to *tank*.

    The body is merged with ``{"id": tank}``; body keys win on conflict,
    matching the original ``dict | props`` semantics.
    """
    body = await req.json()
    merged = {"id": tank, **body}
    return set_tank(network, ChangeSet(merged))

View File

@@ -0,0 +1,115 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()
@router.get("/getvalveschema")
async def fastapi_get_valve_schema(network: str) -> dict[str, dict[str, Any]]:
return get_valve_schema(network)
@router.post("/addvalve/", response_model=None)
async def fastapi_add_valve(
    network: str,
    valve: str,
    node1: str,
    node2: str,
    diameter: float = 0,
    v_type: str = VALVES_TYPE_PRV,
    setting: float = 0,
    minor_loss: float = 0,
) -> ChangeSet:
    """Create a valve between *node1* and *node2* in *network*.

    ``v_type`` defaults to ``VALVES_TYPE_PRV`` (a constant pulled in by the
    ``tjnetwork`` star import); the numeric attributes default to 0.
    Returns the ChangeSet describing the addition.
    """
    ps = {
        "id": valve,
        "node1": node1,
        "node2": node2,
        "diameter": diameter,
        "v_type": v_type,
        "setting": setting,
        "minor_loss": minor_loss,
    }
    return add_valve(network, ChangeSet(ps))
@router.post("/deletevalve/", response_model=None)
async def fastapi_delete_valve(network: str, valve: str) -> ChangeSet:
ps = {"id": valve}
return delete_valve(network, ChangeSet(ps))
@router.get("/getvalvenode1/")
async def fastapi_get_valve_node1(network: str, valve: str) -> str | None:
ps = get_valve(network, valve)
return ps["node1"]
@router.get("/getvalvenode2/")
async def fastapi_get_valve_node2(network: str, valve: str) -> str | None:
ps = get_valve(network, valve)
return ps["node2"]
@router.get("/getvalvediameter/")
async def fastapi_get_valve_diameter(network: str, valve: str) -> float | None:
ps = get_valve(network, valve)
return ps["diameter"]
@router.get("/getvalvetype/")
async def fastapi_get_valve_type(network: str, valve: str) -> str | None:
ps = get_valve(network, valve)
return ps["type"]
@router.get("/getvalvesetting/")
async def fastapi_get_valve_setting(network: str, valve: str) -> float | None:
ps = get_valve(network, valve)
return ps["setting"]
@router.get("/getvalveminorloss/")
async def fastapi_get_valve_minor_loss(network: str, valve: str) -> float | None:
ps = get_valve(network, valve)
return ps["minor_loss"]
@router.post("/setvalvenode1/", response_model=None)
async def fastapi_set_valve_node1(network: str, valve: str, node1: str) -> ChangeSet:
ps = {"id": valve, "node1": node1}
return set_valve(network, ChangeSet(ps))
@router.post("/setvalvenode2/", response_model=None)
async def fastapi_set_valve_node2(network: str, valve: str, node2: str) -> ChangeSet:
ps = {"id": valve, "node2": node2}
return set_valve(network, ChangeSet(ps))
# NOTE(review): the route is "/setvalvenodediameter/" while the getter is
# "/getvalvediameter/" — likely a naming slip, but renaming the path would
# break existing clients, so it is documented rather than changed.
@router.post("/setvalvenodediameter/", response_model=None)
async def fastapi_set_valve_diameter(
    network: str, valve: str, diameter: float
) -> ChangeSet:
    """Set the diameter of *valve* in *network*."""
    ps = {"id": valve, "diameter": diameter}
    return set_valve(network, ChangeSet(ps))
@router.post("/setvalvetype/", response_model=None)
async def fastapi_set_valve_type(network: str, valve: str, type: str) -> ChangeSet:
    """Set the hydraulic type of *valve*.

    The parameter is named ``type`` (shadowing the builtin) because FastAPI
    exposes it as the public query-parameter name; renaming it would change
    the HTTP API.
    """
    ps = {"id": valve, "type": type}
    return set_valve(network, ChangeSet(ps))
@router.post("/setvalvesetting/", response_model=None)
async def fastapi_set_valve_setting(
network: str, valve: str, setting: float
) -> ChangeSet:
ps = {"id": valve, "setting": setting}
return set_valve(network, ChangeSet(ps))
@router.get("/getvalveproperties/")
async def fastapi_get_valve_properties(network: str, valve: str) -> dict[str, Any]:
return get_valve(network, valve)
@router.get("/getallvalveproperties/")
async def fastapi_get_all_valve_properties(network: str) -> list[dict[str, Any]]:
# 缓存查询结果提高性能
# global redis_client
results = get_all_valves(network)
return results
@router.post("/setvalveproperties/", response_model=None)
async def fastapi_set_valve_properties(
network: str, valve: str, req: Request
) -> ChangeSet:
props = await req.json()
ps = {"id": valve} | props
return set_valve(network, ChangeSet(ps))

View File

@@ -0,0 +1,202 @@
import json
from fastapi import APIRouter, Request, HTTPException
from fastapi.responses import PlainTextResponse
from typing import Any, Dict
import app.services.project_info as project_info
from app.native.api import ChangeSet
from app.services.tjnetwork import (
list_project,
have_project,
create_project,
delete_project,
is_project_open,
open_project,
close_project,
copy_project,
import_inp,
export_inp,
read_inp,
dump_inp,
get_all_vertices,
get_all_scada_elements,
get_all_district_metering_areas,
get_all_service_areas,
get_all_virtual_districts,
get_extension_data,
convert_inp_v3_to_v2,
)
# For inp file upload/download
import os
from fastapi import Response, status
from fastapi.responses import FileResponse
inpDir = "data/" # Assuming data directory exists or is defined somewhere.
# In main.py it was likely global. For safety, let's use a relative path or get from config.
# But let's stick to what main.py probably used or a default.
router = APIRouter()
lockedPrjs: Dict[str, str] = {}
@router.get("/listprojects/")
async def list_projects_endpoint() -> list[str]:
return list_project()
@router.get("/haveproject/")
async def have_project_endpoint(network: str):
return have_project(network)
@router.post("/createproject/")
async def create_project_endpoint(network: str):
create_project(network)
return network
@router.post("/deleteproject/")
async def delete_project_endpoint(network: str):
delete_project(network)
return True
@router.get("/isprojectopen/")
async def is_project_open_endpoint(network: str):
return is_project_open(network)
@router.post("/openproject/")
async def open_project_endpoint(network: str):
open_project(network)
return network
@router.post("/closeproject/")
async def close_project_endpoint(network: str):
close_project(network)
return True
@router.post("/copyproject/")
async def copy_project_endpoint(source: str, target: str):
copy_project(source, target)
return True
@router.post("/importinp/")
async def import_inp_endpoint(network: str, req: Request):
jo_root = await req.json()
inp_text = jo_root["inp"]
ps = {"inp": inp_text}
ret = import_inp(network, ChangeSet(ps))
print(ret)
return ret
@router.get("/exportinp/", response_model=None)
async def export_inp_endpoint(network: str, version: str) -> ChangeSet:
    """Export *network* as an INP ChangeSet enriched with extension data.

    Opens the project to read vertex/SCADA/DMA/service-area/virtual-district
    geometry plus the "legend" and "scada_db" extension blobs, attaches them
    (JSON-encoded where applicable) to the first operation of the exported
    ChangeSet, then closes the project again.

    Fixes: removed leftover debug ``print`` calls, and wrapped the
    enrichment in try/finally so the project is closed even if a lookup
    raises.
    """
    cs = export_inp(network, version)
    op = cs.operations[0]
    open_project(network)
    try:
        op["vertex"] = json.dumps(get_all_vertices(network))
        op["scada"] = json.dumps(get_all_scada_elements(network))
        op["dma"] = json.dumps(get_all_district_metering_areas(network))
        op["sa"] = json.dumps(get_all_service_areas(network))
        op["vd"] = json.dumps(get_all_virtual_districts(network))
        op["legend"] = get_extension_data(network, "legend")
        # Falsy (None/empty) extension data is normalised to "".
        op["scada_db"] = get_extension_data(network, "scada_db") or ""
    finally:
        close_project(network)
    return cs
@router.post("/readinp/")
async def read_inp_endpoint(network: str, inp: str) -> bool:
read_inp(network, inp)
return True
@router.get("/dumpinp/")
async def dump_inp_endpoint(network: str, inp: str) -> bool:
dump_inp(network, inp)
return True
@router.get("/isprojectlocked/")
async def is_project_locked_endpoint(network: str, req: Request):
    """Return True if *network* is currently locked by any client.

    ``req`` is unused but kept for signature parity with the other lock
    endpoints.
    """
    # Membership test directly on the dict; ``.keys()`` was redundant.
    return network in lockedPrjs
@router.get("/isprojectlockedbyme/")
async def is_project_locked_by_me_endpoint(network: str, req: Request):
client_host = req.client.host
return lockedPrjs.get(network) == client_host
# Return codes:
#   0 - successfully locked
#   1 - already locked by you
#   2 - locked by another client
@router.post("/lockproject/")
async def lock_project_endpoint(network: str, req: Request):
    """Try to lock *network* for the calling host (keyed by client IP).

    NOTE(review): the check-then-set on ``lockedPrjs`` is not atomic, but the
    original had the same property; left unchanged to preserve behaviour.
    """
    client_host = req.client.host
    if network not in lockedPrjs:
        lockedPrjs[network] = client_host
        return 0
    return 1 if lockedPrjs.get(network) == client_host else 2
@router.post("/unlockproject/")
def unlock_project_endpoint(network: str, req: Request):
    """Release the lock on *network* if it is held by the calling host.

    Returns True when the lock was released, False otherwise.  Fix: removed
    a leftover ``print("delete key")`` debug statement.
    """
    client_host = req.client.host
    if lockedPrjs.get(network) == client_host:
        del lockedPrjs[network]
        return True
    return False
# inp file operations
@router.post("/uploadinp/", status_code=status.HTTP_200_OK)
async def fastapi_upload_inp(afile: bytes, name: str):
    """Store an uploaded INP file under ``inpDir``.

    Security fix: ``name`` comes from the client; it is reduced to its
    basename so a crafted value (e.g. ``../../etc/passwd``) cannot escape
    ``inpDir``.
    """
    # exist_ok makes the explicit pre-check unnecessary.
    os.makedirs(inpDir, exist_ok=True)
    file_path = os.path.join(inpDir, os.path.basename(str(name)))
    with open(file_path, "wb") as f:
        f.write(afile)
    return True
@router.get("/downloadinp/", status_code=status.HTTP_200_OK)
async def fastapi_download_inp(name: str, response: Response):
    """Send a previously uploaded INP file back to the client.

    Security fix: ``name`` is reduced to its basename so a crafted value
    cannot read files outside ``inpDir`` (path traversal).  A missing file
    yields HTTP 400 with body ``True``, preserving the original contract.
    """
    filePath = os.path.join(inpDir, os.path.basename(name))
    if os.path.exists(filePath):
        # The download is always named "inp.inp", as before.
        return FileResponse(
            filePath, media_type="application/octet-stream", filename="inp.inp"
        )
    response.status_code = status.HTTP_400_BAD_REQUEST
    return True
# DingZQ, 2024-12-28, convert v3 to v2
@router.get("/convertv3tov2/", response_model=None)
async def fastapi_convert_v3_to_v2(req: Request) -> ChangeSet:
    """Convert an INP v3 payload (JSON body key ``"inp"``) to v2.

    The conversion populates a scratch project named ``v3Tov2``; the same
    extension data attached by ``/exportinp/`` is copied onto the first
    operation of the resulting ChangeSet.

    Fixes: removed leftover debug ``print`` calls, and wrapped the
    enrichment in try/finally so the scratch project is closed even if a
    lookup raises.
    """
    network = "v3Tov2"
    jo_root = await req.json()
    inp = jo_root["inp"]
    cs = convert_inp_v3_to_v2(inp)
    op = cs.operations[0]
    open_project(network)
    try:
        op["vertex"] = json.dumps(get_all_vertices(network))
        op["scada"] = json.dumps(get_all_scada_elements(network))
        op["dma"] = json.dumps(get_all_district_metering_areas(network))
        op["sa"] = json.dumps(get_all_service_areas(network))
        op["vd"] = json.dumps(get_all_virtual_districts(network))
        op["legend"] = get_extension_data(network, "legend")
        # Falsy (None/empty) extension data is normalised to "".
        op["scada_db"] = get_extension_data(network, "scada_db") or ""
    finally:
        close_project(network)
    return cs

View File

@@ -0,0 +1,44 @@
from typing import Any, List, Dict
from fastapi import APIRouter
from app.services.tjnetwork import (
get_pipe_risk_probability_now,
get_pipe_risk_probability,
get_pipes_risk_probability,
get_network_pipe_risk_probability_now,
get_pipe_risk_probability_geometries,
)
router = APIRouter()
@router.get("/getpiperiskprobabilitynow/")
async def fastapi_get_pipe_risk_probability_now(
network: str, pipe_id: str
) -> dict[str, Any]:
return get_pipe_risk_probability_now(network, pipe_id)
@router.get("/getpiperiskprobability/")
async def fastapi_get_pipe_risk_probability(
network: str, pipe_id: str
) -> dict[str, Any]:
return get_pipe_risk_probability(network, pipe_id)
@router.get("/getpipesriskprobability/")
async def fastapi_get_pipes_risk_probability(
    network: str, pipe_ids: str
) -> list[dict[str, Any]]:
    """Return risk probabilities for a comma-separated list of pipe ids."""
    return get_pipes_risk_probability(network, pipe_ids.split(","))
@router.get("/getnetworkpiperiskprobabilitynow/")
async def fastapi_get_network_pipe_risk_probability_now(
network: str,
) -> list[dict[str, Any]]:
return get_network_pipe_risk_probability_now(network)
@router.get("/getpiperiskprobabilitygeometries/")
async def fastapi_get_pipe_risk_probability_geometries(network: str) -> dict[str, Any]:
return get_pipe_risk_probability_geometries(network)

View File

@@ -0,0 +1,169 @@
from typing import Any
from fastapi import APIRouter, Request
from app.native.api import ChangeSet
from app.services.tjnetwork import (
get_scada_info,
get_all_scada_info,
get_scada_device_schema,
get_scada_device,
set_scada_device,
add_scada_device,
delete_scada_device,
clean_scada_device,
get_all_scada_device_ids,
get_all_scada_devices,
get_scada_device_data_schema,
get_scada_device_data,
set_scada_device_data,
add_scada_device_data,
delete_scada_device_data,
clean_scada_device_data,
get_scada_element_schema,
get_scada_element,
set_scada_element,
add_scada_element,
delete_scada_element,
clean_scada_element,
get_all_scada_elements,
get_scada_element_schema,
get_scada_info_schema,
)
router = APIRouter()
@router.get("/getscadaproperties/")
async def fast_get_scada_properties(network: str, scada: str) -> dict[str, Any]:
return get_scada_info(network, scada)
@router.get("/getallscadaproperties/")
async def fast_get_all_scada_properties(network: str) -> list[dict[str, Any]]:
return get_all_scada_info(network)
############################################################
# scada_device 29
############################################################
@router.get("/getscadadeviceschema/")
async def fastapi_get_scada_device_schema(network: str) -> dict[str, dict[str, Any]]:
return get_scada_device_schema(network)
@router.get("/getscadadevice/")
async def fastapi_get_scada_device(network: str, id: str) -> dict[str, Any]:
return get_scada_device(network, id)
@router.post("/setscadadevice/", response_model=None)
async def fastapi_set_scada_device(network: str, req: Request) -> ChangeSet:
props = await req.json()
return set_scada_device(network, ChangeSet(props))
@router.post("/addscadadevice/", response_model=None)
async def fastapi_add_scada_device(network: str, req: Request) -> ChangeSet:
props = await req.json()
return add_scada_device(network, ChangeSet(props))
@router.post("/deletescadadevice/", response_model=None)
async def fastapi_delete_scada_device(network: str, req: Request) -> ChangeSet:
props = await req.json()
return delete_scada_device(network, ChangeSet(props))
@router.post("/cleanscadadevice/", response_model=None)
async def fastapi_clean_scada_device(network: str) -> ChangeSet:
return clean_scada_device(network)
@router.get("/getallscadadeviceids/")
async def fastapi_get_all_scada_device_ids(network: str) -> list[str]:
return get_all_scada_device_ids(network)
@router.get("/getallscadadevices/")
async def fastapi_get_all_scada_devices(network: str) -> list[dict[str, Any]]:
return get_all_scada_devices(network)
############################################################
# scada_device_data 30
############################################################
@router.get("/getscadadevicedataschema/")
async def fastapi_get_scada_device_data_schema(
network: str,
) -> dict[str, dict[str, Any]]:
return get_scada_device_data_schema(network)
@router.get("/getscadadevicedata/")
async def fastapi_get_scada_device_data(network: str, device_id: str) -> dict[str, Any]:
return get_scada_device_data(network, device_id)
@router.post("/setscadadevicedata/", response_model=None)
async def fastapi_set_scada_device_data(network: str, req: Request) -> ChangeSet:
props = await req.json()
return set_scada_device_data(network, ChangeSet(props))
@router.post("/addscadadevicedata/", response_model=None)
async def fastapi_add_scada_device_data(network: str, req: Request) -> ChangeSet:
props = await req.json()
return add_scada_device_data(network, ChangeSet(props))
@router.post("/deletescadadevicedata/", response_model=None)
async def fastapi_delete_scada_device_data(network: str, req: Request) -> ChangeSet:
props = await req.json()
return delete_scada_device_data(network, ChangeSet(props))
@router.post("/cleanscadadevicedata/", response_model=None)
async def fastapi_clean_scada_device_data(network: str) -> ChangeSet:
return clean_scada_device_data(network)
############################################################
# scada_element 31
############################################################
@router.get("/getscadaelementschema/")
async def fastapi_get_scada_element_schema(
network: str,
) -> dict[str, dict[str, Any]]:
return get_scada_element_schema(network)
@router.get("/getscadaelements/")
async def fastapi_get_scada_elements(network: str) -> list[dict[str, Any]]:
return get_all_scada_elements(network)
@router.get("/getscadaelement/")
async def fastapi_get_scada_element(network: str, id: str) -> dict[str, Any]:
return get_scada_element(network, id)
@router.post("/setscadaelement/", response_model=None)
async def fastapi_set_scada_element(network: str, req: Request) -> ChangeSet:
props = await req.json()
return set_scada_element(network, ChangeSet(props))
@router.post("/addscadaelement/", response_model=None)
async def fastapi_add_scada_element(network: str, req: Request) -> ChangeSet:
props = await req.json()
return add_scada_element(network, ChangeSet(props))
@router.post("/deletescadaelement/", response_model=None)
async def fastapi_delete_scada_element(network: str, req: Request) -> ChangeSet:
props = await req.json()
return delete_scada_element(network, ChangeSet(props))
@router.post("/cleanscadaelement/", response_model=None)
async def fastapi_clean_scada_element(network: str) -> ChangeSet:
return clean_scada_element(network)
############################################################
# scada_info 38
############################################################
@router.get("/getscadainfoschema/")
async def fastapi_get_scada_info_schema(network: str) -> dict[str, dict[str, Any]]:
return get_scada_info_schema(network)
@router.get("/getscadainfo/")
async def fastapi_get_scada_info(network: str, id: str) -> dict[str, Any]:
return get_scada_info(network, id)
@router.get("/getallscadainfo/")
async def fastapi_get_all_scada_info(network: str) -> list[dict[str, Any]]:
return get_all_scada_info(network)

View File

@@ -0,0 +1,17 @@
from fastapi import APIRouter
from typing import Any, List, Dict
from app.services.tjnetwork import get_scheme_schema, get_scheme, get_all_schemes
router = APIRouter()
@router.get("/getschemeschema/")
async def fastapi_get_scheme_schema(network: str) -> dict[str, dict[Any, Any]]:
return get_scheme_schema(network)
@router.get("/getscheme/")
async def fastapi_get_scheme(network: str, schema_name: str) -> dict[Any, Any]:
return get_scheme(network, schema_name)
@router.get("/getallschemes/")
async def fastapi_get_all_schemes(network: str) -> list[dict[Any, Any]]:
return get_all_schemes(network)

View File

@@ -0,0 +1,164 @@
from typing import Any
from fastapi import APIRouter, HTTPException
from fastapi.responses import PlainTextResponse
from app.infra.cache.redis_client import redis_client
from app.services.tjnetwork import (
run_project,
run_project_return_dict,
run_inp,
dump_output,
)
from app.algorithms.simulations import (
burst_analysis,
valve_close_analysis,
flushing_analysis,
contaminant_simulation,
age_analysis,
# scheduling_analysis,
pressure_regulation,
)
from app.algorithms.sensors import (
pressure_sensor_placement_sensitivity,
pressure_sensor_placement_kmeans,
)
from app.services.network_import import network_update
from app.services.simulation_ops import project_management
router = APIRouter()
# PlainTextResponse is required here: the default JSON response class would
# re-serialise the report string and put quotes around every key.
@router.get("/runproject/", response_class=PlainTextResponse)
async def run_project_endpoint(network: str) -> str:
    """Run the simulation for *network* and return the raw report text.

    A Redis key is used as a process-wide mutex so only one simulation runs
    at a time; concurrent callers receive HTTP 409.
    """
    lock_key = "exclusive_api_lock"
    timeout = 120  # lock auto-expiry (seconds), in case the holder crashes
    # Try to acquire the lock (NX: set only if absent; EX: expiry seconds).
    acquired = redis_client.set(lock_key, "locked", nx=True, ex=timeout)
    if not acquired:
        raise HTTPException(status_code=409, detail="is in simulation")
    else:
        try:
            return run_project(network)
        finally:
            # Release explicitly; expiry would eventually free it anyway.
            redis_client.delete(lock_key)
# DingZQ, 2025-02-04, 返回dict[str, Any]
# output 和 report
# output 是 json
# report 是 text
@router.get("/runprojectreturndict/")
async def run_project_return_dict_endpoint(network: str) -> dict[str, Any]:
    """Run the simulation for *network*, returning output and report as a dict.

    Guarded by the same Redis mutex as ``/runproject/``: concurrent callers
    receive HTTP 409 while a simulation is already in progress.
    """
    lock_key = "exclusive_api_lock"
    timeout = 120  # seconds before the lock expires on its own
    # SET NX/EX acquires the lock atomically; falsy result means it is held.
    if not redis_client.set(lock_key, "locked", nx=True, ex=timeout):
        raise HTTPException(status_code=409, detail="is in simulation")
    try:
        return run_project_return_dict(network)
    finally:
        redis_client.delete(lock_key)
# put in inp folder, name without extension
@router.get("/runinp/")
async def run_inp_endpoint(network: str) -> str:
return run_inp(network)
# path is absolute path
@router.get("/dumpoutput/")
async def dump_output_endpoint(output: str) -> str:
return dump_output(output)
# Analysis Endpoints
@router.get("/burstanalysis/")
async def burst_analysis_endpoint(
network: str, pipe_id: str, start_time: str, end_time: str, burst_flow: float
):
return burst_analysis(network, pipe_id, start_time, end_time, burst_flow)
@router.get("/valvecloseanalysis/")
async def valve_close_analysis_endpoint(
network: str, valve_id: str, start_time: str, end_time: str
):
return valve_close_analysis(network, valve_id, start_time, end_time)
@router.get("/flushinganalysis/")
async def flushing_analysis_endpoint(
network: str, pipe_id: str, start_time: str, duration: float, flow: float
):
return flushing_analysis(network, pipe_id, start_time, duration, flow)
@router.get("/contaminantsimulation/")
async def contaminant_simulation_endpoint(
network: str, node_id: str, start_time: str, duration: float, concentration: float
):
return contaminant_simulation(network, node_id, start_time, duration, concentration)
@router.get("/ageanalysis/")
async def age_analysis_endpoint(network: str):
return age_analysis(network)
@router.get("/schedulinganalysis/")
async def scheduling_analysis_endpoint(network: str):
    """Scheduling analysis endpoint (currently unavailable).

    Fix: the backing implementation (``scheduling_analysis``) is commented
    out of this module's imports, so the original body raised ``NameError``
    (an opaque HTTP 500) on every request.  Report 501 Not Implemented
    explicitly instead.
    """
    raise HTTPException(
        status_code=501, detail="scheduling analysis is not implemented"
    )
@router.get("/pressureregulation/")
async def pressure_regulation_endpoint(
network: str, target_node: str, target_pressure: float
):
return pressure_regulation(network, target_node, target_pressure)
@router.get("/projectmanagement/")
async def project_management_endpoint(network: str):
return project_management(network)
@router.get("/dailyschedulinganalysis/")
async def daily_scheduling_analysis_endpoint(network: str):
    """Daily scheduling analysis endpoint (currently unavailable).

    Fix: ``daily_scheduling_analysis`` is not imported anywhere in this
    module, so the original body raised ``NameError`` (an opaque HTTP 500)
    on every request.  Report 501 Not Implemented explicitly instead.
    """
    raise HTTPException(
        status_code=501, detail="daily scheduling analysis is not implemented"
    )
@router.get("/networkupdate/")
async def network_update_endpoint(network: str):
return network_update(network)
@router.get("/pumpfailure/")
async def pump_failure_endpoint(network: str, pump_id: str, time: str):
    """Pump-failure analysis endpoint (currently unavailable).

    Fix: ``pump_failure`` is not imported anywhere in this module, so the
    original body raised ``NameError`` (an opaque HTTP 500) on every
    request.  Report 501 Not Implemented explicitly instead.  Parameters
    are kept so the public query interface is unchanged.
    """
    raise HTTPException(
        status_code=501, detail="pump failure analysis is not implemented"
    )
@router.get("/pressuresensorplacementsensitivity/")
async def pressure_sensor_placement_sensitivity_endpoint(
name: str, scheme_name: str, sensor_number: int, min_diameter: int, username: str
):
return pressure_sensor_placement_sensitivity(
name, scheme_name, sensor_number, min_diameter, username
)
@router.get("/pressuresensorplacementkmeans/")
async def pressure_sensor_placement_kmeans_endpoint(
name: str, scheme_name: str, sensor_number: int, min_diameter: int, username: str
):
return pressure_sensor_placement_kmeans(
name, scheme_name, sensor_number, min_diameter, username
)

View File

@@ -0,0 +1,111 @@
from fastapi import APIRouter, Request
from app.native.api import ChangeSet
from app.services.tjnetwork import (
get_current_operation,
execute_undo,
execute_redo,
list_snapshot,
have_snapshot,
have_snapshot_for_operation,
have_snapshot_for_current_operation,
take_snapshot_for_operation,
take_snapshot_for_current_operation,
take_snapshot,
pick_snapshot,
pick_operation,
sync_with_server,
execute_batch_commands,
execute_batch_command,
get_restore_operation,
set_restore_operation,
)
router = APIRouter()
@router.get("/getcurrentoperationid/")
async def get_current_operation_id_endpoint(network: str) -> int:
return get_current_operation(network)
@router.post("/undo/")
async def undo_endpoint(network: str):
return execute_undo(network)
@router.post("/redo/")
async def redo_endpoint(network: str):
return execute_redo(network)
@router.get("/getsnapshots/")
async def list_snapshot_endpoint(network: str) -> list[tuple[int, str]]:
return list_snapshot(network)
@router.get("/havesnapshot/")
async def have_snapshot_endpoint(network: str, tag: str) -> bool:
return have_snapshot(network, tag)
@router.get("/havesnapshotforoperation/")
async def have_snapshot_for_operation_endpoint(network: str, operation: int) -> bool:
return have_snapshot_for_operation(network, operation)
@router.get("/havesnapshotforcurrentoperation/")
async def have_snapshot_for_current_operation_endpoint(network: str) -> bool:
return have_snapshot_for_current_operation(network)
@router.post("/takesnapshotforoperation/")
async def take_snapshot_for_operation_endpoint(
network: str, operation: int, tag: str
) -> None:
return take_snapshot_for_operation(network, operation, tag)
@router.post("/takesnapshotforcurrentoperation")
async def take_snapshot_for_current_operation_endpoint(network: str, tag: str) -> None:
return take_snapshot_for_current_operation(network, tag)
# 兼容旧拼写: takenapshotforcurrentoperation
@router.post("/takenapshotforcurrentoperation")
async def take_snapshot_for_current_operation_legacy_endpoint(
network: str, tag: str
) -> None:
return take_snapshot_for_current_operation(network, tag)
@router.post("/takesnapshot/")
async def take_snapshot_endpoint(network: str, tag: str) -> None:
return take_snapshot(network, tag)
@router.post("/picksnapshot/", response_model=None)
async def pick_snapshot_endpoint(network: str, tag: str, discard: bool = False) -> ChangeSet:
return pick_snapshot(network, tag, discard)
@router.post("/pickoperation/", response_model=None)
async def pick_operation_endpoint(
network: str, operation: int, discard: bool = False
) -> ChangeSet:
return pick_operation(network, operation, discard)
@router.get("/syncwithserver/", response_model=None)
async def sync_with_server_endpoint(network: str, operation: int) -> ChangeSet:
return sync_with_server(network, operation)
@router.post("/batch/", response_model=None)
async def execute_batch_commands_endpoint(network: str, req: Request) -> ChangeSet:
    """Execute a batch of operations posted as JSON ``{"operations": [...]}``."""
    payload = await req.json()
    change_set: ChangeSet = ChangeSet()
    change_set.operations = payload["operations"]
    return execute_batch_commands(network, change_set)
@router.post("/compressedbatch/", response_model=None)
async def execute_compressed_batch_commands_endpoint(
network: str, req: Request
) -> ChangeSet:
jo_root = await req.json()
cs: ChangeSet = ChangeSet()
cs.operations = jo_root["operations"]
return execute_batch_command(network, cs)
@router.get("/getrestoreoperation/")
async def get_restore_operation_endpoint(network: str) -> int:
return get_restore_operation(network)
@router.post("/setrestoreoperation/")
async def set_restore_operation_endpoint(network: str, operation: int) -> None:
return set_restore_operation(network, operation)

View File

@@ -0,0 +1,21 @@
from fastapi import APIRouter, Request
from typing import Any, List, Dict, Union
from app.services.tjnetwork import *
router = APIRouter()
###########################################################
# user 39
###########################################################
@router.get("/getuserschema/")
async def fastapi_get_user_schema(network: str) -> dict[str, dict[Any, Any]]:
return get_user_schema(network)
@router.get("/getuser/")
async def fastapi_get_user(network: str, user_name: str) -> dict[Any, Any]:
return get_user(network, user_name)
@router.get("/getallusers/")
async def fastapi_get_all_users(network: str) -> list[dict[Any, Any]]:
return get_all_users(network)

View File

@@ -2,19 +2,78 @@ from fastapi import APIRouter
from app.api.v1.endpoints import ( from app.api.v1.endpoints import (
auth, auth,
project, project,
network_elements,
simulation, simulation,
scada, scada,
extension, extension,
snapshots snapshots,
data_query,
users,
schemes,
misc,
risk,
cache,
)
from app.api.v1.endpoints.network import (
general,
junctions,
reservoirs,
tanks,
pipes,
pumps,
valves,
tags,
demands,
geometry,
regions,
)
from app.api.v1.endpoints.components import (
curves,
patterns,
controls,
options,
quality,
visuals,
) )
api_router = APIRouter() api_router = APIRouter()
api_router.include_router(auth.router, prefix="/auth", tags=["auth"]) # Core Services
api_router.include_router(project.router, prefix="/projects", tags=["projects"]) api_router.include_router(auth.router, tags=["Auth"])
api_router.include_router(network_elements.router, prefix="/elements", tags=["network-elements"]) api_router.include_router(project.router, tags=["Project"])
api_router.include_router(simulation.router, prefix="/simulation", tags=["simulation"])
api_router.include_router(scada.router, prefix="/scada", tags=["scada"]) # Network Elements (Node/Link Types)
api_router.include_router(extension.router, prefix="/extension", tags=["extension"]) api_router.include_router(general.router, tags=["Network General"])
api_router.include_router(snapshots.router, prefix="/snapshots", tags=["snapshots"]) api_router.include_router(junctions.router, tags=["Junctions"])
api_router.include_router(reservoirs.router, tags=["Reservoirs"])
api_router.include_router(tanks.router, tags=["Tanks"])
api_router.include_router(pipes.router, tags=["Pipes"])
api_router.include_router(pumps.router, tags=["Pumps"])
api_router.include_router(valves.router, tags=["Valves"])
# Network Features
api_router.include_router(tags.router, tags=["Tags"])
api_router.include_router(demands.router, tags=["Demands"])
api_router.include_router(geometry.router, tags=["Geometry & Coordinates"])
api_router.include_router(regions.router, tags=["Regions & DMAs"])
# Components & Controls
api_router.include_router(curves.router, tags=["Curves"])
api_router.include_router(patterns.router, tags=["Patterns"])
api_router.include_router(controls.router, tags=["Controls & Rules"])
api_router.include_router(options.router, tags=["Options"])
api_router.include_router(quality.router, tags=["Quality"])
api_router.include_router(visuals.router, tags=["Visuals"])
# Simulation & Data
api_router.include_router(simulation.router, tags=["Simulation Control"])
api_router.include_router(data_query.router, tags=["Data Query & InfluxDB"])
api_router.include_router(scada.router, tags=["SCADA"])
api_router.include_router(snapshots.router, tags=["Snapshots"])
api_router.include_router(users.router, tags=["Users"])
api_router.include_router(schemes.router, tags=["Schemes"])
api_router.include_router(misc.router, tags=["Misc"])
api_router.include_router(risk.router, tags=["Risk"])
api_router.include_router(cache.router, tags=["Cache"])
# Extension
api_router.include_router(extension.router, tags=["Extension"])

19
app/infra/cache/redis_client.py vendored Normal file
View File

@@ -0,0 +1,19 @@
import redis
import msgpack
from datetime import datetime
from typing import Any
# Initialize Redis connection
redis_client = redis.Redis(host="127.0.0.1", port=6379, db=0)
def encode_datetime(obj: Any) -> Any:
    """msgpack ``default`` hook: represent a datetime as a tagged dict.

    Values that are not datetimes pass through unchanged.
    """
    if not isinstance(obj, datetime):
        return obj
    stamp = obj.strftime("%Y%m%dT%H:%M:%S.%f")
    return {"__datetime__": True, "as_str": stamp}
def decode_datetime(obj: Any) -> Any:
    """msgpack ``object_hook``: restore datetimes tagged by ``encode_datetime``.

    Dicts without the ``"__datetime__"`` marker pass through unchanged.
    """
    if "__datetime__" not in obj:
        return obj
    return datetime.strptime(obj["as_str"], "%Y%m%dT%H:%M:%S.%f")

View File

@@ -18,7 +18,7 @@ import get_data
import psycopg import psycopg
import time import time
import app.services.simulation as simulation import app.services.simulation as simulation
from tjnetwork import * from app.services.tjnetwork import *
import schedule import schedule
import threading import threading
import app.services.globals as globals import app.services.globals as globals

View File

@@ -2,7 +2,7 @@ import time
from typing import List, Optional from typing import List, Optional
from fastapi.logger import logger from fastapi.logger import logger
import postgresql_info import app.native.api.postgresql_info as postgresql_info
import psycopg import psycopg

View File

@@ -4,15 +4,15 @@ from datetime import datetime, timedelta
from psycopg import AsyncConnection from psycopg import AsyncConnection
import pandas as pd import pandas as pd
import numpy as np import numpy as np
from api_ex.Fdataclean import clean_flow_data_df_kf from app.algorithms.api_ex.Fdataclean import clean_flow_data_df_kf
from api_ex.Pdataclean import clean_pressure_data_df_km from app.algorithms.api_ex.Pdataclean import clean_pressure_data_df_km
from api_ex.pipeline_health_analyzer import PipelineHealthAnalyzer from app.algorithms.api_ex.pipeline_health_analyzer import PipelineHealthAnalyzer
from postgresql.internal_queries import InternalQueries from app.infra.db.postgresql.internal_queries import InternalQueries
from postgresql.scada_info import ScadaRepository as PostgreScadaRepository from app.infra.db.postgresql.scada_info import ScadaRepository as PostgreScadaRepository
from timescaledb.schemas.realtime import RealtimeRepository from app.infra.db.timescaledb.schemas.realtime import RealtimeRepository
from timescaledb.schemas.scheme import SchemeRepository from app.infra.db.timescaledb.schemas.scheme import SchemeRepository
from timescaledb.schemas.scada import ScadaRepository from app.infra.db.timescaledb.schemas.scada import ScadaRepository
class CompositeQueries: class CompositeQueries:

View File

@@ -1,13 +1,13 @@
from typing import List from typing import List
from fastapi.logger import logger from fastapi.logger import logger
from timescaledb.schemas.scheme import SchemeRepository
from timescaledb.schemas.realtime import RealtimeRepository
import timescaledb.timescaledb_info as timescaledb_info
from datetime import datetime, timedelta from datetime import datetime, timedelta
from timescaledb.schemas.scada import ScadaRepository
import psycopg import psycopg
import time import time
from app.infra.db.timescaledb.schemas.scheme import SchemeRepository
from app.infra.db.timescaledb.schemas.realtime import RealtimeRepository
import app.infra.db.timescaledb.timescaledb_info as timescaledb_info
from app.infra.db.timescaledb.schemas.scada import ScadaRepository
class InternalStorage: class InternalStorage:

View File

@@ -8,7 +8,7 @@ from .schemas.realtime import RealtimeRepository
from .schemas.scheme import SchemeRepository from .schemas.scheme import SchemeRepository
from .schemas.scada import ScadaRepository from .schemas.scada import ScadaRepository
from .composite_queries import CompositeQueries from .composite_queries import CompositeQueries
from postgresql.database import get_database_instance as get_postgres_database_instance from app.infra.db.postgresql.database import get_database_instance as get_postgres_database_instance
router = APIRouter(prefix="/timescaledb", tags=["TimescaleDB"]) router = APIRouter(prefix="/timescaledb", tags=["TimescaleDB"])

View File

@@ -2,7 +2,7 @@ from typing import List, Any, Dict
from datetime import datetime, timedelta, timezone from datetime import datetime, timedelta, timezone
from collections import defaultdict from collections import defaultdict
from psycopg import AsyncConnection, Connection, sql from psycopg import AsyncConnection, Connection, sql
import globals import app.services.globals as globals
# 定义UTC+8时区 # 定义UTC+8时区
UTC_8 = timezone(timedelta(hours=8)) UTC_8 = timezone(timedelta(hours=8))

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,32 @@
from app.services.network_import import network_update, submit_scada_info
from app.services.scheme_management import (
create_user,
delete_user,
scheme_name_exists,
store_scheme_info,
delete_scheme_info,
query_scheme_list,
upload_shp_to_pg,
submit_risk_probability_result,
)
from app.services.simulation_ops import (
project_management,
scheduling_simulation,
daily_scheduling_simulation,
)
__all__ = [
"network_update",
"submit_scada_info",
"create_user",
"delete_user",
"scheme_name_exists",
"store_scheme_info",
"delete_scheme_info",
"query_scheme_list",
"upload_shp_to_pg",
"submit_risk_probability_result",
"project_management",
"scheduling_simulation",
"daily_scheduling_simulation",
]

View File

@@ -0,0 +1,197 @@
import csv
import os
import chardet
import psycopg
from psycopg import sql
import app.services.project_info as project_info
from app.native.api.postgresql_info import get_pgconn_string
from app.services.tjnetwork import read_inp
############################################################
# network_update 10
############################################################
def network_update(file_path: str) -> None:
    """
    更新pg数据库中的inp文件

    Loads the EPANET network into the project database, then — if the sidecar
    file ``./history_pattern_flow.csv`` exists (columns: id, factor, flow) —
    bulk-inserts it into the ``history_patterns_flows`` table.

    :param file_path: inp文件
    :return:
    """
    # FIX: was hard-coded to "szh" while the CSV import below targeted
    # project_info.name; use the configured project name for both so they
    # always hit the same database.
    read_inp(project_info.name, file_path)
    csv_path = "./history_pattern_flow.csv"
    # 检查文件是否存在
    if not os.path.exists(csv_path):
        print(f"history_patterns_flows文件不存在。")
        return
    print(f"history_patterns_flows文件存在,开始处理...")
    # Loop-invariant: build the INSERT statement once, not per CSV row.
    insert_sql = sql.SQL(
        """
        INSERT INTO history_patterns_flows (id, factor, flow)
        VALUES (%s, %s, %s);
        """
    )
    # 连接到 PostgreSQL 数据库
    with psycopg.connect(f"dbname={project_info.name} host=127.0.0.1") as conn:
        with conn.cursor() as cur:
            with open(csv_path, newline="", encoding="utf-8-sig") as csvfile:
                reader = csv.DictReader(csvfile)
                # 直接将数据插入,不进行唯一性检查
                for row in reader:
                    cur.execute(insert_sql, (row["id"], row["factor"], row["flow"]))
            conn.commit()
    print("数据成功导入到 'history_patterns_flows' 表格。")
def submit_scada_info(name: str, coord_id: str) -> None:
    """
    将scada信息表导入pg数据库

    Reads ``./scada_info.csv`` (encoding auto-detected), clears any existing
    rows in the ``scada_info`` table, then inserts one row per CSV record.
    X/Y coordinates are additionally stored as a PostGIS EWKT point in the
    given SRID.

    :param name: 项目名称(数据库名称)
    :param coord_id: 坐标系的id(如4326),根据原始坐标信息输入
    :return:
    """
    scada_info_path = "./scada_info.csv"
    # 检查文件是否存在
    if not os.path.exists(scada_info_path):
        print(f"scada_info文件不存在。")
        return
    print(f"scada_info文件存在,开始处理...")
    # 自动检测文件编码 (source CSVs may be GBK, UTF-8, ...)
    with open(scada_info_path, "rb") as file:
        raw_data = file.read()
    detected = chardet.detect(raw_data)
    file_encoding = detected["encoding"]
    print(f"检测到的文件编码:{file_encoding}")
    # The associated_source_outflow_id columns are a fixed set (1..20), so the
    # column list and the INSERT statement are loop-invariant: build them once
    # instead of once per CSV row.
    associated_columns = [f"associated_source_outflow_id{i}" for i in range(1, 21)]
    insert_sql = sql.SQL(
        """
        INSERT INTO scada_info (
            id, type, associated_element_id, associated_pattern,
            associated_pipe_flow_id, {associated_columns},
            API_query_id, transmission_mode, transmission_frequency,
            reliability, X_coor, Y_coor, coord
        )
        VALUES (
            %s, %s, %s, %s, %s, {associated_placeholders},
            %s, %s, %s, %s, %s, %s, %s
        );
        """
    ).format(
        associated_columns=sql.SQL(", ").join(
            sql.Identifier(col) for col in associated_columns
        ),
        associated_placeholders=sql.SQL(", ").join(
            sql.Placeholder() for _ in associated_columns
        ),
    )
    try:
        # 动态替换数据库名称
        conn_string = get_pgconn_string(db_name=name)
        with psycopg.connect(conn_string) as conn:
            with conn.cursor() as cur:
                # 检查 scada_info 表是否为空
                cur.execute("SELECT COUNT(*) FROM scada_info;")
                count = cur.fetchone()[0]
                if count > 0:
                    print("scada_info表中已有数据,正在清空记录...")
                    cur.execute("DELETE FROM scada_info;")
                    print("表记录已清空。")
                with open(
                    scada_info_path, newline="", encoding=file_encoding
                ) as csvfile:
                    reader = csv.DictReader(csvfile)
                    for row in reader:
                        # 将CSV单元格值为空的字段转换为 None
                        cleaned_row = {
                            key: (value if value.strip() else None)
                            for key, value in row.items()
                        }
                        associated_values = [
                            (
                                cleaned_row.get(col).strip()
                                if cleaned_row.get(col)
                                and cleaned_row.get(col).strip()
                                else None
                            )
                            for col in associated_columns
                        ]
                        # 将 X_coor 和 Y_coor 转换为 geometry 类型
                        x_coor = (
                            float(cleaned_row["X_coor"])
                            if cleaned_row["X_coor"]
                            else None
                        )
                        y_coor = (
                            float(cleaned_row["Y_coor"])
                            if cleaned_row["Y_coor"]
                            else None
                        )
                        # BUG FIX: `if x_coor and y_coor` treated a coordinate
                        # of exactly 0.0 as missing; compare against None so
                        # points on an axis still get a geometry.
                        coord = (
                            f"SRID={coord_id};POINT({x_coor} {y_coor})"
                            if x_coor is not None and y_coor is not None
                            else None
                        )
                        # 将数据插入数据库
                        cur.execute(
                            insert_sql,
                            (
                                cleaned_row["id"],
                                cleaned_row["type"],
                                cleaned_row["associated_element_id"],
                                cleaned_row.get("associated_pattern"),
                                cleaned_row.get("associated_pipe_flow_id"),
                                *associated_values,
                                cleaned_row.get("API_query_id"),
                                cleaned_row["transmission_mode"],
                                cleaned_row["transmission_frequency"],
                                cleaned_row["reliability"],
                                x_coor,
                                y_coor,
                                coord,
                            ),
                        )
            conn.commit()
        print("数据成功导入到 'scada_info' 表格。")
    except Exception as e:
        print(f"导入时出错:{e}")

View File

@@ -1 +1,22 @@
import os

import yaml

# Project root is three directory levels above this file.
_current_file = os.path.abspath(__file__)
project_root = os.path.dirname(os.path.dirname(os.path.dirname(_current_file)))

# Accept either extension; the first candidate that exists wins.
_candidates = [
    os.path.join(project_root, "configs", "project_info.yml"),
    os.path.join(project_root, "configs", "project_info.yaml"),
]
config_file = next((p for p in _candidates if os.path.exists(p)), _candidates[-1])
if not os.path.exists(config_file):
    raise FileNotFoundError(f"未找到项目配置文件 (project_info.yaml 或 .yml): {os.path.dirname(config_file)}")

with open(config_file, 'r', encoding='utf-8') as f:
    _config = yaml.safe_load(f)

if not _config or 'name' not in _config:
    raise KeyError(f"项目配置文件中缺少 'name' 配置: {config_file}")

# Public: the project (database) name used throughout the application.
name = _config['name']

View File

@@ -0,0 +1,266 @@
import ast
import json
import geopandas as gpd
import pandas as pd
import psycopg
from sqlalchemy import create_engine
from app.native.api.postgresql_info import get_pgconn_string
# 2025/03/23
def create_user(name: str, username: str, password: str):
    """
    创建用户
    :param name: 数据库名称
    :param username: 用户名
    :param password: 密码
    :return:
    """
    # NOTE(review): the password is stored as plain text; consider hashing it
    # before insert — left unchanged here to preserve existing behavior.
    try:
        # Resolve the connection string for the target project database.
        conn_string = get_pgconn_string(db_name=name)
        with psycopg.connect(conn_string) as conn:
            with conn.cursor() as cur:
                cur.execute(
                    "INSERT INTO users (username, password) VALUES (%s, %s)",
                    (username, password),
                )
                conn.commit()
        print("新用户创建成功!")
    except Exception as e:
        print(f"创建用户出错:{e}")
# 2025/03/23
def delete_user(name: str, username: str):
    """
    删除用户
    :param name: 数据库名称
    :param username: 用户名
    :return:
    """
    try:
        # Resolve the connection string for the target project database.
        conn_string = get_pgconn_string(db_name=name)
        with psycopg.connect(conn_string) as conn:
            with conn.cursor() as cur:
                # Parameterized delete — username is caller-supplied input.
                cur.execute("DELETE FROM users WHERE username = %s", (username,))
                conn.commit()
        print(f"用户 {username} 删除成功!")
    except Exception as e:
        print(f"删除用户出错:{e}")
# 2025/03/23
def scheme_name_exists(name: str, scheme_name: str) -> bool:
    """
    判断传入的 scheme_name 是否已存在于 scheme_list 表中,用于输入框判断
    :param name: 数据库名称
    :param scheme_name: 需要判断的方案名称
    :return: 如果存在返回 True,否则返回 False
    """
    try:
        conn_string = get_pgconn_string(db_name=name)
        with psycopg.connect(conn_string) as conn:
            with conn.cursor() as cur:
                cur.execute(
                    "SELECT COUNT(*) FROM scheme_list WHERE scheme_name = %s",
                    (scheme_name,),
                )
                row = cur.fetchone()
        # COUNT(*) always yields a row; guard anyway and coerce to bool.
        return bool(row is not None and row[0] > 0)
    except Exception as e:
        # On any query failure, report "not found" rather than propagate.
        print(f"查询 scheme_name 时出错:{e}")
        return False
# 2025/03/23
def store_scheme_info(
    name: str,
    scheme_name: str,
    scheme_type: str,
    username: str,
    scheme_start_time: str,
    scheme_detail: dict,
):
    """
    将一条方案记录插入 scheme_list 表中
    :param name: 数据库名称
    :param scheme_name: 方案名称
    :param scheme_type: 方案类型
    :param username: 用户名(需在 users 表中已存在)
    :param scheme_start_time: 方案起始时间(字符串)
    :param scheme_detail: 方案详情(字典,会转换为 JSON)
    :return:
    """
    try:
        conn_string = get_pgconn_string(db_name=name)
        # 将字典转换为 JSON 字符串
        scheme_detail_json = json.dumps(scheme_detail)
        # Local renamed from `sql` to avoid shadowing psycopg's sql module.
        insert_stmt = """
                INSERT INTO scheme_list (scheme_name, scheme_type, username, scheme_start_time, scheme_detail)
                VALUES (%s, %s, %s, %s, %s)
                """
        with psycopg.connect(conn_string) as conn:
            with conn.cursor() as cur:
                cur.execute(
                    insert_stmt,
                    (
                        scheme_name,
                        scheme_type,
                        username,
                        scheme_start_time,
                        scheme_detail_json,
                    ),
                )
                conn.commit()
        print("方案信息存储成功!")
    except Exception as e:
        print(f"存储方案信息时出错:{e}")
# 2025/03/23
def delete_scheme_info(name: str, scheme_name: str) -> None:
    """
    从 scheme_list 表中删除指定的方案
    :param name: 数据库名称
    :param scheme_name: 要删除的方案名称
    """
    try:
        conn_string = get_pgconn_string(db_name=name)
        with psycopg.connect(conn_string) as conn:
            with conn.cursor() as cur:
                # 使用参数化查询删除方案记录
                cur.execute(
                    "DELETE FROM scheme_list WHERE scheme_name = %s",
                    (scheme_name,),
                )
                conn.commit()
        print(f"方案 {scheme_name} 删除成功!")
    except Exception as e:
        print(f"删除方案时出错:{e}")
# 2025/03/23
def query_scheme_list(name: str) -> list:
    """
    查询pg数据库中的scheme_list,按照 create_time 降序排列,离现在时间最近的记录排在最前面
    :param name: 项目名称(数据库名称)
    :return: 返回查询结果的所有行;查询失败时返回空列表
    """
    try:
        # 动态替换数据库名称
        conn_string = get_pgconn_string(db_name=name)
        with psycopg.connect(conn_string) as conn:
            with conn.cursor() as cur:
                # 按 create_time 降序排列
                cur.execute("SELECT * FROM scheme_list ORDER BY create_time DESC")
                return cur.fetchall()
    except Exception as e:
        print(f"查询错误:{e}")
        # BUG FIX: previously fell through and implicitly returned None,
        # contradicting the declared `-> list`; callers iterating the
        # result would crash.  Return an empty list on failure instead.
        return []
# 2025/03/23
def upload_shp_to_pg(name: str, table_name: str, role: str, shp_file_path: str):
    """
    将 Shapefile 文件上传到 PostgreSQL 数据库
    :param name: 项目名称(数据库名称)
    :param table_name: 创建表的名字
    :param role: 数据库角色名(位于c盘user中查看)
    :param shp_file_path: shp文件的路径
    :return:
    """
    try:
        # The psycopg connection is only an early, explicit reachability check
        # for the target database — the actual write goes through SQLAlchemy
        # below.  (It was previously opened and never used at all.)
        conn_string = get_pgconn_string(db_name=name)
        with psycopg.connect(conn_string):
            # 读取 Shapefile 文件
            gdf = gpd.read_file(shp_file_path)
            # BUG FIX: a shapefile without a .prj has gdf.crs == None, and
            # gdf.crs.to_string() raised AttributeError; fail with a clear
            # message instead.
            if gdf.crs is None:
                raise ValueError(f"Shapefile 缺少坐标系信息 (.prj): {shp_file_path}")
            # 检查投影坐标系(CRS)并确保是 EPSG:4490
            # (the old comment said 4326, but 4490 is what the code enforces)
            if gdf.crs.to_string() != "EPSG:4490":
                gdf = gdf.to_crs(epsg=4490)
            # 使用 GeoDataFrame 的 .to_postgis 方法将数据写入 PostgreSQL
            # 需要在数据库中提前安装 PostGIS 扩展
            engine = create_engine(f"postgresql+psycopg2://{role}:@127.0.0.1/{name}")
            gdf.to_postgis(
                table_name, engine, if_exists="replace", index=True, index_label="id"
            )
            print(
                f"Shapefile 文件成功上传到 PostgreSQL 数据库 '{name}' 的表 '{table_name}'."
            )
    except Exception as e:
        print(f"上传 Shapefile 到 PostgreSQL 时出错:{e}")
def submit_risk_probability_result(name: str, result_file_path: str) -> None:
    """
    将管网风险评估结果导入pg数据库

    Reads an Excel workbook (sheet "Sheet1", columns pipeID / pipeage /
    risk_probability_now / x / y where x and y hold stringified Python
    lists), clears the ``pipe_risk_probability`` table, and bulk-inserts the
    rows.

    :param name: 项目名称(数据库名称)
    :param result_file_path: 结果文件路径
    :return:
    """
    # Loop-invariant INSERT statement, built once.
    insert_query = """
        INSERT INTO pipe_risk_probability
        (pipeID, pipeage, risk_probability_now, x, y)
        VALUES (%s, %s, %s, %s, %s)
    """
    try:
        # 动态替换数据库名称
        conn_string = get_pgconn_string(db_name=name)
        # 连接到 PostgreSQL 数据库
        with psycopg.connect(conn_string) as conn:
            with conn.cursor() as cur:
                # 先清空旧的评估结果
                cur.execute("SELECT COUNT(*) FROM pipe_risk_probability;")
                count = cur.fetchone()[0]
                if count > 0:
                    print("pipe_risk_probability表中已有数据,正在清空记录...")
                    cur.execute("DELETE FROM pipe_risk_probability;")
                    print("表记录已清空。")
                # 读取Excel并转换x/y列为列表
                df = pd.read_excel(result_file_path, sheet_name="Sheet1")
                df["x"] = df["x"].apply(ast.literal_eval)
                df["y"] = df["y"].apply(ast.literal_eval)
                # 批量插入数据 — executemany instead of a Python-level
                # execute() per row.
                rows = [
                    (
                        row["pipeID"],
                        row["pipeage"],
                        row["risk_probability_now"],
                        row["x"],  # list passed through as-is
                        row["y"],  # 同上
                    )
                    for _, row in df.iterrows()
                ]
                if rows:
                    cur.executemany(insert_query, rows)
                conn.commit()
        print("风险评估结果导入成功")
    except Exception as e:
        print(f"导入时出错:{e}")

View File

@@ -1,5 +1,5 @@
import numpy as np import numpy as np
from tjnetwork import * from app.services.tjnetwork import *
from app.native.api.s36_wda_cal import * from app.native.api.s36_wda_cal import *
# from get_real_status import * # from get_real_status import *
@@ -11,7 +11,7 @@ import pytz
import requests import requests
import time import time
import shutil import shutil
from epanet.epanet import Output from app.services.epanet.epanet import Output
from typing import Optional, Tuple from typing import Optional, Tuple
import app.infra.db.influxdb.api as influxdb_api import app.infra.db.influxdb.api as influxdb_api
import typing import typing
@@ -21,8 +21,8 @@ import app.services.globals as globals
import uuid import uuid
import app.services.project_info as project_info import app.services.project_info as project_info
from app.native.api.postgresql_info import get_pgconn_string from app.native.api.postgresql_info import get_pgconn_string
from timescaledb.internal_queries import InternalQueries as TimescaleInternalQueries from app.infra.db.timescaledb.internal_queries import InternalQueries as TimescaleInternalQueries
from timescaledb.internal_queries import InternalStorage as TimescaleInternalStorage from app.infra.db.timescaledb.internal_queries import InternalStorage as TimescaleInternalStorage
logging.basicConfig( logging.basicConfig(
level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"

View File

@@ -0,0 +1,233 @@
import json
from datetime import datetime
from math import pi
import pytz
from app.algorithms.api_ex.run_simulation import run_simulation_ex
from app.native.api.project import copy_project
from app.services.epanet.epanet import Output
from app.services.tjnetwork import *
############################################################
# project management 07 ***暂时不使用,与业务需求无关***
############################################################
def project_management(
    prj_name,
    start_datetime,
    pump_control,
    tank_initial_level_control=None,
    region_demand_control=None,
) -> str:
    """Run a 24h what-if simulation on a throwaway copy of the project.

    A fresh project named ``project_management_<prj_name>`` is cloned from
    ``<prj_name>_template``, simulated for 86400 s with the supplied
    controls, then closed and deleted; the simulation result string is
    returned.
    """
    def _log(message: str) -> None:
        # Progress output timestamped in Shanghai local time.
        now = datetime.now(pytz.timezone("Asia/Shanghai"))
        print(now.strftime("%Y-%m-%d %H:%M:%S") + " -- " + message)

    _log("Start Analysis.")
    new_name = f"project_management_{prj_name}"
    # Drop any stale working copy left over from a previous run.
    if have_project(new_name):
        if is_project_open(new_name):
            close_project(new_name)
        delete_project(new_name)
    _log("Start Copying Database.")
    copy_project(prj_name + "_template", new_name)
    _log("Start Opening Database.")
    open_project(new_name)
    _log("Database Loading OK.")
    result = run_simulation_ex(
        name=new_name,
        simulation_type="realtime",
        start_datetime=start_datetime,
        duration=86400,
        pump_control=pump_control,
        tank_initial_level_control=tank_initial_level_control,
        region_demand_control=region_demand_control,
        downloading_prohibition=True,
    )
    # Clean up the temporary project.
    if is_project_open(new_name):
        close_project(new_name)
    delete_project(new_name)
    return result
############################################################
# scheduling analysis 08 ***暂时不使用,与业务需求无关***
############################################################
def scheduling_simulation(
    prj_name, start_time, pump_control, tank_id, water_plant_output_id, time_delta=300
) -> str:
    """Run a single-step scheduling simulation and project the tank level.

    Clones ``<prj_name>_template`` into a throwaway ``scheduling_<prj_name>``
    project, runs a zero-duration "realtime" simulation with the given pump
    controls, then reads the EPANET output file and reports:

    - the water-plant outlet pressure (pressure head / 100 -> MPa),
    - the tank's initial level (m), and
    - the tank level expected after ``time_delta`` seconds, extrapolated
      from the net inflow through the pipes connected to the tank.

    :param prj_name: base project name; ``<prj_name>_template`` must exist
    :param start_time: simulation start time passed to run_simulation_ex
    :param pump_control: pump control settings for the simulation
    :param tank_id: node id of the tank whose level is projected
    :param water_plant_output_id: node id of the water-plant outlet
    :param time_delta: projection horizon in seconds (default 300)
    :return: JSON string with keys water_plant_output_pressure,
        tank_init_level and tank_level
    """
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Analysis."
    )
    new_name = f"scheduling_{prj_name}"
    # Drop any stale working copy from a previous run.
    if have_project(new_name):
        if is_project_open(new_name):
            close_project(new_name)
        delete_project(new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Copying Database."
    )
    copy_project(prj_name + "_template", new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Start Opening Database."
    )
    open_project(new_name)
    print(
        datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        + " -- Database Loading OK."
    )
    run_simulation_ex(
        new_name, "realtime", start_time, duration=0, pump_control=pump_control
    )
    # run_simulation_ex may close the project; reopen before querying it.
    if not is_project_open(new_name):
        open_project(new_name)
    tank = get_tank(new_name, tank_id)  # tank record (diameter, level, links)
    tank_floor_space = pi * pow(tank["diameter"] / 2, 2)  # tank floor area (m^2)
    tank_init_level = tank["init_level"]  # initial tank level (m)
    tank_pipes_id = tank["links"]  # pipes list
    tank_pipe_flow_direction = (
        {}
    )  # flow-direction sign per pipe: +1 when the tank is the downstream node, -1 when upstream
    for pipe_id in tank_pipes_id:
        if get_pipe(new_name, pipe_id)["node2"] == tank_id:  # tank is downstream
            tank_pipe_flow_direction[pipe_id] = 1
        else:
            tank_pipe_flow_direction[pipe_id] = -1
    output = Output("./temp/{}.db.out".format(new_name))
    node_results = (
        output.node_results()
    )  # [{'node': str, 'result': [{'pressure': float}]}]
    # NOTE(review): stays 0 if water_plant_output_id is absent from the
    # results — the caller then silently gets 0.0 MPa.
    water_plant_output_pressure = 0
    for node_result in node_results:
        if node_result["node"] == water_plant_output_id:  # outlet pressure head (m)
            water_plant_output_pressure = node_result["result"][-1]["pressure"]
    water_plant_output_pressure /= 100  # expected plant outlet pressure (MPa)
    pipe_results = output.link_results()  # [{'link': str, 'result': [{'flow': float}]}]
    tank_inflow = 0
    for pipe_result in pipe_results:
        for pipe_id in tank_pipes_id:  # iterate pipes connected to the tank
            if pipe_result["link"] == pipe_id:  # signed tank inflow (L/s)
                tank_inflow += (
                    pipe_result["result"][-1]["flow"]
                    * tank_pipe_flow_direction[pipe_id]
                )
    tank_inflow /= 1000  # tank inflow (m^3/s)
    tank_level_delta = tank_inflow * time_delta / tank_floor_space  # level change (m)
    tank_level = tank_init_level + tank_level_delta  # projected tank level (m)
    simulation_results = {
        "water_plant_output_pressure": water_plant_output_pressure,
        "tank_init_level": tank_init_level,
        "tank_level": tank_level,
    }
    # Clean up the temporary project.
    if is_project_open(new_name):
        close_project(new_name)
    delete_project(new_name)
    return json.dumps(simulation_results)
def daily_scheduling_simulation(
    prj_name,
    start_time,
    pump_control,
    reservoir_id,
    tank_id,
    water_plant_output_id,
    reservoir_datum=250.35,
) -> str:
    """Run a 24h scheduling simulation and extract key time series.

    Clones ``<prj_name>_template`` into a throwaway
    ``daily_scheduling_<prj_name>`` project, runs a 24-hour (86400 s)
    "realtime" simulation with the given pump controls, then reads the
    EPANET output file and collects, per reporting step:

    - water-plant outlet pressure (pressure head / 100 -> MPa),
    - clear-water reservoir level (node head minus ``reservoir_datum``, m),
    - regulating-tank level (node pressure head, m).

    :param prj_name: base project name; ``<prj_name>_template`` must exist
    :param start_time: simulation start time passed to run_simulation_ex
    :param pump_control: pump control settings for the simulation
    :param reservoir_id: node id of the clear-water reservoir
    :param tank_id: node id of the regulating tank
    :param water_plant_output_id: node id of the water-plant outlet
    :param reservoir_datum: reservoir datum elevation (m) subtracted from the
        node head to obtain the water level; defaults to 250.35, the value
        that was previously hard-coded (generalized, backward compatible)
    :return: JSON string with water_plant_output_pressure, reservoir_level
        and tank_level lists
    """
    def _log(message: str) -> None:
        # Progress output timestamped in Shanghai local time.
        now = datetime.now(pytz.timezone("Asia/Shanghai"))
        print(now.strftime("%Y-%m-%d %H:%M:%S") + " -- " + message)

    _log("Start Analysis.")
    new_name = f"daily_scheduling_{prj_name}"
    # Drop any stale working copy from a previous run.
    if have_project(new_name):
        if is_project_open(new_name):
            close_project(new_name)
        delete_project(new_name)
    _log("Start Copying Database.")
    copy_project(prj_name + "_template", new_name)
    _log("Start Opening Database.")
    open_project(new_name)
    _log("Database Loading OK.")
    run_simulation_ex(
        new_name, "realtime", start_time, duration=86400, pump_control=pump_control
    )
    # run_simulation_ex may close the project; reopen before querying it.
    if not is_project_open(new_name):
        open_project(new_name)
    output = Output("./temp/{}.db.out".format(new_name))
    # [{'node': str, 'result': [{'pressure': float, 'head': float}]}]
    node_results = output.node_results()
    water_plant_output_pressure = []
    reservoir_level = []
    tank_level = []
    for node_result in node_results:
        if node_result["node"] == water_plant_output_id:
            for result in node_result["result"]:
                # plant outlet pressure: head (m) -> MPa
                water_plant_output_pressure.append(result["pressure"] / 100)
        elif node_result["node"] == reservoir_id:
            for result in node_result["result"]:
                # clear-water reservoir level (m)
                reservoir_level.append(result["head"] - reservoir_datum)
        elif node_result["node"] == tank_id:
            for result in node_result["result"]:
                # regulating-tank level (m)
                tank_level.append(result["pressure"])
    simulation_results = {
        "water_plant_output_pressure": water_plant_output_pressure,
        "reservoir_level": reservoir_level,
        "tank_level": tank_level,
    }
    # Clean up the temporary project.
    if is_project_open(new_name):
        close_project(new_name)
    delete_project(new_name)
    return json.dumps(simulation_results)

Some files were not shown because too many files have changed in this diff Show More