diff --git a/influxdb_api.py b/influxdb_api.py index c45f7e6..bcd49df 100644 --- a/influxdb_api.py +++ b/influxdb_api.py @@ -1,4 +1,13 @@ -from influxdb_client import InfluxDBClient, BucketsApi, WriteApi, OrganizationsApi, Point, QueryApi, WriteOptions, DeleteApi +from influxdb_client import ( + InfluxDBClient, + BucketsApi, + WriteApi, + OrganizationsApi, + Point, + QueryApi, + WriteOptions, + DeleteApi, +) from typing import List, Dict from datetime import datetime, timedelta, timezone from influxdb_client.client.write_api import SYNCHRONOUS, ASYNCHRONOUS @@ -24,7 +33,10 @@ import time_api url = influxdb_info.url token = influxdb_info.token org_name = influxdb_info.org -client = InfluxDBClient(url=url, token=token, org=org_name, timeout=600*1000) # 600 seconds +client = InfluxDBClient( + url=url, token=token, org=org_name, timeout=600 * 1000 +) # 600 seconds + def query_pg_scada_info_realtime(name: str) -> None: """ @@ -38,11 +50,13 @@ def query_pg_scada_info_realtime(name: str) -> None: with psycopg.connect(conn_string) as conn: with conn.cursor() as cur: # 查询 transmission_mode 为 'realtime' 的记录 - cur.execute(""" + cur.execute( + """ SELECT type, api_query_id FROM scada_info WHERE transmission_mode = 'realtime'; - """) + """ + ) records = cur.fetchall() # 清空全局列表 globals.reservoir_liquid_level_realtime_ids.clear() @@ -59,7 +73,9 @@ def query_pg_scada_info_realtime(name: str) -> None: record_type, api_query_id = record if api_query_id is not None: # 确保 api_query_id 不为空 if record_type == "reservoir_liquid_level": - globals.reservoir_liquid_level_realtime_ids.append(api_query_id) + globals.reservoir_liquid_level_realtime_ids.append( + api_query_id + ) elif record_type == "tank_liquid_level": globals.tank_liquid_level_realtime_ids.append(api_query_id) elif record_type == "fixed_pump": @@ -102,20 +118,22 @@ def query_pg_scada_info_non_realtime(name: str) -> None: close_project(name) open_project(name) dic_time = get_time(name) - globals.hydraulic_timestep = 
dic_time['HYDRAULIC TIMESTEP'] + globals.hydraulic_timestep = dic_time["HYDRAULIC TIMESTEP"] # DingZQ, 2025-03-21 - #close_project(name) + # close_project(name) # 连接数据库 conn_string = f"dbname={name} host=127.0.0.1" try: with psycopg.connect(conn_string) as conn: with conn.cursor() as cur: # 查询 transmission_mode 为 'non_realtime' 的记录 - cur.execute(""" + cur.execute( + """ SELECT type, api_query_id, transmission_frequency FROM scada_info WHERE transmission_mode = 'non_realtime'; - """) + """ + ) records = cur.fetchall() # 清空全局列表 globals.reservoir_liquid_level_non_realtime_ids.clear() @@ -133,7 +151,9 @@ def query_pg_scada_info_non_realtime(name: str) -> None: record_type, api_query_id, freq = record if api_query_id is not None: # 确保 api_query_id 不为空 if record_type == "reservoir_liquid_level": - globals.reservoir_liquid_level_non_realtime_ids.append(api_query_id) + globals.reservoir_liquid_level_non_realtime_ids.append( + api_query_id + ) elif record_type == "fixed_pump": globals.fixed_pump_non_realtime_ids.append(api_query_id) elif record_type == "variable_pump": @@ -152,7 +172,9 @@ def query_pg_scada_info_non_realtime(name: str) -> None: if freq is not None: transmission_frequencies.append(freq) # 计算 transmission_frequency 最大值 - globals.transmission_frequency = max(transmission_frequencies) if transmission_frequencies else None + globals.transmission_frequency = ( + max(transmission_frequencies) if transmission_frequencies else None + ) # 打印结果,方便调试 # print("Query completed. 
Results:") # print("Reservoir Liquid Level Non-Realtime IDs:", globals.reservoir_liquid_level_non_realtime_ids) @@ -172,28 +194,26 @@ def query_pg_scada_info_non_realtime(name: str) -> None: # 2025/03/23 def get_new_client() -> InfluxDBClient: """每次调用返回一个新的 InfluxDBClient 实例。""" - return InfluxDBClient(url=url, - token=token, - org=org_name, - enable_gzip=True, - timeout=600*1000) # 600 seconds + return InfluxDBClient( + url=url, token=token, org=org_name, enable_gzip=True, timeout=600 * 1000 + ) # 600 seconds -# 2025/04/11, DingZQ + +# 2025/04/11, DingZQ def create_write_options() -> WriteOptions: - ''' + """ 创建一个写入选项 - ''' + """ return WriteOptions( - jitter_interval=200, # 添加抖动以避免同时写入 - max_retry_delay=30000, # 最大重试延迟(毫秒) - max_retries=5, # 最大重试次数(0 表示不重试) - batch_size=10_000, # 每批次发送10,000个点 - flush_interval=10_000, # 10秒强制刷新 - retry_interval=5_000 # 失败重试间隔5秒 + jitter_interval=200, # 添加抖动以避免同时写入 + max_retry_delay=30000, # 最大重试延迟(毫秒) + max_retries=5, # 最大重试次数(0 表示不重试) + batch_size=10_000, # 每批次发送10,000个点 + flush_interval=10_000, # 10秒强制刷新 + retry_interval=5_000, # 失败重试间隔5秒 ) - # 2025/02/01 def delete_buckets(org_name: str) -> None: """ @@ -203,11 +223,15 @@ def delete_buckets(org_name: str) -> None: """ client = get_new_client() # 定义需要删除的 bucket 名称列表 - buckets_to_delete = ['SCADA_data', 'realtime_simulation_result', 'scheme_simulation_result'] + buckets_to_delete = [ + "SCADA_data", + "realtime_simulation_result", + "scheme_simulation_result", + ] buckets_api = client.buckets_api() buckets_obj = buckets_api.find_buckets(org=org_name) # 确保 buckets_obj 拥有 buckets 属性 - if hasattr(buckets_obj, 'buckets'): + if hasattr(buckets_obj, "buckets"): for bucket in buckets_obj.buckets: if bucket.name in buckets_to_delete: # 只删除特定名称的 bucket try: @@ -231,7 +255,11 @@ def create_and_initialize_buckets(org_name: str) -> None: """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + 
print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) # 先删除原有的,然后再进行初始化 # delete_buckets(org_name) @@ -253,7 +281,7 @@ def create_and_initialize_buckets(org_name: str) -> None: write_api = client.write_api( write_options=WriteOptions(batch_size=1000, flush_interval=1000), success_callback=success_callback, - error_callback=error_callback + error_callback=error_callback, ) org_api = OrganizationsApi(client) # 获取 org_id @@ -266,7 +294,7 @@ def create_and_initialize_buckets(org_name: str) -> None: buckets = [ {"name": "SCADA_data", "retention_rules": []}, {"name": "realtime_simulation_result", "retention_rules": []}, - {"name": "scheme_simulation_result", "retention_rules": []} + {"name": "scheme_simulation_result", "retention_rules": []}, ] # 创建一个临时存储点数据的列表 points_to_write = [] @@ -276,89 +304,103 @@ def create_and_initialize_buckets(org_name: str) -> None: created_bucket = bucket_api.create_bucket( bucket_name=bucket["name"], retention_rules=bucket["retention_rules"], - org_id=org_id + org_id=org_id, ) print(f"Bucket '{bucket['name']}' created with ID: {created_bucket.id}") # 根据 Bucket 初始化数据 if bucket["name"] == "SCADA_data": - point = Point("SCADA") \ - .tag("date", None) \ - .tag("description", None) \ - .tag("device_ID", None) \ - .field("monitored_value", 0.0) \ - .field("datacleaning_value", 0.0) \ - .field("simulation_value", 0.0) \ - .time("2024-11-21T00:00:00Z", write_precision='s') + point = ( + Point("SCADA") + .tag("date", None) + .tag("description", None) + .tag("device_ID", None) + .field("monitored_value", 0.0) + .field("datacleaning_value", 0.0) + .field("simulation_value", 0.0) + .time("2024-11-21T00:00:00Z", write_precision="s") + ) points_to_write.append(point) # write_api.write(bucket="SCADA_data", org=org_name, record=point) # print("Initialized SCADA_data with default structure.") - elif bucket["name"] == "realtime_simulation_result": # realtime_simulation_result - link_point = 
Point("link") \ - .tag("date", None) \ - .tag("ID", None) \ - .field("flow", 0.0) \ - .field("leakage", 0.0) \ - .field("velocity", 0.0) \ - .field("headloss", 0.0) \ - .field("status", None) \ - .field("setting", 0.0) \ - .field("quality", 0.0) \ - .field("reaction", 0.0) \ - .field("friction", 0.0) \ - .time("2024-11-21T00:00:00Z", write_precision='s') + elif ( + bucket["name"] == "realtime_simulation_result" + ): # realtime_simulation_result + link_point = ( + Point("link") + .tag("date", None) + .tag("ID", None) + .field("flow", 0.0) + .field("leakage", 0.0) + .field("velocity", 0.0) + .field("headloss", 0.0) + .field("status", None) + .field("setting", 0.0) + .field("quality", 0.0) + .field("reaction", 0.0) + .field("friction", 0.0) + .time("2024-11-21T00:00:00Z", write_precision="s") + ) points_to_write.append(link_point) - node_point = Point("node") \ - .tag("date", None) \ - .tag("ID", None) \ - .field("head", 0.0) \ - .field("pressure", 0.0) \ - .field("actualdemand", 0.0) \ - .field("demanddeficit", 0.0) \ - .field("totalExternalOutflow", 0.0) \ - .field("quality", 0.0) \ - .time("2024-11-21T00:00:00Z", write_precision='s') + node_point = ( + Point("node") + .tag("date", None) + .tag("ID", None) + .field("head", 0.0) + .field("pressure", 0.0) + .field("actualdemand", 0.0) + .field("demanddeficit", 0.0) + .field("totalExternalOutflow", 0.0) + .field("quality", 0.0) + .time("2024-11-21T00:00:00Z", write_precision="s") + ) points_to_write.append(node_point) # write_api.write(bucket="realtime_simulation_result", org=org_name, record=link_point) # write_api.write(bucket="realtime_simulation_result", org=org_name, record=node_point) # print("Initialized realtime_simulation_result with default structure.") elif bucket["name"] == "scheme_simulation_result": - link_point = Point("link") \ - .tag("date", None) \ - .tag("ID", None) \ - .tag("scheme_Type", None) \ - .tag("scheme_Name", None) \ - .field("flow", 0.0) \ - .field("leakage", 0.0) \ - .field("velocity", 
0.0) \ - .field("headloss", 0.0) \ - .field("status", None) \ - .field("setting", 0.0) \ - .field("quality", 0.0) \ - .time("2024-11-21T00:00:00Z", write_precision='s') + link_point = ( + Point("link") + .tag("date", None) + .tag("ID", None) + .tag("scheme_Type", None) + .tag("scheme_Name", None) + .field("flow", 0.0) + .field("leakage", 0.0) + .field("velocity", 0.0) + .field("headloss", 0.0) + .field("status", None) + .field("setting", 0.0) + .field("quality", 0.0) + .time("2024-11-21T00:00:00Z", write_precision="s") + ) points_to_write.append(link_point) - node_point = Point("node") \ - .tag("date", None) \ - .tag("ID", None) \ - .tag("scheme_Type", None) \ - .tag("scheme_Name", None) \ - .field("head", 0.0) \ - .field("pressure", 0.0) \ - .field("actualdemand", 0.0) \ - .field("demanddeficit", 0.0) \ - .field("totalExternalOutflow", 0.0) \ - .field("quality", 0.0) \ - .time("2024-11-21T00:00:00Z", write_precision='s') + node_point = ( + Point("node") + .tag("date", None) + .tag("ID", None) + .tag("scheme_Type", None) + .tag("scheme_Name", None) + .field("head", 0.0) + .field("pressure", 0.0) + .field("actualdemand", 0.0) + .field("demanddeficit", 0.0) + .field("totalExternalOutflow", 0.0) + .field("quality", 0.0) + .time("2024-11-21T00:00:00Z", write_precision="s") + ) points_to_write.append(node_point) - SCADA_point = Point("SCADA") \ - .tag("date", None) \ - .tag("description", None) \ - .tag("device_ID", None) \ - .tag("scheme_Type", None) \ - .tag("scheme_Name", None) \ - .field("monitored_value", 0.0) \ - .field("datacleaning_value", 0.0) \ - .field("scheme_simulation_value", 0.0) \ - .time("2024-11-21T00:00:00Z", write_precision='s') + SCADA_point = ( + Point("SCADA") + .tag("date", None) + .tag("description", None) + .tag("device_ID", None) + .tag("scheme_Type", None) + .tag("scheme_Name", None) + .field("monitored_value", 0.0) + .field("datacleaning_value", 0.0) + .field("scheme_simulation_value", 0.0) + .time("2024-11-21T00:00:00Z", 
write_precision="s") + ) points_to_write.append(SCADA_point) # write_api.write(bucket="scheme_simulation_result", org=org_name, record=link_point) # write_api.write(bucket="scheme_simulation_result", org=org_name, record=node_point) @@ -375,7 +417,9 @@ def create_and_initialize_buckets(org_name: str) -> None: client.close() -def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str = "SCADA_data") -> None: +def store_realtime_SCADA_data_to_influxdb( + get_real_value_time: str, bucket: str = "SCADA_data" +) -> None: """ 将SCADA数据通过数据接口导入数据库 :param get_real_value_time: 获取数据的时间,格式如'2024-11-25T09:00:00+08:00' @@ -384,7 +428,11 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) # 本地变量,用于记录成功写入的数据点数量 points_written = 0 @@ -404,7 +452,7 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str write_api = client.write_api( write_options=create_write_options(), success_callback=success_callback, - error_callback=error_callback + error_callback=error_callback, ) # 创建一个临时存储点数据的列表 @@ -413,11 +461,11 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str try_count = 0 reservoir_liquid_level_realtime_data_list = [] tank_liquid_level_realtime_data_list = [] - fixed_pump_realtime_data_list =[] - variable_pump_realtime_data_list =[] + fixed_pump_realtime_data_list = [] + variable_pump_realtime_data_list = [] source_outflow_realtime_data_list = [] pipe_flow_realtime_data_list = [] - pressure_realtime_data_list =[] + pressure_realtime_data_list = [] demand_realtime_data_list = [] quality_realtime_data_list = [] while try_count <= 5: # 尝试6次 ******* @@ -426,24 +474,41 @@ def 
store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if globals.reservoir_liquid_level_realtime_ids: # print(globals.reservoir_liquid_level_realtime_ids) reservoir_liquid_level_realtime_data_list = get_realValue.get_realValue( - ids=','.join(globals.reservoir_liquid_level_realtime_ids)) + ids=",".join(globals.reservoir_liquid_level_realtime_ids) + ) # print(reservoir_liquid_level_realtime_data_list) if globals.tank_liquid_level_realtime_ids: - tank_liquid_level_realtime_data_list = get_realValue.get_realValue(ids=','.join(globals.tank_liquid_level_realtime_ids)) + tank_liquid_level_realtime_data_list = get_realValue.get_realValue( + ids=",".join(globals.tank_liquid_level_realtime_ids) + ) if globals.fixed_pump_realtime_ids: - fixed_pump_realtime_data_list = get_realValue.get_realValue(ids=','.join(globals.fixed_pump_realtime_ids)) + fixed_pump_realtime_data_list = get_realValue.get_realValue( + ids=",".join(globals.fixed_pump_realtime_ids) + ) if globals.variable_pump_realtime_ids: - variable_pump_realtime_data_list = get_realValue.get_realValue(ids=','.join(globals.variable_pump_realtime_ids)) + variable_pump_realtime_data_list = get_realValue.get_realValue( + ids=",".join(globals.variable_pump_realtime_ids) + ) if globals.source_outflow_realtime_ids: - source_outflow_realtime_data_list = get_realValue.get_realValue(ids=','.join(globals.source_outflow_realtime_ids)) + source_outflow_realtime_data_list = get_realValue.get_realValue( + ids=",".join(globals.source_outflow_realtime_ids) + ) if globals.pipe_flow_realtime_ids: - pipe_flow_realtime_data_list = get_realValue.get_realValue(ids=','.join(globals.pipe_flow_realtime_ids)) + pipe_flow_realtime_data_list = get_realValue.get_realValue( + ids=",".join(globals.pipe_flow_realtime_ids) + ) if globals.pressure_realtime_ids: - pressure_realtime_data_list = get_realValue.get_realValue(ids=','.join(globals.pressure_realtime_ids)) + pressure_realtime_data_list = get_realValue.get_realValue( + 
ids=",".join(globals.pressure_realtime_ids) + ) if globals.demand_realtime_ids: - demand_realtime_data_list = get_realValue.get_realValue(ids=','.join(globals.demand_realtime_ids)) + demand_realtime_data_list = get_realValue.get_realValue( + ids=",".join(globals.demand_realtime_ids) + ) if globals.quality_realtime_ids: - quality_realtime_data_list = get_realValue.get_realValue(ids=','.join(globals.quality_realtime_ids)) + quality_realtime_data_list = get_realValue.get_realValue( + ids=",".join(globals.quality_realtime_ids) + ) except Exception as e: print(e) time.sleep(10) @@ -453,8 +518,10 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if reservoir_liquid_level_realtime_data_list: for data in reservoir_liquid_level_realtime_data_list: # 将 data['time'] 和 get_realValue_time 转换为 datetime 对象 - data_time = datetime.fromisoformat(data['time']) - get_real_value_time_dt = datetime.fromisoformat(get_real_value_time).replace(tzinfo=None) + data_time = datetime.fromisoformat(data["time"]) + get_real_value_time_dt = datetime.fromisoformat( + get_real_value_time + ).replace(tzinfo=None) # 将获取的时间转换为 UTC 时间 get_real_value_time_utc = get_real_value_time_dt.astimezone(timezone.utc) # 计算时间差(绝对值) @@ -463,17 +530,20 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if time_difference > 60: # 超过1分钟 monitored_value = None else: # 小于等于3分钟 - monitored_value = float(data['monitored_value']) + monitored_value = float(data["monitored_value"]) # 创建Point对象 point = ( - Point('reservoir_liquid_level_realtime') - .tag("date", datetime.fromisoformat(get_real_value_time).strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) + Point("reservoir_liquid_level_realtime") + .tag( + "date", + datetime.fromisoformat(get_real_value_time).strftime("%Y-%m-%d"), + ) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) .field("monitored_value", monitored_value) 
.field("datacleaning_value", None) .field("simulation_value", None) - .time(get_real_value_time_utc, write_precision='s') + .time(get_real_value_time_utc, write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -481,8 +551,10 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if tank_liquid_level_realtime_data_list: for data in tank_liquid_level_realtime_data_list: # 将 data['time'] 和 get_realValue_time 转换为 datetime 对象 - data_time = datetime.fromisoformat(data['time']) - get_real_value_time_dt = datetime.fromisoformat(get_real_value_time).replace(tzinfo=None) + data_time = datetime.fromisoformat(data["time"]) + get_real_value_time_dt = datetime.fromisoformat( + get_real_value_time + ).replace(tzinfo=None) # 将获取的时间转换为 UTC 时间 get_real_value_time_utc = get_real_value_time_dt.astimezone(timezone.utc) # 计算时间差(绝对值) @@ -491,17 +563,20 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if time_difference > 60: # 超过1分钟 monitored_value = None else: # 小于等于3分钟 - monitored_value = float(data['monitored_value']) + monitored_value = float(data["monitored_value"]) # 创建Point对象 point = ( - Point('tank_liquid_level_realtime') - .tag("date", datetime.fromisoformat(get_real_value_time).strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) + Point("tank_liquid_level_realtime") + .tag( + "date", + datetime.fromisoformat(get_real_value_time).strftime("%Y-%m-%d"), + ) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) .field("monitored_value", monitored_value) .field("datacleaning_value", None) .field("simulation_value", None) - .time(get_real_value_time_utc, write_precision='s') + .time(get_real_value_time_utc, write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -509,8 +584,10 @@ def 
store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if fixed_pump_realtime_data_list: for data in fixed_pump_realtime_data_list: # 将 data['time'] 和 get_realValue_time 转换为 datetime 对象 - data_time = datetime.fromisoformat(data['time']) - get_real_value_time_dt = datetime.fromisoformat(get_real_value_time).replace(tzinfo=None) + data_time = datetime.fromisoformat(data["time"]) + get_real_value_time_dt = datetime.fromisoformat( + get_real_value_time + ).replace(tzinfo=None) # 将获取的时间转换为 UTC 时间 get_real_value_time_utc = get_real_value_time_dt.astimezone(timezone.utc) # 计算时间差(绝对值) @@ -519,17 +596,20 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if time_difference > 60: # 超过1分钟 monitored_value = None else: # 小于等于3分钟 - monitored_value = float(data['monitored_value']) + monitored_value = float(data["monitored_value"]) # 创建Point对象 point = ( - Point('fixed_pump_realtime') - .tag("date", datetime.fromisoformat(get_real_value_time).strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) + Point("fixed_pump_realtime") + .tag( + "date", + datetime.fromisoformat(get_real_value_time).strftime("%Y-%m-%d"), + ) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) .field("monitored_value", monitored_value) .field("datacleaning_value", None) .field("simulation_value", None) - .time(get_real_value_time_utc, write_precision='s') + .time(get_real_value_time_utc, write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -537,27 +617,32 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if variable_pump_realtime_data_list: for data in variable_pump_realtime_data_list: # 将 data['time'] 和 get_realValue_time 转换为 datetime 对象 - data_time = datetime.fromisoformat(data['time']) - get_real_value_time_dt = datetime.fromisoformat(get_real_value_time).replace(tzinfo=None) + 
data_time = datetime.fromisoformat(data["time"]) + get_real_value_time_dt = datetime.fromisoformat( + get_real_value_time + ).replace(tzinfo=None) # 将获取的时间转换为 UTC 时间 get_real_value_time_utc = get_real_value_time_dt.astimezone(timezone.utc) # 计算时间差(绝对值) time_difference = abs((data_time - get_real_value_time_dt).total_seconds()) # 判断时间差是否超过1分钟 - if time_difference > 60: # 超过1分钟 + if time_difference > 60: # 超过1分钟 monitored_value = None else: # 小于等于3分钟 - monitored_value = float(data['monitored_value']) + monitored_value = float(data["monitored_value"]) # 创建Point对象 point = ( - Point('variable_pump_realtime') - .tag("date", datetime.fromisoformat(get_real_value_time).strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) + Point("variable_pump_realtime") + .tag( + "date", + datetime.fromisoformat(get_real_value_time).strftime("%Y-%m-%d"), + ) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) .field("monitored_value", monitored_value) .field("datacleaning_value", None) .field("simulation_value", None) - .time(get_real_value_time_utc, write_precision='s') + .time(get_real_value_time_utc, write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -565,8 +650,10 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if source_outflow_realtime_data_list: for data in source_outflow_realtime_data_list: # 将 data['time'] 和 get_realValue_time 转换为 datetime 对象 - data_time = datetime.fromisoformat(data['time']) - get_real_value_time_dt = datetime.fromisoformat(get_real_value_time).replace(tzinfo=None) + data_time = datetime.fromisoformat(data["time"]) + get_real_value_time_dt = datetime.fromisoformat( + get_real_value_time + ).replace(tzinfo=None) # 将获取的时间转换为 UTC 时间 get_real_value_time_utc = get_real_value_time_dt.astimezone(timezone.utc) # 计算时间差(绝对值) @@ -575,17 +662,20 @@ def 
store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if time_difference > 60: # 超过1分钟 monitored_value = None else: # 小于等于3分钟 - monitored_value = float(data['monitored_value']) + monitored_value = float(data["monitored_value"]) # 创建Point对象 point = ( - Point('source_outflow_realtime') - .tag("date", datetime.fromisoformat(get_real_value_time).strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) + Point("source_outflow_realtime") + .tag( + "date", + datetime.fromisoformat(get_real_value_time).strftime("%Y-%m-%d"), + ) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) .field("monitored_value", monitored_value) .field("datacleaning_value", None) .field("simulation_value", None) - .time(get_real_value_time_utc, write_precision='s') + .time(get_real_value_time_utc, write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -593,8 +683,10 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if pipe_flow_realtime_data_list: for data in pipe_flow_realtime_data_list: # 将 data['time'] 和 get_realValue_time 转换为 datetime 对象 - data_time = datetime.fromisoformat(data['time']) - get_real_value_time_dt = datetime.fromisoformat(get_real_value_time).replace(tzinfo=None) + data_time = datetime.fromisoformat(data["time"]) + get_real_value_time_dt = datetime.fromisoformat( + get_real_value_time + ).replace(tzinfo=None) # 将获取的时间转换为 UTC 时间 get_real_value_time_utc = get_real_value_time_dt.astimezone(timezone.utc) # 计算时间差(绝对值) @@ -603,17 +695,20 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if time_difference > 60: # 超过1分钟 monitored_value = None else: # 小于等于3分钟 - monitored_value = float(data['monitored_value']) + monitored_value = float(data["monitored_value"]) # 创建Point对象 point = ( - Point('pipe_flow_realtime') - .tag("date", 
datetime.fromisoformat(get_real_value_time).strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) + Point("pipe_flow_realtime") + .tag( + "date", + datetime.fromisoformat(get_real_value_time).strftime("%Y-%m-%d"), + ) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) .field("monitored_value", monitored_value) .field("datacleaning_value", None) .field("simulation_value", None) - .time(get_real_value_time_utc, write_precision='s') + .time(get_real_value_time_utc, write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -621,8 +716,10 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if pressure_realtime_data_list: for data in pressure_realtime_data_list: # 将 data['time'] 和 get_realValue_time 转换为 datetime 对象 - data_time = datetime.fromisoformat(data['time']) - get_real_value_time_dt = datetime.fromisoformat(get_real_value_time).replace(tzinfo=None) + data_time = datetime.fromisoformat(data["time"]) + get_real_value_time_dt = datetime.fromisoformat( + get_real_value_time + ).replace(tzinfo=None) # 将获取的时间转换为 UTC 时间 get_real_value_time_utc = get_real_value_time_dt.astimezone(timezone.utc) # 计算时间差(绝对值) @@ -631,17 +728,20 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if time_difference > 60: # 超过1分钟 monitored_value = None else: # 小于等于3分钟 - monitored_value = float(data['monitored_value']) + monitored_value = float(data["monitored_value"]) # 创建Point对象 point = ( - Point('pressure_realtime') - .tag("date", datetime.fromisoformat(get_real_value_time).strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) + Point("pressure_realtime") + .tag( + "date", + datetime.fromisoformat(get_real_value_time).strftime("%Y-%m-%d"), + ) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) 
.field("monitored_value", monitored_value) .field("datacleaning_value", None) .field("simulation_value", None) - .time(get_real_value_time_utc, write_precision='s') + .time(get_real_value_time_utc, write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -649,8 +749,10 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if demand_realtime_data_list: for data in demand_realtime_data_list: # 将 data['time'] 和 get_realValue_time 转换为 datetime 对象 - data_time = datetime.fromisoformat(data['time']) - get_real_value_time_dt = datetime.fromisoformat(get_real_value_time).replace(tzinfo=None) + data_time = datetime.fromisoformat(data["time"]) + get_real_value_time_dt = datetime.fromisoformat( + get_real_value_time + ).replace(tzinfo=None) # 将获取的时间转换为 UTC 时间 get_real_value_time_utc = get_real_value_time_dt.astimezone(timezone.utc) # 计算时间差(绝对值) @@ -659,17 +761,20 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if time_difference > 60: # 超过1分钟 monitored_value = None else: # 小于等于3分钟 - monitored_value = float(data['monitored_value']) + monitored_value = float(data["monitored_value"]) # 创建Point对象 point = ( - Point('demand_realtime') - .tag("date", datetime.fromisoformat(get_real_value_time).strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) + Point("demand_realtime") + .tag( + "date", + datetime.fromisoformat(get_real_value_time).strftime("%Y-%m-%d"), + ) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) .field("monitored_value", monitored_value) .field("datacleaning_value", None) .field("simulation_value", None) - .time(get_real_value_time_utc, write_precision='s') + .time(get_real_value_time_utc, write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -677,8 +782,10 @@ def 
store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if quality_realtime_data_list: for data in quality_realtime_data_list: # 将 data['time'] 和 get_realValue_time 转换为 datetime 对象 - data_time = datetime.fromisoformat(data['time']) - get_real_value_time_dt = datetime.fromisoformat(get_real_value_time).replace(tzinfo=None) + data_time = datetime.fromisoformat(data["time"]) + get_real_value_time_dt = datetime.fromisoformat( + get_real_value_time + ).replace(tzinfo=None) # 将获取的时间转换为 UTC 时间 get_real_value_time_utc = get_real_value_time_dt.astimezone(timezone.utc) # 计算时间差(绝对值) @@ -687,17 +794,20 @@ def store_realtime_SCADA_data_to_influxdb(get_real_value_time: str, bucket: str if time_difference > 60: # 超过1分钟 monitored_value = None else: # 小于等于3分钟 - monitored_value = float(data['monitored_value']) + monitored_value = float(data["monitored_value"]) # 创建Point对象 point = ( - Point('quality_realtime') - .tag("date", datetime.fromisoformat(get_real_value_time).strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) + Point("quality_realtime") + .tag( + "date", + datetime.fromisoformat(get_real_value_time).strftime("%Y-%m-%d"), + ) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) .field("monitored_value", monitored_value) .field("datacleaning_value", None) .field("simulation_value", None) - .time(get_real_value_time_utc, write_precision='s') + .time(get_real_value_time_utc, write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -719,13 +829,15 @@ def convert_time_format(original_time: str) -> str: :param original_time: str, “2024-04-13T08:00:00+08:00"格式的时间 :return: str,“2024-04-13 08:00:00”格式的时间 """ - new_time = original_time.replace('T', ' ') - new_time = new_time.replace('+08:00', '') + new_time = original_time.replace("T", " ") + new_time = new_time.replace("+08:00", "") return new_time # 2025/01/10 -def 
store_non_realtime_SCADA_data_to_influxdb(get_history_data_end_time: str, bucket: str = "SCADA_data") -> None: +def store_non_realtime_SCADA_data_to_influxdb( + get_history_data_end_time: str, bucket: str = "SCADA_data" +) -> None: """ 获取某段时间内传回的scada数据 :param get_history_data_end_time: 获取历史数据的终止时间时间,格式如'2024-11-25T09:00:00+08:00' @@ -734,7 +846,11 @@ def store_non_realtime_SCADA_data_to_influxdb(get_history_data_end_time: str, bu """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) # 本地变量,用于记录成功写入的数据点数量 points_written = 0 @@ -758,19 +874,23 @@ def store_non_realtime_SCADA_data_to_influxdb(get_history_data_end_time: str, bu write_api = client.write_api( write_options=create_write_options(), success_callback=success_callback, - error_callback=error_callback + error_callback=error_callback, ) # 创建一个临时存储点数据的列表 points_to_write = [] # 将end_date字符串转换为datetime对象 - end_date_dt = datetime.strptime(convert_time_format(get_history_data_end_time), '%Y-%m-%d %H:%M:%S') - end_date = end_date_dt.strftime('%Y-%m-%d %H:%M:%S') + end_date_dt = datetime.strptime( + convert_time_format(get_history_data_end_time), "%Y-%m-%d %H:%M:%S" + ) + end_date = end_date_dt.strftime("%Y-%m-%d %H:%M:%S") # 将transmission_frequency字符串转换为timedelta对象 - transmission_frequency_dt = datetime.strptime(globals.transmission_frequency, '%H:%M:%S') - datetime(1900, 1, 1) + transmission_frequency_dt = datetime.strptime( + globals.transmission_frequency, "%H:%M:%S" + ) - datetime(1900, 1, 1) get_history_data_start_time = end_date_dt - transmission_frequency_dt - begin_date = get_history_data_start_time.strftime('%Y-%m-%d %H:%M:%S') + begin_date = get_history_data_start_time.strftime("%Y-%m-%d %H:%M:%S") # print(begin_date) # print(end_date) reservoir_liquid_level_non_realtime_data_list = [] @@ -789,52 
+909,72 @@ def store_non_realtime_SCADA_data_to_influxdb(get_history_data_end_time: str, bu # reservoir_liquid_level_non_realtime_data_list = get_data.get_history_data( # ids=','.join(reservoir_liquid_level_non_realtime_ids), begin_date=begin_date, end_date=end_date, downsample='1m') if globals.reservoir_liquid_level_non_realtime_ids: - reservoir_liquid_level_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.reservoir_liquid_level_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + reservoir_liquid_level_non_realtime_data_list = ( + get_data.get_history_data( + ids=",".join(globals.reservoir_liquid_level_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) + ) if globals.tank_liquid_level_non_realtime_ids: tank_liquid_level_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.tank_liquid_level_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.tank_liquid_level_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.fixed_pump_non_realtime_ids: fixed_pump_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.fixed_pump_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.fixed_pump_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.variable_pump_non_realtime_ids: variable_pump_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.variable_pump_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.variable_pump_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.source_outflow_non_realtime_ids: source_outflow_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.source_outflow_non_realtime_ids), - 
begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.source_outflow_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.pipe_flow_non_realtime_ids: pipe_flow_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.pipe_flow_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.pipe_flow_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) # print(pipe_flow_non_realtime_data_list) if globals.pressure_non_realtime_ids: pressure_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.pressure_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.pressure_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) # print(pressure_non_realtime_data_list) if globals.demand_non_realtime_ids: demand_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.demand_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.demand_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.quality_non_realtime_ids: quality_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.quality_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.quality_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) except Exception as e: print(f"Attempt {try_count} failed with error: {e}") if try_count < 5: @@ -849,14 +989,14 @@ def store_non_realtime_SCADA_data_to_influxdb(get_history_data_end_time: str, bu for data in reservoir_liquid_level_non_realtime_data_list: # 创建Point对象 point = ( - Point('reservoir_liquid_level_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", 
data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("reservoir_liquid_level_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -864,14 +1004,14 @@ def store_non_realtime_SCADA_data_to_influxdb(get_history_data_end_time: str, bu for data in tank_liquid_level_non_realtime_data_list: # 创建Point对象 point = ( - Point('tank_liquid_level_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("tank_liquid_level_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -879,14 +1019,14 @@ def store_non_realtime_SCADA_data_to_influxdb(get_history_data_end_time: str, bu for data in fixed_pump_non_realtime_data_list: # 创建Point对象 point = ( - Point('fixed_pump_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("fixed_pump_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + 
.tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -894,14 +1034,14 @@ def store_non_realtime_SCADA_data_to_influxdb(get_history_data_end_time: str, bu for data in variable_pump_non_realtime_data_list: # 创建Point对象 point = ( - Point('variable_pump_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("variable_pump_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -909,14 +1049,14 @@ def store_non_realtime_SCADA_data_to_influxdb(get_history_data_end_time: str, bu for data in source_outflow_non_realtime_data_list: # 创建Point对象 point = ( - Point('source_outflow_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("source_outflow_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], 
write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -924,14 +1064,14 @@ def store_non_realtime_SCADA_data_to_influxdb(get_history_data_end_time: str, bu for data in pipe_flow_non_realtime_data_list: # 创建Point对象 point = ( - Point('pipe_flow_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("pipe_flow_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -939,14 +1079,14 @@ def store_non_realtime_SCADA_data_to_influxdb(get_history_data_end_time: str, bu for data in pressure_non_realtime_data_list: # 创建Point对象 point = ( - Point('pressure_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("pressure_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -954,14 +1094,14 @@ def store_non_realtime_SCADA_data_to_influxdb(get_history_data_end_time: str, bu 
for data in demand_non_realtime_data_list: # 创建Point对象 point = ( - Point('demand_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("demand_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -969,14 +1109,14 @@ def store_non_realtime_SCADA_data_to_influxdb(get_history_data_end_time: str, bu for data in quality_non_realtime_data_list: # 创建Point对象 point = ( - Point('quality_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("quality_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -994,7 +1134,9 @@ def store_non_realtime_SCADA_data_to_influxdb(get_history_data_end_time: str, bu # 2025/03/01 -def download_history_data_manually(begin_time: str, end_time: str, bucket: str = "SCADA_data") -> None: +def download_history_data_manually( + begin_time: str, end_time: str, bucket: str = "SCADA_data" +) -> None: """ 获取某个时间段内所有SCADA设备的历史数据,非实时执行,手动补充数据版 
:param begin_time: 获取历史数据的开始时间,格式如'2024-11-25T09:00:00+08:00' @@ -1004,7 +1146,11 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) # 本地变量,用于记录成功写入的数据点数量 points_written = 0 @@ -1019,6 +1165,7 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = def error_callback(exception): print("Error writing batch:", exception) + # write_options = WriteOptions( # jitter_interval=200, # 添加抖动以避免同时写入 # max_retry_delay=30000 # 最大重试延迟(毫秒) @@ -1028,7 +1175,7 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = write_api = client.write_api( write_options=create_write_options(), success_callback=success_callback, - error_callback=error_callback + error_callback=error_callback, ) # 创建一个临时存储点数据的列表 points_to_write = [] @@ -1038,11 +1185,11 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = reservoir_liquid_level_realtime_data_list = [] tank_liquid_level_realtime_data_list = [] - fixed_pump_realtime_data_list =[] - variable_pump_realtime_data_list =[] + fixed_pump_realtime_data_list = [] + variable_pump_realtime_data_list = [] source_outflow_realtime_data_list = [] pipe_flow_realtime_data_list = [] - pressure_realtime_data_list =[] + pressure_realtime_data_list = [] demand_realtime_data_list = [] quality_realtime_data_list = [] @@ -1062,98 +1209,136 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = try_count += 1 if globals.reservoir_liquid_level_realtime_ids: reservoir_liquid_level_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.reservoir_liquid_level_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + 
ids=",".join(globals.reservoir_liquid_level_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.tank_liquid_level_realtime_ids: tank_liquid_level_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.tank_liquid_level_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.tank_liquid_level_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.fixed_pump_realtime_ids: fixed_pump_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.fixed_pump_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.fixed_pump_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.variable_pump_realtime_ids: variable_pump_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.variable_pump_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.variable_pump_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.source_outflow_realtime_ids: source_outflow_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.source_outflow_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.source_outflow_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.pipe_flow_realtime_ids: pipe_flow_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.pipe_flow_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.pipe_flow_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.pressure_realtime_ids: pressure_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.pressure_realtime_ids), - begin_date=begin_date, end_date=end_date, - 
downsample='1m') + ids=",".join(globals.pressure_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.demand_realtime_ids: demand_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.demand_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.demand_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.quality_realtime_ids: quality_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.quality_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.quality_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) # reservoir_liquid_level_non_realtime_data_list = get_data.get_history_data( # ids=','.join(reservoir_liquid_level_non_realtime_ids), begin_date=begin_date, end_date=end_date, downsample='1m') if globals.reservoir_liquid_level_non_realtime_ids: - reservoir_liquid_level_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.reservoir_liquid_level_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + reservoir_liquid_level_non_realtime_data_list = ( + get_data.get_history_data( + ids=",".join(globals.reservoir_liquid_level_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) + ) if globals.tank_liquid_level_non_realtime_ids: tank_liquid_level_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.tank_liquid_level_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.tank_liquid_level_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.fixed_pump_non_realtime_ids: fixed_pump_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.fixed_pump_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - 
downsample='1m') + ids=",".join(globals.fixed_pump_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.variable_pump_non_realtime_ids: variable_pump_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.variable_pump_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.variable_pump_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.source_outflow_non_realtime_ids: source_outflow_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.source_outflow_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.source_outflow_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.pipe_flow_non_realtime_ids: pipe_flow_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.pipe_flow_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.pipe_flow_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) # print(pipe_flow_non_realtime_data_list) if globals.pressure_non_realtime_ids: pressure_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.pressure_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.pressure_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) # print(pressure_non_realtime_data_list) if globals.demand_non_realtime_ids: demand_non_realtime_data_list = get_data.get_history_data( - ids=','.join(globals.demand_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.demand_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) if globals.quality_non_realtime_ids: quality_non_realtime_data_list = 
get_data.get_history_data( - ids=','.join(globals.quality_non_realtime_ids), - begin_date=begin_date, end_date=end_date, - downsample='1m') + ids=",".join(globals.quality_non_realtime_ids), + begin_date=begin_date, + end_date=end_date, + downsample="1m", + ) except Exception as e: print(f"Attempt {try_count} failed with error: {e}") if try_count < 5: @@ -1169,14 +1354,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in reservoir_liquid_level_realtime_data_list: # 创建Point对象 point = ( - Point('reservoir_liquid_level_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("reservoir_liquid_level_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1184,14 +1369,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in tank_liquid_level_realtime_data_list: # 创建Point对象 point = ( - Point('tank_liquid_level_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("tank_liquid_level_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') 
+ .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1199,14 +1384,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in fixed_pump_realtime_data_list: # 创建Point对象 point = ( - Point('fixed_pump_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("fixed_pump_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1214,14 +1399,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in variable_pump_realtime_data_list: # 创建Point对象 point = ( - Point('variable_pump_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("variable_pump_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1229,14 +1414,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in 
source_outflow_realtime_data_list: # 创建Point对象 point = ( - Point('source_outflow_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("source_outflow_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1244,14 +1429,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in pipe_flow_realtime_data_list: # 创建Point对象 point = ( - Point('pipe_flow_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("pipe_flow_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1259,14 +1444,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in pressure_realtime_data_list: # 创建Point对象 point = ( - Point('pressure_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", 
float(data['monitored_value'])) + Point("pressure_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1274,14 +1459,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in demand_realtime_data_list: # 创建Point对象 point = ( - Point('demand_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("demand_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1289,14 +1474,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in quality_realtime_data_list: # 创建Point对象 point = ( - Point('quality_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("quality_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) 
.field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1304,14 +1489,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in reservoir_liquid_level_non_realtime_data_list: # 创建Point对象 point = ( - Point('reservoir_liquid_level_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("reservoir_liquid_level_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1319,14 +1504,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in tank_liquid_level_non_realtime_data_list: # 创建Point对象 point = ( - Point('tank_liquid_level_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("tank_liquid_level_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, 
org=org_name, record=point) @@ -1334,14 +1519,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in fixed_pump_non_realtime_data_list: # 创建Point对象 point = ( - Point('fixed_pump_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("fixed_pump_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1349,14 +1534,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in variable_pump_non_realtime_data_list: # 创建Point对象 point = ( - Point('variable_pump_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("variable_pump_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1364,14 +1549,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in source_outflow_non_realtime_data_list: # 创建Point对象 point = ( - 
Point('source_outflow_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("source_outflow_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1379,14 +1564,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in pipe_flow_non_realtime_data_list: # 创建Point对象 point = ( - Point('pipe_flow_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("pipe_flow_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1394,14 +1579,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in pressure_non_realtime_data_list: # 创建Point对象 point = ( - Point('pressure_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + 
Point("pressure_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1409,14 +1594,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in demand_non_realtime_data_list: # 创建Point对象 point = ( - Point('demand_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("demand_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) .field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1424,14 +1609,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = for data in quality_non_realtime_data_list: # 创建Point对象 point = ( - Point('quality_non_realtime') - .tag("date", data['time'].strftime('%Y-%m-%d')) - .tag("description", data['description']) - .tag("device_ID", data['device_ID']) - .field("monitored_value", float(data['monitored_value'])) + Point("quality_non_realtime") + .tag("date", data["time"].strftime("%Y-%m-%d")) + .tag("description", data["description"]) + .tag("device_ID", data["device_ID"]) + .field("monitored_value", float(data["monitored_value"])) .field("datacleaning_value", None) 
.field("simulation_value", None) - .time(data['time'], write_precision='s') + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) @@ -1447,11 +1632,14 @@ def download_history_data_manually(begin_time: str, end_time: str, bucket: str = client.close() + ########################SCADA############################################################################################################ -# DingZQ, 2025-03-08 -def query_all_SCADA_records_by_date(query_date: str, bucket: str="SCADA_data") -> list[dict[str, float]]: +# DingZQ, 2025-03-08 +def query_all_SCADA_records_by_date( + query_date: str, bucket: str = "SCADA_data" +) -> list[dict[str, float]]: """ 根据日期查询所有SCADA数据 @@ -1463,12 +1651,22 @@ def query_all_SCADA_records_by_date(query_date: str, bucket: str="SCADA_data") - """ client = get_new_client() - if client.ping(): print("{} -- Successfully connected to InfluxDB.".format( datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) - else: print("{} -- Failed to connect to InfluxDB.".format( datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + if client.ping(): + print( + "{} -- Successfully connected to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) + else: + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() # 将北京时间转换为 UTC 时间 - + bg_start_time, bg_end_time = time_api.parse_beijing_date_range(query_date) # bg_end_time = bg_start_time + timedelta(hours=2) # 服务器性能不行,暂时返回2个小时的数据 utc_start_time = bg_start_time.astimezone(timezone.utc) @@ -1480,12 +1678,12 @@ def query_all_SCADA_records_by_date(query_date: str, bucket: str="SCADA_data") - SCADA_results = [] # 构建 Flux 查询语句 - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {utc_start_time.isoformat()}, stop: {utc_end_time.isoformat()}) |> filter(fn: (r) => r["_field"] == "monitored_value") |> sort(columns: 
["_time"], desc: false) - ''' + """ # 执行查询 try: @@ -1499,12 +1697,12 @@ def query_all_SCADA_records_by_date(query_date: str, bucket: str="SCADA_data") - # 获取字段 "_value" 即为 monitored_value monitored_value = record.get_value() rec = { - "ID": record['device_ID'], # 是api_query 而不是 普通的Id - "time": record.get_time(), - record['_measurement']: monitored_value + "ID": record["device_ID"], # 是api_query 而不是 普通的Id + "time": record.get_time(), + record["_measurement"]: monitored_value, } SCADA_results.append(rec) - + except Exception as e: print(f"Error querying InfluxDB for date {query_date}: {e}") @@ -1513,8 +1711,9 @@ def query_all_SCADA_records_by_date(query_date: str, bucket: str="SCADA_data") - return SCADA_results - -def query_SCADA_data_by_device_ID_and_time(query_ids_list: List[str], query_time: str, bucket: str="SCADA_data") -> Dict[str, float]: +def query_SCADA_data_by_device_ID_and_time( + query_ids_list: List[str], query_time: str, bucket: str = "SCADA_data" +) -> Dict[str, float]: """ 根据SCADA设备的ID和时间查询值 :param query_ids_list: SCADA设备ID的列表 @@ -1524,7 +1723,11 @@ def query_SCADA_data_by_device_ID_and_time(query_ids_list: List[str], query_time """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() # 将北京时间转换为 UTC 时间 @@ -1536,11 +1739,11 @@ def query_SCADA_data_by_device_ID_and_time(query_ids_list: List[str], query_time SCADA_result_dict = {} for device_id in query_ids_list: # 构建 Flux 查询语句 - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()}) |> filter(fn: (r) => r["device_ID"] == "{device_id}" and r["_field"] == "monitored_value") - ''' + """ # 执行查询 try: result = query_api.query(flux_query) @@ -1559,12 +1762,17 @@ def 
query_SCADA_data_by_device_ID_and_time(query_ids_list: List[str], query_time print(f"Error querying InfluxDB for device ID {device_id}: {e}") SCADA_result_dict[device_id] = None client.close() - + return SCADA_result_dict -def query_scheme_SCADA_data_by_device_ID_and_time(query_ids_list: List[str], query_time: str, scheme_Type: str, - scheme_Name: str, bucket: str="scheme_simulation_result") -> Dict[str, float]: +def query_scheme_SCADA_data_by_device_ID_and_time( + query_ids_list: List[str], + query_time: str, + scheme_Type: str, + scheme_Name: str, + bucket: str = "scheme_simulation_result", +) -> Dict[str, float]: """ 根据SCADA设备的ID和时间查询方案中的值 :param query_ids_list: SCADA设备ID的列表 @@ -1574,7 +1782,11 @@ def query_scheme_SCADA_data_by_device_ID_and_time(query_ids_list: List[str], que """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() # 将北京时间转换为 UTC 时间 @@ -1586,11 +1798,11 @@ def query_scheme_SCADA_data_by_device_ID_and_time(query_ids_list: List[str], que SCADA_result_dict = {} for device_id in query_ids_list: # 构建 Flux 查询语句 - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()}) |> filter(fn: (r) => r["device_ID"] == "{device_id}" and r["_field"] == "monitored_value" and r["scheme_Type"] == "{scheme_Type}" and r["scheme_Name"] == "{scheme_Name}") - ''' + """ # 执行查询 try: result = query_api.query(flux_query) @@ -1608,17 +1820,23 @@ def query_scheme_SCADA_data_by_device_ID_and_time(query_ids_list: List[str], que except Exception as e: print(f"Error querying InfluxDB for device ID {device_id}: {e}") SCADA_result_dict[device_id] = None - + client.close() - + return SCADA_result_dict + # 2025/03/14 # DingZQ # 
返回SCADA数据的原始值,其中可能包含了异常值跟缺失值,我们需要再后续曲线中修复 # 缺失值 # 异常值 -def query_SCADA_data_by_device_ID_and_timerange(query_ids_list: List[str], start_time: str, end_time: str, bucket: str="SCADA_data"): +def query_SCADA_data_by_device_ID_and_timerange( + query_ids_list: List[str], + start_time: str, + end_time: str, + bucket: str = "SCADA_data", +): """ 查询指定时间范围内,多个SCADA设备的数据,用于漏损定位 :param query_ids_list: SCADA设备ID的列表 @@ -1629,11 +1847,15 @@ def query_SCADA_data_by_device_ID_and_timerange(query_ids_list: List[str], start """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() - print('start_time', start_time) - print('end_time', end_time) + print("start_time", start_time) + print("end_time", end_time) # 将北京时间转换为 UTC 时间 # beijing_start_time = datetime.fromisoformat(start_time) # utc_start_time = beijing_start_time.astimezone(timezone.utc) - timedelta(seconds=1) @@ -1642,13 +1864,13 @@ def query_SCADA_data_by_device_ID_and_timerange(query_ids_list: List[str], start # utc_end_time = beijing_end_time.astimezone(timezone.utc) + timedelta(seconds=1) # print(utc_end_time) beijing_start_time = datetime.fromisoformat(start_time) - print('beijing_start_time', beijing_start_time) + print("beijing_start_time", beijing_start_time) utc_start_time = time_api.to_utc_time(beijing_start_time) - print('utc_start_time', utc_start_time) + print("utc_start_time", utc_start_time) beijing_end_time = datetime.fromisoformat(end_time) - print('beijing_end_time', beijing_end_time) + print("beijing_end_time", beijing_end_time) utc_stop_time = time_api.to_utc_time(beijing_end_time) - print('utc_stop_time', utc_stop_time) + print("utc_stop_time", utc_stop_time) SCADA_dict = {} for device_id in query_ids_list: # flux_query = f''' @@ -1658,31 +1880,36 @@ def 
query_SCADA_data_by_device_ID_and_timerange(query_ids_list: List[str], start # |> pivot(rowKey: ["_time"], columnKey: ["device_ID"], valueColumn: "_value") # |> sort(columns: ["_time"]) # ''' - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()}) |> filter(fn: (r) => r["device_ID"] == "{device_id}" and r["_field"] == "monitored_value") |> sort(columns: ["_time"]) - ''' + """ # 执行查询,返回一个 FluxTable 列表 tables = query_api.query(flux_query) records_list = [] for table in tables: for record in table.records: # 获取记录的时间和监测值 - records_list.append({ - "time": record["_time"], - "value": record["_value"] - }) + records_list.append( + {"time": record["_time"], "value": record["_value"]} + ) SCADA_dict[device_id] = records_list client.close() return SCADA_dict + # 2025/05/04 DingZQ # SCADA 原始数据有异常偏离,返回的是一个list,list的内容是清洗后的正常值,表示为 time + value -def query_cleaning_SCADA_data_by_device_ID_and_timerange(query_ids_list: List[str], start_time: str, end_time: str, bucket: str="SCADA_data"): +def query_cleaning_SCADA_data_by_device_ID_and_timerange( + query_ids_list: List[str], + start_time: str, + end_time: str, + bucket: str = "SCADA_data", +): """ 查询指定时间范围内,多个SCADA设备的修复的单个的数据 :param query_ids_list: SCADA设备ID的列表 @@ -1693,29 +1920,33 @@ def query_cleaning_SCADA_data_by_device_ID_and_timerange(query_ids_list: List[st """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() - print('start_time', start_time) - print('end_time', end_time) + print("start_time", start_time) + print("end_time", end_time) # 将北京时间转换为 UTC 时间 beijing_start_time = datetime.fromisoformat(start_time) - print('beijing_start_time', beijing_start_time) + print("beijing_start_time", 
beijing_start_time) utc_start_time = time_api.to_utc_time(beijing_start_time) - print('utc_start_time', utc_start_time) + print("utc_start_time", utc_start_time) beijing_end_time = datetime.fromisoformat(end_time) - print('beijing_end_time', beijing_end_time) + print("beijing_end_time", beijing_end_time) utc_stop_time = time_api.to_utc_time(beijing_end_time) - print('utc_stop_time', utc_stop_time) + print("utc_stop_time", utc_stop_time) SCADA_dict = {} for device_id in query_ids_list: - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()}) |> filter(fn: (r) => r["device_ID"] == "{device_id}" and r["_field"] == "datacleaning_value") |> sort(columns: ["_time"]) - ''' + """ # 执行查询,返回一个 FluxTable 列表 tables = query_api.query(flux_query) print(tables) @@ -1723,19 +1954,24 @@ def query_cleaning_SCADA_data_by_device_ID_and_timerange(query_ids_list: List[st for table in tables: for record in table.records: # 获取记录的时间和监测值 - records_list.append({ - "time": record["_time"], - "value": record["_value"] - }) + records_list.append( + {"time": record["_time"], "value": record["_value"]} + ) SCADA_dict[device_id] = records_list client.close() return SCADA_dict + # 2025/05/04 DingZQ # SCADA 数据原版缺失,根据历史数据的平均值补上缺失的部分 -def query_filling_SCADA_data_by_device_ID_and_timerange(query_ids_list: List[str], start_time: str, end_time: str, bucket: str="SCADA_data"): +def query_filling_SCADA_data_by_device_ID_and_timerange( + query_ids_list: List[str], + start_time: str, + end_time: str, + bucket: str = "SCADA_data", +): """ 查询指定时间范围内,多个SCADA设备的填补的单个的数据 :param query_ids_list: SCADA设备ID的列表 @@ -1746,30 +1982,34 @@ def query_filling_SCADA_data_by_device_ID_and_timerange(query_ids_list: List[str """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + 
datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() - print('start_time', start_time) - print('end_time', end_time) + print("start_time", start_time) + print("end_time", end_time) # 将北京时间转换为 UTC 时间 beijing_start_time = datetime.fromisoformat(start_time) - print('beijing_start_time', beijing_start_time) + print("beijing_start_time", beijing_start_time) utc_start_time = time_api.to_utc_time(beijing_start_time) - print('utc_start_time', utc_start_time) + print("utc_start_time", utc_start_time) beijing_end_time = datetime.fromisoformat(end_time) - print('beijing_end_time', beijing_end_time) + print("beijing_end_time", beijing_end_time) utc_stop_time = time_api.to_utc_time(beijing_end_time) - print('utc_stop_time', utc_stop_time) + print("utc_stop_time", utc_stop_time) SCADA_dict = {} for device_id in query_ids_list: - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()}) |> filter(fn: (r) => r["device_ID"] == "{device_id}" and r["_field"] == "datafilling_value") |> sort(columns: ["_time"]) - ''' + """ # 执行查询,返回一个 FluxTable 列表 tables = query_api.query(flux_query) print(tables) @@ -1777,19 +2017,24 @@ def query_filling_SCADA_data_by_device_ID_and_timerange(query_ids_list: List[str for table in tables: for record in table.records: # 获取记录的时间和监测值 - records_list.append({ - "time": record["_time"], - "value": record["_value"] - }) + records_list.append( + {"time": record["_time"], "value": record["_value"]} + ) SCADA_dict[device_id] = records_list client.close() return SCADA_dict + # 2025/05/04 DingZQ # 是把原始数据跟清洗后的数据合并到一起,暂时不需要用这个API -def query_cleaned_SCADA_data_by_device_ID_and_timerange(query_ids_list: List[str], start_time: str, end_time: str, bucket: str="SCADA_data"): +def query_cleaned_SCADA_data_by_device_ID_and_timerange( + query_ids_list: List[str], + start_time: str, + end_time: str, + bucket: str = "SCADA_data", +): """ 
查询指定时间范围内,多个SCADA设备的清洗完毕后的完整数据 :param query_ids_list: SCADA设备ID的列表 @@ -1800,28 +2045,32 @@ def query_cleaned_SCADA_data_by_device_ID_and_timerange(query_ids_list: List[str """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() - print('start_time', start_time) - print('end_time', end_time) + print("start_time", start_time) + print("end_time", end_time) # 将北京时间转换为 UTC 时间 beijing_start_time = datetime.fromisoformat(start_time) - print('beijing_start_time', beijing_start_time) + print("beijing_start_time", beijing_start_time) utc_start_time = time_api.to_utc_time(beijing_start_time) - print('utc_start_time', utc_start_time) + print("utc_start_time", utc_start_time) beijing_end_time = datetime.fromisoformat(end_time) - print('beijing_end_time', beijing_end_time) + print("beijing_end_time", beijing_end_time) utc_stop_time = time_api.to_utc_time(beijing_end_time) - print('utc_stop_time', utc_stop_time) + print("utc_stop_time", utc_stop_time) SCADA_dict = {} for device_id in query_ids_list: - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()}) |> filter(fn: (r) => r["device_ID"] == "{device_id}" and r["_field"] == "cleaned_value") |> sort(columns: ["_time"]) - ''' + """ # 执行查询,返回一个 FluxTable 列表 tables = query_api.query(flux_query) print(tables) @@ -1829,10 +2078,9 @@ def query_cleaned_SCADA_data_by_device_ID_and_timerange(query_ids_list: List[str for table in tables: for record in table.records: # 获取记录的时间和监测值 - records_list.append({ - "time": record["_time"], - "value": record["_value"] - }) + records_list.append( + {"time": record["_time"], "value": record["_value"]} + ) SCADA_dict[device_id] = records_list client.close() @@ -1841,7 +2089,9 @@ 
def query_cleaned_SCADA_data_by_device_ID_and_timerange(query_ids_list: List[str # DingZQ, 2025-02-15 -def query_SCADA_data_by_device_ID_and_date(query_ids_list: List[str], query_date: str, bucket: str="SCADA_data") -> list[dict[str, float]]: +def query_SCADA_data_by_device_ID_and_date( + query_ids_list: List[str], query_date: str, bucket: str = "SCADA_data" +) -> list[dict[str, float]]: """ 根据SCADA设备的ID和日期查询值 :param query_ids_list: SCADA设备ID的列表, 是api_query 而不是 普通的Id @@ -1852,14 +2102,19 @@ def query_SCADA_data_by_device_ID_and_date(query_ids_list: List[str], query_date """ start_time, end_time = time_api.parse_beijing_date_range(query_date) - - return query_SCADA_data_by_device_ID_and_timerange(query_ids_list, str(start_time), str(end_time), bucket) + return query_SCADA_data_by_device_ID_and_timerange( + query_ids_list, str(start_time), str(end_time), bucket + ) # 2025/02/01 -def store_realtime_simulation_result_to_influxdb(node_result_list: List[Dict[str, any]], link_result_list: List[Dict[str, any]], - result_start_time: str, bucket: str = "realtime_simulation_result"): +def store_realtime_simulation_result_to_influxdb( + node_result_list: List[Dict[str, any]], + link_result_list: List[Dict[str, any]], + result_start_time: str, + bucket: str = "realtime_simulation_result", +): """ 将实时模拟计算结果数据存储到 InfluxDB 的realtime_simulation_result这个bucket中。 :param node_result_list: (List[Dict[str, any]]): 包含节点和结果数据的字典列表。 @@ -1870,9 +2125,16 @@ def store_realtime_simulation_result_to_influxdb(node_result_list: List[Dict[str """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) - print("store_realtime_simulation_result_to_influxdb : result_start_time ", result_start_time) + print( + "store_realtime_simulation_result_to_influxdb : result_start_time ", + result_start_time, 
+ ) # 本地变量,用于记录成功写入的数据点数量 points_written = 0 @@ -1894,51 +2156,57 @@ def store_realtime_simulation_result_to_influxdb(node_result_list: List[Dict[str write_api = client.write_api( write_options=create_write_options(), success_callback=success_callback, - error_callback=error_callback + error_callback=error_callback, ) # 创建一个临时存储点数据的列表 points_to_write = [] - date_str = result_start_time.split('T')[0] + date_str = result_start_time.split("T")[0] print("store_realtime_simulation_result_to_influxdb : date_str ", date_str) - time_beijing = datetime.strptime(result_start_time, '%Y-%m-%dT%H:%M:%S%z').isoformat() + time_beijing = datetime.strptime( + result_start_time, "%Y-%m-%dT%H:%M:%S%z" + ).isoformat() for result in node_result_list: # 提取节点信息和结果数据 - node_id = result.get('node') - data_list = result.get('result', []) + node_id = result.get("node") + data_list = result.get("result", []) for data in data_list: # 构建 Point 数据,多个 field 存在于一个数据点中 - node_point = Point("node") \ - .tag("date", date_str) \ - .tag("ID", node_id) \ - .field("head", data.get('head', 0.0)) \ - .field("pressure", data.get('pressure', 0.0)) \ - .field("actualdemand", data.get('demand', 0.0)) \ - .field("demanddeficit", None) \ - .field("totalExternalOutflow", None) \ - .field("quality", data.get('quality', 0.0)) \ - .time(time_beijing, write_precision='s') + node_point = ( + Point("node") + .tag("date", date_str) + .tag("ID", node_id) + .field("head", data.get("head", 0.0)) + .field("pressure", data.get("pressure", 0.0)) + .field("actualdemand", data.get("demand", 0.0)) + .field("demanddeficit", None) + .field("totalExternalOutflow", None) + .field("quality", data.get("quality", 0.0)) + .time(time_beijing, write_precision="s") + ) points_to_write.append(node_point) # 写入数据到 InfluxDB,多个 field 在同一个 point 中 # write_api.write(bucket=bucket, org=org_name, record=node_point) # write_api.flush() # print(f"成功将 {len(node_result_list)} 条node数据写入 InfluxDB。") for result in link_result_list: - link_id = 
result.get('link') - data_list = result.get('result', []) + link_id = result.get("link") + data_list = result.get("result", []) for data in data_list: - link_point = Point("link") \ - .tag("date", date_str) \ - .tag("ID", link_id) \ - .field("flow", data.get('flow', 0.0)) \ - .field("velocity", data.get('velocity', 0.0)) \ - .field("headloss", data.get('headloss', 0.0)) \ - .field("quality", data.get('quality', 0.0)) \ - .field("status", data.get('status', "UNKNOWN")) \ - .field("setting", data.get('setting', 0.0)) \ - .field("reaction", data.get('reaction', 0.0)) \ - .field("friction", data.get('friction', 0.0)) \ - .time(time_beijing, write_precision='s') + link_point = ( + Point("link") + .tag("date", date_str) + .tag("ID", link_id) + .field("flow", data.get("flow", 0.0)) + .field("velocity", data.get("velocity", 0.0)) + .field("headloss", data.get("headloss", 0.0)) + .field("quality", data.get("quality", 0.0)) + .field("status", data.get("status", "UNKNOWN")) + .field("setting", data.get("setting", 0.0)) + .field("reaction", data.get("reaction", 0.0)) + .field("friction", data.get("friction", 0.0)) + .time(time_beijing, write_precision="s") + ) points_to_write.append(link_point) # write_api.write(bucket=bucket, org=org_name, record=link_point) # write_api.flush() @@ -1951,7 +2219,7 @@ def store_realtime_simulation_result_to_influxdb(node_result_list: List[Dict[str except Exception as e: client.close() raise RuntimeError(f"数据写入 InfluxDB 时发生错误: {e}") - + time.sleep(10) print("Total points written:", points_written) @@ -1960,7 +2228,9 @@ def store_realtime_simulation_result_to_influxdb(node_result_list: List[Dict[str # 2025/02/01 -def query_latest_record_by_ID(ID: str, type: str, bucket: str="realtime_simulation_result") -> dict: +def query_latest_record_by_ID( + ID: str, type: str, bucket: str = "realtime_simulation_result" +) -> dict: """ 查询指定ID的最新的一条记录 :param ID: (str): 要查询的 ID。 @@ -1970,11 +2240,15 @@ def query_latest_record_by_ID(ID: str, type: str, bucket: 
str="realtime_simulati """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() if type == "node": - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: -1d, stop: now()) // 查找最近七天的记录 |> filter(fn: (r) => r["_measurement"] == "node" and r["ID"] == "{ID}") @@ -1986,7 +2260,7 @@ def query_latest_record_by_ID(ID: str, type: str, bucket: str="realtime_simulati |> group() // 将所有数据聚合到同一个 group |> sort(columns: ["_time"], desc: true) |> limit(n: 1) - ''' + """ tables = query_api.query(flux_query) # 解析查询结果 for table in tables: @@ -1999,10 +2273,10 @@ def query_latest_record_by_ID(ID: str, type: str, bucket: str="realtime_simulati "actualdemand": record["actualdemand"], # "demanddeficit": record["demanddeficit"], # "totalExternalOutflow": record["totalExternalOutflow"], - "quality": record["quality"] + "quality": record["quality"], } elif type == "link": - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: -1d, stop: now()) // 查找最近七天的记录 |> filter(fn: (r) => r["_measurement"] == "link" and r["ID"] == "{ID}") @@ -2014,7 +2288,7 @@ def query_latest_record_by_ID(ID: str, type: str, bucket: str="realtime_simulati |> group() // 将所有数据聚合到同一个 group |> sort(columns: ["_time"], desc: true) |> limit(n: 1) - ''' + """ tables = query_api.query(flux_query) # 解析查询结果 for table in tables: @@ -2029,14 +2303,16 @@ def query_latest_record_by_ID(ID: str, type: str, bucket: str="realtime_simulati "status": record["status"], "setting": record["setting"], "reaction": record["reaction"], - "friction": record["friction"] + "friction": record["friction"], } client.close() return None # 如果没有找到记录 # 2025/02/01 -def query_all_records_by_time(query_time: str, bucket: str="realtime_simulation_result") -> tuple: +def 
query_all_records_by_time( + query_time: str, bucket: str = "realtime_simulation_result" +) -> tuple: """ 查询指定北京时间的所有记录,包括 'node' 和 'link' measurement,分别以指定格式返回。 :param query_time: (str): 输入的北京时间,格式为 '2024-11-24T17:30:00+08:00'。 @@ -2045,7 +2321,11 @@ def query_all_records_by_time(query_time: str, bucket: str="realtime_simulation_ """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() # 将北京时间转换为 UTC 时间 @@ -2054,7 +2334,7 @@ def query_all_records_by_time(query_time: str, bucket: str="realtime_simulation_ utc_start_time = utc_time - timedelta(seconds=1) utc_stop_time = utc_time + timedelta(seconds=1) # 构建 Flux 查询语句 - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()}) |> filter(fn: (r) => r["_measurement"] == "node" or r["_measurement"] == "link") @@ -2063,7 +2343,7 @@ def query_all_records_by_time(query_time: str, bucket: str="realtime_simulation_ columnKey:["_field"], valueColumn:"_value" ) - ''' + """ # 执行查询 tables = query_api.query(flux_query) node_records = [] @@ -2075,34 +2355,43 @@ def query_all_records_by_time(query_time: str, bucket: str="realtime_simulation_ measurement = record["_measurement"] # 处理 node 数据 if measurement == "node": - node_records.append({ - "time": record["_time"], - "ID": record["ID"], - "head": record["head"], - "pressure": record["pressure"], - "actualdemand": record["actualdemand"], - "quality": record["quality"] - }) + node_records.append( + { + "time": record["_time"], + "ID": record["ID"], + "head": record["head"], + "pressure": record["pressure"], + "actualdemand": record["actualdemand"], + "quality": record["quality"], + } + ) # 处理 link 数据 elif measurement == "link": - link_records.append({ - "time": 
record["_time"], - "ID": record["ID"], - "flow": record["flow"], - "velocity": record["velocity"], - "headloss": record["headloss"], - "quality": record["quality"], - "status": record["status"], - "setting": record["setting"], - "reaction": record["reaction"], - "friction": record["friction"] - }) + link_records.append( + { + "time": record["_time"], + "ID": record["ID"], + "flow": record["flow"], + "velocity": record["velocity"], + "headloss": record["headloss"], + "quality": record["quality"], + "status": record["status"], + "setting": record["setting"], + "reaction": record["reaction"], + "friction": record["friction"], + } + ) client.close() return node_records, link_records # 2025/03/03 -def query_all_record_by_time_property(query_time: str, type: str, property: str, bucket: str="realtime_simulation_result") -> list: +def query_all_record_by_time_property( + query_time: str, + type: str, + property: str, + bucket: str = "realtime_simulation_result", +) -> list: """ 查询指定北京时间的所有记录,查询 'node' 或 'link' 的某一属性值,以指定格式返回。 :param query_time: (str): 输入的北京时间,格式为 '2024-11-24T17:30:00+08:00'。 @@ -2113,7 +2402,11 @@ def query_all_record_by_time_property(query_time: str, type: str, property: str, """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() # 确定 measurement @@ -2129,11 +2422,11 @@ def query_all_record_by_time_property(query_time: str, type: str, property: str, utc_start_time = utc_time - timedelta(seconds=1) utc_stop_time = utc_time + timedelta(seconds=1) # 构建 Flux 查询语句 - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()}) |> filter(fn: (r) => r["_measurement"] == "{measurement}" and r["_field"] == "{property}") - ''' + """ # 执行查询 tables 
= query_api.query(flux_query) result_records = [] @@ -2141,16 +2434,247 @@ def query_all_record_by_time_property(query_time: str, type: str, property: str, for table in tables: for record in table.records: # print(record.values) # 打印完整记录内容 - result_records.append({ - "ID": record["ID"], - "value": record["_value"] - }) + result_records.append({"ID": record["ID"], "value": record["_value"]}) client.close() return result_records +def query_all_scheme_record_by_time_property( + query_time: str, + type: str, + property: str, + scheme_name: str, + bucket: str = "scheme_simulation_result", +) -> list: + """ + 查询指定北京时间的所有记录,查询 'node' 或 'link' 的某一属性值,以指定格式返回(新版本)。 + + :param query_time: (str): 输入的北京时间,格式为 '2024-11-24T17:30:00+08:00'。 + :param type: (str): 查询的类型(决定 measurement),'node' 或 'link' + :param property: (str): 查询的字段名称(field) + :param scheme_name: (str): 方案名称(如 "FANGAN1761124840355") + :param bucket: (str): 数据存储的 bucket 名称。 + :return: list(dict): result_records + """ + client = get_new_client() + if not client.ping(): + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) + + query_api = client.query_api() + + # 确定 measurement + if type == "node": + measurement = "node" + elif type == "link": + measurement = "link" + else: + raise ValueError(f"不支持的类型: {type}") + + # 将北京时间转换为 UTC 时间 + beijing_time = datetime.fromisoformat(query_time) + utc_time = beijing_time.astimezone(timezone.utc) + utc_start_time = utc_time - timedelta(seconds=1) + utc_stop_time = utc_time + timedelta(seconds=1) + # 构建 Flux 查询语句 + flux_query = f""" + from(bucket: "{bucket}") + |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()}) + |> filter(fn: (r) => r["scheme_Name"] == "{scheme_name}" and r["_measurement"] == "{measurement}" and r["_field"] == "{property}") + """ + # 执行查询 + tables = query_api.query(flux_query) + + result_records = [] + + # 解析查询结果 + for table in tables: + for record in table.records: + 
result_records.append({"ID": record["ID"], "value": record["_value"]}) + + client.close() + return result_records + + +def query_scheme_simulation_result_by_ID_time( + scheme_name: str, + ID: str, + type: str, + query_time: str, + bucket: str = "scheme_simulation_result", +) -> list[dict]: + """ + 查询指定ID在指定时间的记录 + :param ID: (str): 要查询的 ID。 + :param type: (str): "node"或“link” + :param query_time: (str): 查询的时间,格式为 '2024-11-24T17:30:00+08:00'。 + :param bucket: (str): 数据存储的 bucket 名称。 + :return: list[dict]: 指定时间的记录数据列表,如果没有找到则返回空列表。 + """ + client = get_new_client() + if not client.ping(): + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) + + query_api = client.query_api() + # 将北京时间转换为 UTC 时间 + beijing_time = datetime.fromisoformat(query_time) + utc_time = beijing_time.astimezone(timezone.utc) + utc_start_time = utc_time - timedelta(seconds=1) + utc_stop_time = utc_time + timedelta(seconds=1) + results = [] + if type == "node": + flux_query = f""" + from(bucket: "{bucket}") + |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()}) + |> filter(fn: (r) => r["scheme_Name"] == "{scheme_name}" and r["_measurement"] == "node" and r["ID"] == "{ID}") + |> pivot( + rowKey:["_time"], + columnKey:["_field"], + valueColumn:"_value" + ) + """ + tables = query_api.query(flux_query) + # 解析查询结果 + for table in tables: + for record in table.records: + results.append( + { + "time": record["_time"], + "ID": ID, + "head": record["head"], + "pressure": record["pressure"], + "actualdemand": record["actualdemand"], + "quality": record["quality"], + } + ) + elif type == "link": + flux_query = f""" + from(bucket: "{bucket}") + |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()}) + |> filter(fn: (r) => r["scheme_Name"] == "{scheme_name}" and r["_measurement"] == "link" and r["ID"] == "{ID}") + |> pivot( + rowKey:["_time"], + columnKey:["_field"], + valueColumn:"_value" + ) + """ + 
def query_simulation_result_by_ID_time(
    ID: str, type: str, query_time: str, bucket: str = "realtime_simulation_result"
) -> list[dict]:
    """
    Query the realtime-simulation record for one element at a given instant.

    :param ID: element ID to query.
    :param type: "node" or "link" (selects the measurement).
    :param query_time: instant to query, e.g. '2024-11-24T17:30:00+08:00'.
    :param bucket: InfluxDB bucket holding the realtime simulation results.
    :return: list of record dicts for that instant; empty if nothing matched.
    """
    client = get_new_client()
    if not client.ping():
        print(
            "{} -- Failed to connect to InfluxDB.".format(
                datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            )
        )

    query_api = client.query_api()
    # Convert the Beijing-time instant to UTC and widen it by +/-1 s so the
    # point written at exactly this second falls inside the Flux range.
    moment_utc = datetime.fromisoformat(query_time).astimezone(timezone.utc)
    window_start = moment_utc - timedelta(seconds=1)
    window_stop = moment_utc + timedelta(seconds=1)

    results = []
    if type == "node":
        flux_query = f"""
        from(bucket: "{bucket}")
        |> range(start: {window_start.isoformat()}, stop: {window_stop.isoformat()})
        |> filter(fn: (r) => r["_measurement"] == "node" and r["ID"] == "{ID}")
        |> pivot(
            rowKey:["_time"],
            columnKey:["_field"],
            valueColumn:"_value"
            )
        """
        for table in query_api.query(flux_query):
            for record in table.records:
                row = {"time": record["_time"], "ID": ID}
                for field in ("head", "pressure", "actualdemand", "quality"):
                    row[field] = record[field]
                results.append(row)
    elif type == "link":
        flux_query = f"""
        from(bucket: "{bucket}")
        |> range(start: {window_start.isoformat()}, stop: {window_stop.isoformat()})
        |> filter(fn: (r) => r["_measurement"] == "link" and r["ID"] == "{ID}")
        |> pivot(
            rowKey:["_time"],
            columnKey:["_field"],
            valueColumn:"_value"
            )
        """
        for table in query_api.query(flux_query):
            for record in table.records:
                row = {"time": record["_time"], "ID": ID}
                for field in (
                    "flow",
                    "velocity",
                    "headloss",
                    "quality",
                    "status",
                    "setting",
                    "reaction",
                    "friction",
                ):
                    row[field] = record[field]
                results.append(row)
    client.close()
    return results
def query_all_records_by_date(
    query_date: str, bucket: str = "realtime_simulation_result"
) -> tuple:
    """
    Query all 'node' and 'link' records for one Beijing date.

    :param query_date: date string, e.g. '2025-02-14'.
    :param bucket: InfluxDB bucket to query.
    :return: (node_records, link_records) -- two lists of record dicts.
    """
    client = get_new_client()
    # 记录开始时间
    time_cost_start = time.perf_counter()
    print(
        "{} -- query_all_records_by_date started.".format(
            datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        )
    )
    if not client.ping():
        print(
            "{} -- Failed to connect to InfluxDB.".format(
                datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            )
        )

    query_api = client.query_api()

    bg_start_time, bg_end_time = time_api.parse_beijing_date_range(
        query_date=query_date
    )
    utc_start_time = time_api.to_utc_time(bg_start_time)
    utc_stop_time = time_api.to_utc_time(bg_end_time)

    print("bg_start_time", bg_start_time)
    print("bg_end_time", bg_end_time)
    print("utc_start_time", utc_start_time)
    print("utc_stop_time", utc_stop_time)
    print("utc_start_time.isoformat", utc_start_time.isoformat())
    print("utc_stop_time.isoformat", utc_stop_time.isoformat())

    # BUGFIX: the measurement test must be parenthesised -- Flux gives `and`
    # higher precedence than `or`, so the original filter
    # (node or (link and date == …)) let every "node" row through regardless
    # of its "date" tag.
    flux_query = f"""
    from(bucket: "{bucket}")
    |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()})
    |> filter(fn: (r) => (r["_measurement"] == "node" or r["_measurement"] == "link") and r["date"] == "{query_date}")
    |> pivot(
        rowKey:["_time"],
        columnKey:["_field"],
        valueColumn:"_value"
    )
    """
    # 执行查询
    tables = query_api.query(flux_query)
    node_records = []
    link_records = []
    for table in tables:
        for record in table.records:
            measurement = record["_measurement"]
            # 处理 node 数据
            if measurement == "node":
                node_records.append(
                    {
                        "time": record["_time"],
                        "ID": record["ID"],
                        "head": record["head"],
                        "pressure": record["pressure"],
                        "actualdemand": record["actualdemand"],
                        "quality": record["quality"],
                    }
                )
            # 处理 link 数据
            elif measurement == "link":
                link_records.append(
                    {
                        "time": record["_time"],
                        "ID": record["ID"],
                        "flow": record["flow"],
                        "velocity": record["velocity"],
                        "headloss": record["headloss"],
                        "quality": record["quality"],
                        "status": record["status"],
                        "setting": record["setting"],
                        "reaction": record["reaction"],
                        "friction": record["friction"],
                    }
                )
    time_cost_end = time.perf_counter()
    print(
        "{} -- query_all_records_by_date finished, cost time: {:.2f} s.".format(
            datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S"),
            time_cost_end - time_cost_start,
        )
    )
    client.close()
    return node_records, link_records
def query_all_records_by_time_range(
    starttime: str, endtime: str, bucket: str = "realtime_simulation_result"
) -> tuple:
    """
    Query all 'node' and 'link' records inside a time range.

    :param starttime: range start, e.g. '2025-02-14T16:00:00+08:00'.
    :param endtime: range end, same format.
    :param bucket: InfluxDB bucket to query.
    :return: (node_records, link_records) -- two lists of record dicts.
    """
    client = get_new_client()
    # 记录开始时间
    time_cost_start = time.perf_counter()
    # BUGFIX: the start/finish log lines said "query_all_records_by_date"
    # (copy-paste from the sibling function); label them with this function.
    print(
        "{} -- query_all_records_by_time_range started.".format(
            datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        )
    )
    if not client.ping():
        print(
            "{} -- Failed to connect to InfluxDB.".format(
                datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            )
        )

    query_api = client.query_api()

    # NOTE(review): these four lines sat in an omitted diff hunk in the
    # reviewed source; reconstructed from the visible debug prints and the
    # sibling query_all_records_by_date -- confirm against the repository.
    bg_start_time = datetime.fromisoformat(starttime)
    bg_end_time = datetime.fromisoformat(endtime)
    utc_start_time = time_api.to_utc_time(bg_start_time)
    utc_stop_time = time_api.to_utc_time(bg_end_time)

    print("bg_start_time", bg_start_time)
    print("bg_end_time", bg_end_time)
    print("utc_start_time", utc_start_time)
    print("utc_stop_time", utc_stop_time)
    print("utc_start_time.isoformat", utc_start_time.isoformat())
    print("utc_stop_time.isoformat", utc_stop_time.isoformat())

    # BUGFIX: the original f-string referenced {query_date}, a name that does
    # not exist in this function -- building flux_query raised NameError.
    # This function selects by time range only, so the stale date-tag filter
    # is dropped (the measurement test no longer needs the precedence fix,
    # but keep it unambiguous anyway).
    flux_query = f"""
    from(bucket: "{bucket}")
    |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()})
    |> filter(fn: (r) => r["_measurement"] == "node" or r["_measurement"] == "link")
    |> pivot(
        rowKey:["_time"],
        columnKey:["_field"],
        valueColumn:"_value"
    )
    """
    # 执行查询
    tables = query_api.query(flux_query)
    node_records = []
    link_records = []
    for table in tables:
        for record in table.records:
            measurement = record["_measurement"]
            # 处理 node 数据
            if measurement == "node":
                node_records.append(
                    {
                        "time": record["_time"],
                        "ID": record["ID"],
                        "head": record["head"],
                        "pressure": record["pressure"],
                        "actualdemand": record["actualdemand"],
                        "quality": record["quality"],
                    }
                )
            # 处理 link 数据
            elif measurement == "link":
                link_records.append(
                    {
                        "time": record["_time"],
                        "ID": record["ID"],
                        "flow": record["flow"],
                        "velocity": record["velocity"],
                        "headloss": record["headloss"],
                        "quality": record["quality"],
                        "status": record["status"],
                        "setting": record["setting"],
                        "reaction": record["reaction"],
                        "friction": record["friction"],
                    }
                )
    time_cost_end = time.perf_counter()
    print(
        "{} -- query_all_records_by_time_range finished, cost time: {:.2f} s.".format(
            datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S"),
            time_cost_end - time_cost_start,
        )
    )
    client.close()
    return node_records, link_records
def query_all_records_by_date_with_type(
    query_date: str, query_type: str, bucket: str = "realtime_simulation_result"
) -> list:
    """
    Query every record of one measurement ("node" or "link") for a Beijing date.

    :param query_date: date string, e.g. '2025-02-14'.
    :param query_type: measurement name, "node" or "link".
    :param bucket: InfluxDB bucket to query.
    :return: list of record dicts for the requested measurement.
    """
    client = get_new_client()
    # Timing is captured but (as before) not reported anywhere.
    time_cost_start = time.perf_counter()

    if not client.ping():
        print(
            "{} -- Failed to connect to InfluxDB.".format(
                datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            )
        )

    query_api = client.query_api()

    bg_start_time, bg_end_time = time_api.parse_beijing_date_range(
        query_date=query_date
    )
    utc_start_time = time_api.to_utc_time(bg_start_time)
    utc_stop_time = time_api.to_utc_time(bg_end_time)

    # Debug output, unchanged in content.
    for label, value in (
        ("bg_start_time", bg_start_time),
        ("bg_end_time", bg_end_time),
        ("utc_start_time", utc_start_time),
        ("utc_stop_time", utc_stop_time),
        ("utc_start_time.isoformat", utc_start_time.isoformat()),
        ("utc_stop_time.isoformat", utc_stop_time.isoformat()),
        ("measurement", query_type),
    ):
        print(label, value)

    flux_query = f"""
    from(bucket: "{bucket}")
    |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()})
    |> filter(fn: (r) => r["_measurement"] == "{query_type}" and r["date"] == "{query_date}")
    |> pivot(
        rowKey:["_time"],
        columnKey:["_field"],
        valueColumn:"_value"
    )
    """
    tables = query_api.query(flux_query)

    node_fields = ("head", "pressure", "actualdemand", "quality")
    link_fields = (
        "flow",
        "velocity",
        "headloss",
        "quality",
        "status",
        "setting",
        "reaction",
        "friction",
    )
    result_records = []
    for table in tables:
        for record in table.records:
            measurement = record["_measurement"]
            if measurement == "node":
                fields = node_fields
            elif measurement == "link":
                fields = link_fields
            else:
                continue
            entry = {"time": record["_time"], "ID": record["ID"]}
            for field in fields:
                entry[field] = record[field]
            result_records.append(entry)

    time_cost_end = time.perf_counter()
    client.close()
    return result_records
def query_all_record_by_date_property(
    query_date: str,
    type: str,
    property: str,
    bucket: str = "realtime_simulation_result",
) -> list:
    """
    Query one property of every 'node' or 'link' record for a Beijing date.

    :param query_date: date string, e.g. '2025-02-14'.
    :param type: "node" or "link".
    :param property: field name to fetch, e.g. 'pressure'.
    :param bucket: InfluxDB bucket to query.
    :return: list of {"ID", "time", "value"} dicts.
    :raises ValueError: if type is neither "node" nor "link".
    """
    client = get_new_client()
    # 记录开始时间
    time_cost_start = time.perf_counter()
    # BUGFIX: the start/finish log lines said "Hydraulic simulation" --
    # copy-pasted from the simulation code; label them with this function.
    print(
        "{} -- query_all_record_by_date_property started.".format(
            datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")
        )
    )
    if not client.ping():
        print(
            "{} -- Failed to connect to InfluxDB.".format(
                datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            )
        )

    query_api = client.query_api()
    # 确定 measurement.  NOTE(review): this if/elif sat in an omitted diff
    # hunk; reconstructed from the visible `else: raise` branch -- confirm.
    if type == "node":
        measurement = "node"
    elif type == "link":
        measurement = "link"
    else:
        raise ValueError(f"不支持的类型: {type}")
    # 将 start_date 的北京时间转换为 UTC 时间:
    # Beijing 00:00 on query_date == 16:00 UTC on the previous day.
    start_time = (
        (datetime.strptime(query_date, "%Y-%m-%d") - timedelta(days=1))
        .replace(hour=16, minute=0, second=0, tzinfo=timezone.utc)
        .isoformat()
    )
    stop_time = (
        datetime.strptime(query_date, "%Y-%m-%d")
        .replace(hour=15, minute=59, second=59, tzinfo=timezone.utc)
        .isoformat()
    )
    # 构建 Flux 查询语句
    flux_query = f"""
    from(bucket: "{bucket}")
    |> range(start: {start_time}, stop: {stop_time})
    |> filter(fn: (r) => r["_measurement"] == "{measurement}" and r["date"] == "{query_date}" and r["_field"] == "{property}")
    """
    # 执行查询
    tables = query_api.query(flux_query)
    result_records = []
    for table in tables:
        for record in table.records:
            result_records.append(
                {"ID": record["ID"], "time": record["_time"], "value": record["_value"]}
            )
    time_cost_end = time.perf_counter()
    print(
        "{} -- query_all_record_by_date_property finished, cost time: {:.2f} s.".format(
            datetime.now(pytz.timezone("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S"),
            time_cost_end - time_cost_start,
        )
    )
    client.close()
    return result_records
property: str, star """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() # 确定 measurement @@ -2483,32 +3092,43 @@ def query_curve_by_ID_property_daterange(ID: str, type: str, property: str, star # start_time = previous_day.isoformat() + "T16:00:00Z" # stop_time = datetime.strptime(end_date, "%Y-%m-%d").isoformat() + "T15:59:59Z" # 将 start_date 的北京时间转换为 UTC 时间范围 - start_time = (datetime.strptime(start_date, "%Y-%m-%d") - timedelta(days=1)).replace(hour=16, minute=0, second=0, tzinfo=timezone.utc).isoformat() - stop_time = datetime.strptime(end_date, "%Y-%m-%d").replace(hour=15, minute=59, second=59, tzinfo=timezone.utc).isoformat() + start_time = ( + (datetime.strptime(start_date, "%Y-%m-%d") - timedelta(days=1)) + .replace(hour=16, minute=0, second=0, tzinfo=timezone.utc) + .isoformat() + ) + stop_time = ( + datetime.strptime(end_date, "%Y-%m-%d") + .replace(hour=15, minute=59, second=59, tzinfo=timezone.utc) + .isoformat() + ) # 构建 Flux 查询语句 - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {start_time}, stop: {stop_time}) |> filter(fn: (r) => r["_measurement"] == "{measurement}" and r["ID"] == "{ID}" and r["_field"] == "{property}") - ''' + """ # 执行查询 tables = query_api.query(flux_query) # 解析查询结果 results = [] for table in tables: for record in table.records: - results.append({ - "time": record["_time"], - "value": record["_value"] - }) + results.append({"time": record["_time"], "value": record["_value"]}) client.close() return results # 2025/02/13 -def store_scheme_simulation_result_to_influxdb(node_result_list: List[Dict[str, any]], link_result_list: List[Dict[str, any]], - scheme_start_time: str, num_periods: int = 1, scheme_Type: str = None, scheme_Name: str = None, - bucket: str = 
"scheme_simulation_result"): +def store_scheme_simulation_result_to_influxdb( + node_result_list: List[Dict[str, any]], + link_result_list: List[Dict[str, any]], + scheme_start_time: str, + num_periods: int = 1, + scheme_Type: str = None, + scheme_Name: str = None, + bucket: str = "scheme_simulation_result", +): """ 将方案模拟计算结果存入 InfluxuDb 的scheme_simulation_result这个bucket中。 :param node_result_list: (List[Dict[str, any]]): 包含节点和结果数据的字典列表。 @@ -2522,7 +3142,11 @@ def store_scheme_simulation_result_to_influxdb(node_result_list: List[Dict[str, """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) try: # 本地变量,用于记录成功写入的数据点数量 @@ -2538,6 +3162,7 @@ def store_scheme_simulation_result_to_influxdb(node_result_list: List[Dict[str, def error_callback(exception): print("Error writing batch:", exception) + # write_options = WriteOptions( # jitter_interval=200, # 添加抖动以避免同时写入 # max_retry_delay=30000 # 最大重试延迟(毫秒) @@ -2546,59 +3171,67 @@ def store_scheme_simulation_result_to_influxdb(node_result_list: List[Dict[str, write_api = client.write_api( write_options=create_write_options(), success_callback=success_callback, - error_callback=error_callback + error_callback=error_callback, ) # 创建一个临时存储点数据的列表 points_to_write = [] - date_str = scheme_start_time.split('T')[0] - time_beijing = datetime.strptime(scheme_start_time, '%Y-%m-%dT%H:%M:%S%z') - timestep_parts = globals.hydraulic_timestep.split(':') - timestep = timedelta(hours=int(timestep_parts[0]), minutes=int(timestep_parts[1]), seconds=int(timestep_parts[2])) + date_str = scheme_start_time.split("T")[0] + time_beijing = datetime.strptime(scheme_start_time, "%Y-%m-%dT%H:%M:%S%z") + timestep_parts = globals.hydraulic_timestep.split(":") + timestep = timedelta( + hours=int(timestep_parts[0]), + minutes=int(timestep_parts[1]), + 
seconds=int(timestep_parts[2]), + ) for node_result in node_result_list: # 提取节点信息和数据结果 - node_id = node_result.get('node') + node_id = node_result.get("node") # 从period 0 到 period num_period - 1 for period_index in range(num_periods): scheme_time = (time_beijing + (timestep * period_index)).isoformat() - data_list = [node_result.get('result', [])[period_index]] + data_list = [node_result.get("result", [])[period_index]] for data in data_list: # 构建 Point 数据,多个 field 存在于一个数据点中 - node_point = Point("node") \ - .tag("date", date_str) \ - .tag("ID", node_id) \ - .tag("scheme_Type", scheme_Type) \ - .tag("scheme_Name", scheme_Name) \ - .field("head", data.get('head', 0.0)) \ - .field("pressure", data.get('pressure', 0.0)) \ - .field("actualdemand", data.get('demand', 0.0)) \ - .field("demanddeficit", None) \ - .field("totalExternalOutflow", None) \ - .field("quality", data.get('quality', 0.0)) \ - .time(scheme_time, write_precision='s') + node_point = ( + Point("node") + .tag("date", date_str) + .tag("ID", node_id) + .tag("scheme_Type", scheme_Type) + .tag("scheme_Name", scheme_Name) + .field("head", data.get("head", 0.0)) + .field("pressure", data.get("pressure", 0.0)) + .field("actualdemand", data.get("demand", 0.0)) + .field("demanddeficit", None) + .field("totalExternalOutflow", None) + .field("quality", data.get("quality", 0.0)) + .time(scheme_time, write_precision="s") + ) points_to_write.append(node_point) # 写入数据到 InfluxDB,多个 field 在同一个 point 中 # write_api.write(bucket=bucket, org=org_name, record=node_point) # write_api.flush() for link_result in link_result_list: - link_id = link_result.get('link') + link_id = link_result.get("link") for period_index in range(num_periods): scheme_time = (time_beijing + (timestep * period_index)).isoformat() - data_list = [link_result.get('result', [])[period_index]] + data_list = [link_result.get("result", [])[period_index]] for data in data_list: - link_point = Point("link") \ - .tag("date", date_str) \ - .tag("ID", link_id) \ 
- .tag("scheme_Type", scheme_Type) \ - .tag("scheme_Name", scheme_Name) \ - .field("flow", data.get('flow', 0.0)) \ - .field("velocity", data.get('velocity', 0.0)) \ - .field("headloss", data.get('headloss', 0.0)) \ - .field("quality", data.get('quality', 0.0)) \ - .field("status", data.get('status', "UNKNOWN")) \ - .field("setting", data.get('setting', 0.0)) \ - .field("reaction", data.get('reaction', 0.0)) \ - .field("friction", data.get('friction', 0.0)) \ - .time(scheme_time, write_precision='s') + link_point = ( + Point("link") + .tag("date", date_str) + .tag("ID", link_id) + .tag("scheme_Type", scheme_Type) + .tag("scheme_Name", scheme_Name) + .field("flow", data.get("flow", 0.0)) + .field("velocity", data.get("velocity", 0.0)) + .field("headloss", data.get("headloss", 0.0)) + .field("quality", data.get("quality", 0.0)) + .field("status", data.get("status", "UNKNOWN")) + .field("setting", data.get("setting", 0.0)) + .field("reaction", data.get("reaction", 0.0)) + .field("friction", data.get("friction", 0.0)) + .time(scheme_time, write_precision="s") + ) points_to_write.append(link_point) # write_api.write(bucket=bucket, org=org_name, record=link_point) # write_api.flush() @@ -2631,21 +3264,29 @@ def query_corresponding_query_id_and_element_id(name: str) -> None: with psycopg.connect(conn_string) as conn: with conn.cursor() as cur: # 查询 transmission_mode 为 'realtime' 的记录 - cur.execute(""" + cur.execute( + """ SELECT type, associated_element_id, api_query_id FROM scada_info WHERE type IN ('source_outflow', 'pipe_flow', 'demand', 'pressure', 'quality'); - """) + """ + ) records = cur.fetchall() # 遍历查询结果,根据 type 分类存入对应的字典 for record in records: record_type, associated_element_id, api_query_id = record if record_type == "source_outflow": - globals.scheme_source_outflow_ids[api_query_id] = associated_element_id + globals.scheme_source_outflow_ids[api_query_id] = ( + associated_element_id + ) elif record_type == "pipe_flow": - 
globals.scheme_pipe_flow_ids[api_query_id] = associated_element_id + globals.scheme_pipe_flow_ids[api_query_id] = ( + associated_element_id + ) elif record_type == "pressure": - globals.scheme_pressure_ids[api_query_id] = associated_element_id + globals.scheme_pressure_ids[api_query_id] = ( + associated_element_id + ) elif record_type == "demand": globals.scheme_demand_ids[api_query_id] = associated_element_id elif record_type == "quality": @@ -2664,12 +3305,13 @@ def query_corresponding_query_id_and_element_id(name: str) -> None: # def auto_get_burst_flow(): - - - # 2025/03/11 -def fill_scheme_simulation_result_to_SCADA(scheme_Type: str = None, scheme_Name: str = None, query_date: str = None, - bucket: str = "scheme_simulation_result"): +def fill_scheme_simulation_result_to_SCADA( + scheme_Type: str = None, + scheme_Name: str = None, + query_date: str = None, + bucket: str = "scheme_simulation_result", +): """ :param scheme_Type: 方案类型 :param scheme_Name: 方案名称 @@ -2679,7 +3321,11 @@ def fill_scheme_simulation_result_to_SCADA(scheme_Type: str = None, scheme_Name: """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) # 本地变量,用于记录成功写入的数据点数量 points_written = 0 @@ -2702,88 +3348,118 @@ def fill_scheme_simulation_result_to_SCADA(scheme_Type: str = None, scheme_Name: write_api = client.write_api( write_options=create_write_options(), success_callback=success_callback, - error_callback=error_callback + error_callback=error_callback, ) # 创建一个临时存储点数据的列表 points_to_write = [] # 查找associated_element_id的对应值 for key, value in globals.scheme_source_outflow_ids.items(): - scheme_source_outflow_result = (query_scheme_curve_by_ID_property(scheme_Type=scheme_Type, scheme_Name=scheme_Name, - query_date=query_date, ID=value, type='link', property='flow')) + 
scheme_source_outflow_result = query_scheme_curve_by_ID_property( + scheme_Type=scheme_Type, + scheme_Name=scheme_Name, + query_date=query_date, + ID=value, + type="link", + property="flow", + ) # print(f"Key: {key}, Query result: {scheme_source_outflow_result}") # 调试输出 for data in scheme_source_outflow_result: point = ( - Point('scheme_source_outflow') + Point("scheme_source_outflow") .tag("date", query_date) .tag("device_ID", key) .tag("scheme_Type", scheme_Type) .tag("scheme_Name", scheme_Name) - .field("monitored_value", data['value']) - .time(data['time'], write_precision='s') + .field("monitored_value", data["value"]) + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) for key, value in globals.scheme_pipe_flow_ids.items(): - scheme_pipe_flow_result = (query_scheme_curve_by_ID_property(scheme_Type=scheme_Type, scheme_Name=scheme_Name, - query_date=query_date, ID=value, type='link', property='flow')) + scheme_pipe_flow_result = query_scheme_curve_by_ID_property( + scheme_Type=scheme_Type, + scheme_Name=scheme_Name, + query_date=query_date, + ID=value, + type="link", + property="flow", + ) for data in scheme_pipe_flow_result: point = ( - Point('scheme_pipe_flow') + Point("scheme_pipe_flow") .tag("date", query_date) .tag("device_ID", key) .tag("scheme_Type", scheme_Type) .tag("scheme_Name", scheme_Name) - .field("monitored_value", data['value']) - .time(data['time'], write_precision='s') + .field("monitored_value", data["value"]) + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) for key, value in globals.scheme_pressure_ids.items(): - scheme_pressure_result = (query_scheme_curve_by_ID_property(scheme_Type=scheme_Type, scheme_Name=scheme_Name, - query_date=query_date, ID=value, type='node', property='pressure')) + scheme_pressure_result = query_scheme_curve_by_ID_property( + 
scheme_Type=scheme_Type, + scheme_Name=scheme_Name, + query_date=query_date, + ID=value, + type="node", + property="pressure", + ) for data in scheme_pressure_result: point = ( - Point('scheme_pressure') + Point("scheme_pressure") .tag("date", query_date) .tag("device_ID", key) .tag("scheme_Type", scheme_Type) .tag("scheme_Name", scheme_Name) - .field("monitored_value", data['value']) - .time(data['time'], write_precision='s') + .field("monitored_value", data["value"]) + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) for key, value in globals.scheme_demand_ids.items(): - scheme_demand_result = (query_scheme_curve_by_ID_property(scheme_Type=scheme_Type, scheme_Name=scheme_Name, - query_date=query_date, ID=value, type='node', property='actualdemand')) + scheme_demand_result = query_scheme_curve_by_ID_property( + scheme_Type=scheme_Type, + scheme_Name=scheme_Name, + query_date=query_date, + ID=value, + type="node", + property="actualdemand", + ) for data in scheme_demand_result: point = ( - Point('scheme_demand') + Point("scheme_demand") .tag("date", query_date) .tag("device_ID", key) .tag("scheme_Type", scheme_Type) .tag("scheme_Name", scheme_Name) - .field("monitored_value", data['value']) - .time(data['time'], write_precision='s') + .field("monitored_value", data["value"]) + .time(data["time"], write_precision="s") ) points_to_write.append(point) # write_api.write(bucket=bucket, org=org_name, record=point) for key, value in globals.scheme_quality_ids.items(): - scheme_quality_result = (query_scheme_curve_by_ID_property(scheme_Type=scheme_Type, scheme_Name=scheme_Name, - query_date=query_date, ID=value, type='node', property='quality')) + scheme_quality_result = query_scheme_curve_by_ID_property( + scheme_Type=scheme_Type, + scheme_Name=scheme_Name, + query_date=query_date, + ID=value, + type="node", + property="quality", + ) for data in scheme_quality_result: point = ( - 
def query_SCADA_data_curve(
    api_query_id: str, start_date: str, end_date: str, bucket: str = "SCADA_data"
) -> list:
    """
    Fetch the monitored-value curve of one SCADA device over a date range.
    Returned timestamps are in UTC (zero offset).

    :param api_query_id: SCADA device query id (stored as the "device_ID" tag).
    :param start_date: first Beijing date, e.g. '2025-02-14'.
    :param end_date: last Beijing date, e.g. '2025-02-15'.
    :param bucket: InfluxDB bucket holding the SCADA data.
    :return: list of {"time", "value"} dicts.
    """
    client = get_new_client()
    if not client.ping():
        print(
            "{} -- Failed to connect to InfluxDB.".format(
                datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            )
        )

    query_api = client.query_api()
    # Beijing 00:00 on start_date == 16:00 UTC on the previous day.
    day_fmt = "%Y-%m-%d"
    start_time = (
        (datetime.strptime(start_date, day_fmt) - timedelta(days=1))
        .replace(hour=16, minute=0, second=0, tzinfo=timezone.utc)
        .isoformat()
    )
    stop_time = (
        datetime.strptime(end_date, day_fmt)
        .replace(hour=15, minute=59, second=59, tzinfo=timezone.utc)
        .isoformat()
    )

    flux_query = f"""
    from(bucket: "{bucket}")
    |> range(start: {start_time}, stop: {stop_time})
    |> filter(fn: (r) => r["device_ID"] == "{api_query_id}" and r["_field"] == "monitored_value")
    """
    curve = [
        {"time": record["_time"], "value": record["_value"]}
        for table in query_api.query(flux_query)
        for record in table.records
    ]
    client.close()
    return curve
stop: {stop_time}) |> filter(fn: (r) => r["device_ID"] == "{api_query_id}" and r["_field"] == "monitored_value") - ''' + """ # 执行查询 tables = query_api.query(flux_query) # 解析查询结果 results = [] for table in tables: for record in table.records: - results.append({ - "time": record["_time"], - "value": record["_value"] - }) + results.append({"time": record["_time"], "value": record["_value"]}) client.close() return results # 2025/02/18 -def query_scheme_all_record_by_time(scheme_Type: str, scheme_Name: str, query_time: str, bucket: str="scheme_simulation_result") -> tuple: +def query_scheme_all_record_by_time( + scheme_Type: str, + scheme_Name: str, + query_time: str, + bucket: str = "scheme_simulation_result", +) -> tuple: """ 查询指定方案某一时刻的所有记录,包括‘node'和‘link’,分别以指定格式返回。 :param scheme_Type: 方案类型 @@ -2850,7 +3542,11 @@ def query_scheme_all_record_by_time(scheme_Type: str, scheme_Name: str, query_ti """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() # 将北京时间转换为 UTC 时间 @@ -2859,7 +3555,7 @@ def query_scheme_all_record_by_time(scheme_Type: str, scheme_Name: str, query_ti utc_start_time = utc_time - timedelta(seconds=1) utc_stop_time = utc_time + timedelta(seconds=1) # 构建 Flux 查询语句 - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()}) |> filter(fn: (r) => r["scheme_Type"] == "{scheme_Type}" and r["scheme_Name"] == "{scheme_Name}" and r["_measurement"] == "node" or r["_measurement"] == "link") @@ -2868,7 +3564,7 @@ def query_scheme_all_record_by_time(scheme_Type: str, scheme_Name: str, query_ti columnKey:["_field"], valueColumn:"_value" ) - ''' + """ # 执行查询 tables = query_api.query(flux_query) node_records = [] @@ -2880,35 +3576,45 @@ def 
query_scheme_all_record_by_time(scheme_Type: str, scheme_Name: str, query_ti measurement = record["_measurement"] # 处理 node 数据 if measurement == "node": - node_records.append({ - "time": record["_time"], - "ID": record["ID"], - "head": record["head"], - "pressure": record["pressure"], - "actualdemand": record["actualdemand"], - "quality": record["quality"] - }) + node_records.append( + { + "time": record["_time"], + "ID": record["ID"], + "head": record["head"], + "pressure": record["pressure"], + "actualdemand": record["actualdemand"], + "quality": record["quality"], + } + ) # 处理 link 数据 elif measurement == "link": - link_records.append({ - "time": record["_time"], - "ID": record["ID"], - "flow": record["flow"], - "velocity": record["velocity"], - "headloss": record["headloss"], - "quality": record["quality"], - "status": record["status"], - "setting": record["setting"], - "reaction": record["reaction"], - "friction": record["friction"] - }) + link_records.append( + { + "time": record["_time"], + "ID": record["ID"], + "flow": record["flow"], + "velocity": record["velocity"], + "headloss": record["headloss"], + "quality": record["quality"], + "status": record["status"], + "setting": record["setting"], + "reaction": record["reaction"], + "friction": record["friction"], + } + ) client.close() return node_records, link_records # 2025/03/04 -def query_scheme_all_record_by_time_property(scheme_Type: str, scheme_Name: str, query_time: str, type: str, property: str, - bucket: str="scheme_simulation_result") -> list: +def query_scheme_all_record_by_time_property( + scheme_Type: str, + scheme_Name: str, + query_time: str, + type: str, + property: str, + bucket: str = "scheme_simulation_result", +) -> list: """ 查询指定方案某一时刻‘node'或‘link’某一属性值,以指定格式返回。 :param scheme_Type: 方案类型 @@ -2921,7 +3627,11 @@ def query_scheme_all_record_by_time_property(scheme_Type: str, scheme_Name: str, """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to 
InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() # 确定 measurement @@ -2937,28 +3647,32 @@ def query_scheme_all_record_by_time_property(scheme_Type: str, scheme_Name: str, utc_start_time = utc_time - timedelta(seconds=1) utc_stop_time = utc_time + timedelta(seconds=1) # 构建 Flux 查询语句 - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()}) |> filter(fn: (r) => r["scheme_Type"] == "{scheme_Type}" and r["scheme_Name"] == "{scheme_Name}" and r["_measurement"] == "{measurement}" and r["_field"] == "{property}") - ''' + """ # 执行查询 tables = query_api.query(flux_query) result_records = [] # 解析查询结果 for table in tables: for record in table.records: - result_records.append({ - "ID": record["ID"], - "value": record["_value"] - }) + result_records.append({"ID": record["ID"], "value": record["_value"]}) client.close() return result_records # 2025/02/19 -def query_scheme_curve_by_ID_property(scheme_Type: str, scheme_Name: str, query_date: str, ID: str, type: str, property: str, - bucket: str="scheme_simulation_result") -> list: +def query_scheme_curve_by_ID_property( + scheme_Type: str, + scheme_Name: str, + query_date: str, + ID: str, + type: str, + property: str, + bucket: str = "scheme_simulation_result", +) -> list: """ 根据scheme_Type和scheme_Name,查询该模拟方案中,某一node或link的某一属性值的所有时间的结果 :param scheme_Type: 方案类型 @@ -2972,7 +3686,11 @@ def query_scheme_curve_by_ID_property(scheme_Type: str, scheme_Name: str, query_ """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() # 确定 measurement @@ -2982,31 
+3700,41 @@ def query_scheme_curve_by_ID_property(scheme_Type: str, scheme_Name: str, query_ measurement = "link" else: raise ValueError(f"不支持的类型: {type}") - start_time = (datetime.strptime(query_date, "%Y-%m-%d") - timedelta(days=1)).replace(hour=16, minute=0, second=0, tzinfo=timezone.utc).isoformat() - stop_time = datetime.strptime(query_date, "%Y-%m-%d").replace(hour=15, minute=59, second=59, tzinfo=timezone.utc).isoformat() + start_time = ( + (datetime.strptime(query_date, "%Y-%m-%d") - timedelta(days=1)) + .replace(hour=16, minute=0, second=0, tzinfo=timezone.utc) + .isoformat() + ) + stop_time = ( + datetime.strptime(query_date, "%Y-%m-%d") + .replace(hour=15, minute=59, second=59, tzinfo=timezone.utc) + .isoformat() + ) # 构建 Flux 查询语句 - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {start_time}, stop: {stop_time}) |> filter(fn: (r) => r["_measurement"] == "{measurement}" and r["scheme_Type"] == "{scheme_Type}" and r["scheme_Name"] == "{scheme_Name}" and r["ID"] == "{ID}" and r["_field"] == "{property}") - ''' + """ # 执行查询 tables = query_api.query(flux_query) # 解析查询结果 results = [] for table in tables: for record in table.records: - results.append({ - "time": record["_time"], - "value": record["_value"] - }) + results.append({"time": record["_time"], "value": record["_value"]}) client.close() return results # 2025/02/21 -def query_scheme_all_record(scheme_Type: str, scheme_Name: str, query_date: str, bucket: str="scheme_simulation_result") -> tuple: +def query_scheme_all_record( + scheme_Type: str, + scheme_Name: str, + query_date: str, + bucket: str = "scheme_simulation_result", +) -> tuple: """ 查询指定方案的所有记录,包括‘node'和‘link’,分别以指定格式返回。 :param scheme_Type: 方案类型 @@ -3017,18 +3745,24 @@ def query_scheme_all_record(scheme_Type: str, scheme_Name: str, query_date: str, """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} 
-- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() - bg_start_time, bg_end_time = time_api.parse_beijing_date_range(query_date=query_date) + bg_start_time, bg_end_time = time_api.parse_beijing_date_range( + query_date=query_date + ) utc_start_time = time_api.to_utc_time(bg_start_time) utc_stop_time = time_api.to_utc_time(bg_end_time) print(utc_start_time, utc_stop_time) # 构建 Flux 查询语句 - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {utc_start_time.isoformat()}, stop: {utc_stop_time.isoformat()}) |> filter(fn: (r) => r["scheme_Type"] == "{scheme_Type}" and r["scheme_Name"] == "{scheme_Name}" and r["_measurement"] == "node" or r["_measurement"] == "link") @@ -3037,7 +3771,7 @@ def query_scheme_all_record(scheme_Type: str, scheme_Name: str, query_date: str, columnKey:["_field"], valueColumn:"_value" ) - ''' + """ # 执行查询 tables = query_api.query(flux_query) node_records = [] @@ -3049,35 +3783,45 @@ def query_scheme_all_record(scheme_Type: str, scheme_Name: str, query_date: str, measurement = record["_measurement"] # 处理 node 数据 if measurement == "node": - node_records.append({ - "time": record["_time"], - "ID": record["ID"], - "head": record["head"], - "pressure": record["pressure"], - "actualdemand": record["actualdemand"], - "quality": record["quality"] - }) + node_records.append( + { + "time": record["_time"], + "ID": record["ID"], + "head": record["head"], + "pressure": record["pressure"], + "actualdemand": record["actualdemand"], + "quality": record["quality"], + } + ) # 处理 link 数据 elif measurement == "link": - link_records.append({ - "time": record["_time"], - "ID": record["ID"], - "flow": record["flow"], - "velocity": record["velocity"], - "headloss": record["headloss"], - "quality": record["quality"], - "status": record["status"], - "setting": record["setting"], - "reaction": record["reaction"], - "friction": record["friction"] - }) + 
link_records.append( + { + "time": record["_time"], + "ID": record["ID"], + "flow": record["flow"], + "velocity": record["velocity"], + "headloss": record["headloss"], + "quality": record["quality"], + "status": record["status"], + "setting": record["setting"], + "reaction": record["reaction"], + "friction": record["friction"], + } + ) client.close() return node_records, link_records # 2025/03/04 -def query_scheme_all_record_property(scheme_Type: str, scheme_Name: str, query_date: str, type: str, property: str, - bucket: str="scheme_simulation_result") -> list: +def query_scheme_all_record_property( + scheme_Type: str, + scheme_Name: str, + query_date: str, + type: str, + property: str, + bucket: str = "scheme_simulation_result", +) -> list: """ 查询指定方案的‘node'或‘link’的某一属性值,以指定格式返回。 :param scheme_Type: 方案类型 @@ -3090,7 +3834,11 @@ def query_scheme_all_record_property(scheme_Type: str, scheme_Name: str, query_d """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() # 确定 measurement @@ -3100,31 +3848,39 @@ def query_scheme_all_record_property(scheme_Type: str, scheme_Name: str, query_d measurement = "link" else: raise ValueError(f"不支持的类型: {type}") - start_time = (datetime.strptime(query_date, "%Y-%m-%d") - timedelta(days=1)).replace(hour=16, minute=0, second=0, tzinfo=timezone.utc).isoformat() - stop_time = datetime.strptime(query_date, "%Y-%m-%d").replace(hour=15, minute=59, second=59, tzinfo=timezone.utc).isoformat() + start_time = ( + (datetime.strptime(query_date, "%Y-%m-%d") - timedelta(days=1)) + .replace(hour=16, minute=0, second=0, tzinfo=timezone.utc) + .isoformat() + ) + stop_time = ( + datetime.strptime(query_date, "%Y-%m-%d") + .replace(hour=15, minute=59, second=59, tzinfo=timezone.utc) + .isoformat() + ) # 构建 Flux 查询语句 
- flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {start_time}, stop: {stop_time}) |> filter(fn: (r) => r["scheme_Type"] == "{scheme_Type}" and r["scheme_Name"] == "{scheme_Name}" and r["date"] == "{query_date}" and r["_measurement"] == "{measurement}" and r["_field"] == "{property}") - ''' + """ # 执行查询 tables = query_api.query(flux_query) result_records = [] # 解析查询结果 for table in tables: for record in table.records: - result_records.append({ - "time": record["_time"], - "ID": record["ID"], - "value": record["_value"] - }) + result_records.append( + {"time": record["_time"], "ID": record["ID"], "value": record["_value"]} + ) client.close() return result_records # 2025/02/16 -def export_SCADA_data_to_csv(start_date: str, end_date: str, bucket: str="SCADA_data") -> None: +def export_SCADA_data_to_csv( + start_date: str, end_date: str, bucket: str = "SCADA_data" +) -> None: """ 导出influxdb中SCADA_data这个bucket的数据到csv中 :param start_date: 查询开始的时间,格式为 'YYYY-MM-DD' @@ -3134,17 +3890,29 @@ def export_SCADA_data_to_csv(start_date: str, end_date: str, bucket: str="SCADA_ """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() # 将 start_date 的北京时间转换为 UTC 时间范围 - start_time = (datetime.strptime(start_date, "%Y-%m-%d") - timedelta(days=1)).replace(hour=16, minute=0, second=0, tzinfo=timezone.utc).isoformat() - stop_time = datetime.strptime(end_date, "%Y-%m-%d").replace(hour=15, minute=59, second=59, tzinfo=timezone.utc).isoformat() + start_time = ( + (datetime.strptime(start_date, "%Y-%m-%d") - timedelta(days=1)) + .replace(hour=16, minute=0, second=0, tzinfo=timezone.utc) + .isoformat() + ) + stop_time = ( + datetime.strptime(end_date, "%Y-%m-%d") + .replace(hour=15, minute=59, second=59, tzinfo=timezone.utc) + 
.isoformat() + ) # 构建 Flux 查询语句 - flux_query = f''' + flux_query = f""" from(bucket: "{bucket}") |> range(start: {start_time}, stop: {stop_time}) - ''' + """ # 执行查询 tables = query_api.query(flux_query) # 存储查询结果 @@ -3152,21 +3920,45 @@ def export_SCADA_data_to_csv(start_date: str, end_date: str, bucket: str="SCADA_ for table in tables: for record in table.records: row = { - 'time': record.get_time(), - 'measurement': record.get_measurement(), - 'date': record.values.get('date', None), - 'description': record.values.get('description', None), - 'device_ID': record.values.get('device_ID', None), - 'monitored_value': record.get_value() if record.get_field() == 'monitored_value' else None, - 'datacleaning_value': record.get_value() if record.get_field() == 'datacleaning_value' else None, - 'simulation_value': record.get_value() if record.get_field() == 'simulation_value' else None, + "time": record.get_time(), + "measurement": record.get_measurement(), + "date": record.values.get("date", None), + "description": record.values.get("description", None), + "device_ID": record.values.get("device_ID", None), + "monitored_value": ( + record.get_value() + if record.get_field() == "monitored_value" + else None + ), + "datacleaning_value": ( + record.get_value() + if record.get_field() == "datacleaning_value" + else None + ), + "simulation_value": ( + record.get_value() + if record.get_field() == "simulation_value" + else None + ), } rows.append(row) # 动态生成 CSV 文件名 csv_filename = f"SCADA_data_{start_date}至{end_date}.csv" # 写入到 CSV 文件 - with open(csv_filename, mode='w', newline='') as file: - writer = csv.DictWriter(file, fieldnames=['time', 'measurement', 'date', 'description', 'device_ID', 'monitored_value', 'datacleaning_value', 'simulation_value']) + with open(csv_filename, mode="w", newline="") as file: + writer = csv.DictWriter( + file, + fieldnames=[ + "time", + "measurement", + "date", + "description", + "device_ID", + "monitored_value", + "datacleaning_value", + 
"simulation_value", + ], + ) writer.writeheader() writer.writerows(rows) print(f"Data exported to {csv_filename} successfully.") @@ -3174,7 +3966,9 @@ def export_SCADA_data_to_csv(start_date: str, end_date: str, bucket: str="SCADA_ # 2025/02/17 -def export_realtime_simulation_result_to_csv(start_date: str, end_date: str, bucket: str="realtime_simulation_result") -> None: +def export_realtime_simulation_result_to_csv( + start_date: str, end_date: str, bucket: str = "realtime_simulation_result" +) -> None: """ 导出influxdb中realtime_simulation_result这个bucket的数据到csv中 :param start_date: 查询开始的时间,格式为 'YYYY-MM-DD' @@ -3184,18 +3978,30 @@ def export_realtime_simulation_result_to_csv(start_date: str, end_date: str, buc """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() # 将 start_date 的北京时间转换为 UTC 时间范围 - start_time = (datetime.strptime(start_date, "%Y-%m-%d") - timedelta(days=1)).replace(hour=16, minute=0, second=0, tzinfo=timezone.utc).isoformat() - stop_time = datetime.strptime(end_date, "%Y-%m-%d").replace(hour=15, minute=59, second=59, tzinfo=timezone.utc).isoformat() + start_time = ( + (datetime.strptime(start_date, "%Y-%m-%d") - timedelta(days=1)) + .replace(hour=16, minute=0, second=0, tzinfo=timezone.utc) + .isoformat() + ) + stop_time = ( + datetime.strptime(end_date, "%Y-%m-%d") + .replace(hour=15, minute=59, second=59, tzinfo=timezone.utc) + .isoformat() + ) # 构建 Flux 查询语句,查询指定时间范围内的数据 - flux_query_link = f''' + flux_query_link = f""" from(bucket: "{bucket}") |> range(start: {start_time}, stop: {stop_time}) |> filter(fn: (r) => r["_measurement"] == "link") - ''' + """ # 执行查询 link_tables = query_api.query(flux_query_link) # 存储link类的数据 @@ -3203,19 +4009,19 @@ def export_realtime_simulation_result_to_csv(start_date: str, 
end_date: str, buc link_data = {} for table in link_tables: for record in table.records: - key = (record.get_time(), record.values.get('ID', None)) + key = (record.get_time(), record.values.get("ID", None)) if key not in link_data: link_data[key] = {} field = record.get_field() link_data[key][field] = record.get_value() - link_data[key]['measurement'] = record.get_measurement() - link_data[key]['date'] = record.values.get('date', None) + link_data[key]["measurement"] = record.get_measurement() + link_data[key]["date"] = record.values.get("date", None) # 构建 Flux 查询语句,查询指定时间范围内的数据 - flux_query_node = f''' + flux_query_node = f""" from(bucket: "{bucket}") |> range(start: {start_time}, stop: {stop_time}) |> filter(fn: (r) => r["_measurement"] == "node") - ''' + """ # 执行查询 node_tables = query_api.query(flux_query_node) # 存储node类的数据 @@ -3223,33 +4029,63 @@ def export_realtime_simulation_result_to_csv(start_date: str, end_date: str, buc node_data = {} for table in node_tables: for record in table.records: - key = (record.get_time(), record.values.get('ID', None)) + key = (record.get_time(), record.values.get("ID", None)) if key not in node_data: node_data[key] = {} field = record.get_field() node_data[key][field] = record.get_value() - node_data[key]['measurement'] = record.get_measurement() - node_data[key]['date'] = record.values.get('date', None) + node_data[key]["measurement"] = record.get_measurement() + node_data[key]["date"] = record.values.get("date", None) for key in set(link_data.keys()): - row = {'time': key[0], "ID": key[1]} + row = {"time": key[0], "ID": key[1]} row.update(link_data.get(key, {})) link_rows.append(row) for key in set(node_data.keys()): - row = {'time': key[0], "ID": key[1]} + row = {"time": key[0], "ID": key[1]} row.update(node_data.get(key, {})) node_rows.append(row) # 动态生成 CSV 文件名 csv_filename_link = f"realtime_simulation_link_result_{start_date}至{end_date}.csv" csv_filename_node = 
f"realtime_simulation_node_result_{start_date}至{end_date}.csv" # 写入到 CSV 文件 - with open(csv_filename_link, mode='w', newline='') as file: - writer = csv.DictWriter(file, fieldnames=['time', 'measurement', 'date', 'ID', 'flow', 'leakage', 'velocity', 'headloss', 'status', 'setting', 'quality', 'friction', 'reaction']) + with open(csv_filename_link, mode="w", newline="") as file: + writer = csv.DictWriter( + file, + fieldnames=[ + "time", + "measurement", + "date", + "ID", + "flow", + "leakage", + "velocity", + "headloss", + "status", + "setting", + "quality", + "friction", + "reaction", + ], + ) writer.writeheader() writer.writerows(link_rows) - with open(csv_filename_node, mode='w', newline='') as file: - writer = csv.DictWriter(file, fieldnames=['time', 'measurement', 'date', 'ID', 'head', 'pressure', 'actualdemand', - 'demanddeficit', 'totalExternalOutflow', 'quality']) + with open(csv_filename_node, mode="w", newline="") as file: + writer = csv.DictWriter( + file, + fieldnames=[ + "time", + "measurement", + "date", + "ID", + "head", + "pressure", + "actualdemand", + "demanddeficit", + "totalExternalOutflow", + "quality", + ], + ) writer.writeheader() writer.writerows(node_rows) print(f"Data exported to {csv_filename_link} and {csv_filename_node} successfully.") @@ -3257,7 +4093,9 @@ def export_realtime_simulation_result_to_csv(start_date: str, end_date: str, buc # 2025/02/18 -def export_scheme_simulation_result_to_csv_time(start_date: str, end_date: str, bucket: str="scheme_simulation_result") -> None: +def export_scheme_simulation_result_to_csv_time( + start_date: str, end_date: str, bucket: str = "scheme_simulation_result" +) -> None: """ 导出influxdb中scheme_simulation_result这个bucket的数据到csv中 :param start_date: 查询开始的时间,格式为 'YYYY-MM-DD' @@ -3267,18 +4105,30 @@ def export_scheme_simulation_result_to_csv_time(start_date: str, end_date: str, """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to 
InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) query_api = client.query_api() # 将 start_date 的北京时间转换为 UTC 时间范围 - start_time = (datetime.strptime(start_date, "%Y-%m-%d") - timedelta(days=1)).replace(hour=16, minute=0, second=0, tzinfo=timezone.utc).isoformat() - stop_time = datetime.strptime(end_date, "%Y-%m-%d").replace(hour=15, minute=59, second=59, tzinfo=timezone.utc).isoformat() + start_time = ( + (datetime.strptime(start_date, "%Y-%m-%d") - timedelta(days=1)) + .replace(hour=16, minute=0, second=0, tzinfo=timezone.utc) + .isoformat() + ) + stop_time = ( + datetime.strptime(end_date, "%Y-%m-%d") + .replace(hour=15, minute=59, second=59, tzinfo=timezone.utc) + .isoformat() + ) # 构建 Flux 查询语句,查询指定时间范围内的数据 - flux_query_link = f''' + flux_query_link = f""" from(bucket: "{bucket}") |> range(start: {start_time}, stop: {stop_time}) |> filter(fn: (r) => r["_measurement"] == "link") - ''' + """ # 执行查询 link_tables = query_api.query(flux_query_link) # 存储link类的数据 @@ -3286,21 +4136,21 @@ def export_scheme_simulation_result_to_csv_time(start_date: str, end_date: str, link_data = {} for table in link_tables: for record in table.records: - key = (record.get_time(), record.values.get('ID', None)) + key = (record.get_time(), record.values.get("ID", None)) if key not in link_data: link_data[key] = {} field = record.get_field() link_data[key][field] = record.get_value() - link_data[key]['measurement'] = record.get_measurement() - link_data[key]['date'] = record.values.get('date', None) - link_data[key]['scheme_Type'] = record.values.get('scheme_Type', None) - link_data[key]['scheme_Name'] = record.values.get('scheme_Name', None) + link_data[key]["measurement"] = record.get_measurement() + link_data[key]["date"] = record.values.get("date", None) + link_data[key]["scheme_Type"] = record.values.get("scheme_Type", None) + link_data[key]["scheme_Name"] = 
record.values.get("scheme_Name", None) # 构建 Flux 查询语句,查询指定时间范围内的数据 - flux_query_node = f''' + flux_query_node = f""" from(bucket: "{bucket}") |> range(start: {start_time}, stop: {stop_time}) |> filter(fn: (r) => r["_measurement"] == "node") - ''' + """ # 执行查询 node_tables = query_api.query(flux_query_node) # 存储node类的数据 @@ -3308,34 +4158,68 @@ def export_scheme_simulation_result_to_csv_time(start_date: str, end_date: str, node_data = {} for table in node_tables: for record in table.records: - key = (record.get_time(), record.values.get('ID', None)) + key = (record.get_time(), record.values.get("ID", None)) if key not in node_data: node_data[key] = {} field = record.get_field() node_data[key][field] = record.get_value() - node_data[key]['measurement'] = record.get_measurement() - node_data[key]['date'] = record.values.get('date', None) - node_data[key]['scheme_Type'] = record.values.get('scheme_Type', None) - node_data[key]['scheme_Name'] = record.values.get('scheme_Name', None) + node_data[key]["measurement"] = record.get_measurement() + node_data[key]["date"] = record.values.get("date", None) + node_data[key]["scheme_Type"] = record.values.get("scheme_Type", None) + node_data[key]["scheme_Name"] = record.values.get("scheme_Name", None) for key in set(link_data.keys()): - row = {'time': key[0], "ID": key[1]} + row = {"time": key[0], "ID": key[1]} row.update(link_data.get(key, {})) link_rows.append(row) for key in set(node_data.keys()): - row = {'time': key[0], "ID": key[1]} + row = {"time": key[0], "ID": key[1]} row.update(node_data.get(key, {})) node_rows.append(row) # 动态生成 CSV 文件名 csv_filename_link = f"scheme_simulation_link_result_{start_date}至{end_date}.csv" csv_filename_node = f"scheme_simulation_node_result_{start_date}至{end_date}.csv" # 写入到 CSV 文件 - with open(csv_filename_link, mode='w', newline='') as file: - writer = csv.DictWriter(file, fieldnames=['time', 'measurement', 'date', 'scheme_Type', 'scheme_Name', 'ID', 'flow', 'leakage', 'velocity', 'headloss', 
'status', 'setting', 'quality', 'friction', 'reaction']) + with open(csv_filename_link, mode="w", newline="") as file: + writer = csv.DictWriter( + file, + fieldnames=[ + "time", + "measurement", + "date", + "scheme_Type", + "scheme_Name", + "ID", + "flow", + "leakage", + "velocity", + "headloss", + "status", + "setting", + "quality", + "friction", + "reaction", + ], + ) writer.writeheader() writer.writerows(link_rows) - with open(csv_filename_node, mode='w', newline='') as file: - writer = csv.DictWriter(file, fieldnames=['time', 'measurement', 'date', 'scheme_Type', 'scheme_Name', 'ID', 'head', 'pressure', 'actualdemand', - 'demanddeficit', 'totalExternalOutflow', 'quality']) + with open(csv_filename_node, mode="w", newline="") as file: + writer = csv.DictWriter( + file, + fieldnames=[ + "time", + "measurement", + "date", + "scheme_Type", + "scheme_Name", + "ID", + "head", + "pressure", + "actualdemand", + "demanddeficit", + "totalExternalOutflow", + "quality", + ], + ) writer.writeheader() writer.writerows(node_rows) print(f"Data exported to {csv_filename_link} and {csv_filename_node} successfully.") @@ -3343,7 +4227,12 @@ def export_scheme_simulation_result_to_csv_time(start_date: str, end_date: str, # 2025/02/18 -def export_scheme_simulation_result_to_csv_scheme(scheme_Type: str, scheme_Name: str, query_date: str, bucket: str="scheme_simulation_result") -> None: +def export_scheme_simulation_result_to_csv_scheme( + scheme_Type: str, + scheme_Name: str, + query_date: str, + bucket: str = "scheme_simulation_result", +) -> None: """ 导出influxdb中scheme_simulation_result这个bucket的数据到csv中 :param scheme_Type: 查询的方案类型 @@ -3354,17 +4243,29 @@ def export_scheme_simulation_result_to_csv_scheme(scheme_Type: str, scheme_Name: """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d 
%H:%M:%S") + ) + ) query_api = client.query_api() - start_time = (datetime.strptime(query_date, "%Y-%m-%d") - timedelta(days=1)).replace(hour=16, minute=0, second=0, tzinfo=timezone.utc).isoformat() - stop_time = datetime.strptime(query_date, "%Y-%m-%d").replace(hour=15, minute=59, second=59, tzinfo=timezone.utc).isoformat() + start_time = ( + (datetime.strptime(query_date, "%Y-%m-%d") - timedelta(days=1)) + .replace(hour=16, minute=0, second=0, tzinfo=timezone.utc) + .isoformat() + ) + stop_time = ( + datetime.strptime(query_date, "%Y-%m-%d") + .replace(hour=15, minute=59, second=59, tzinfo=timezone.utc) + .isoformat() + ) # 构建 Flux 查询语句,查询指定时间范围内的数据 - flux_query_link = f''' + flux_query_link = f""" from(bucket: "{bucket}") |> range(start: {start_time}, stop: {stop_time}) |> filter(fn: (r) => r["_measurement"] == "link" and r["scheme_Type"] == "{scheme_Type}" and r["scheme_Name"] == "{scheme_Name}") - ''' + """ # 执行查询 link_tables = query_api.query(flux_query_link) # 存储link类的数据 @@ -3372,21 +4273,21 @@ def export_scheme_simulation_result_to_csv_scheme(scheme_Type: str, scheme_Name: link_data = {} for table in link_tables: for record in table.records: - key = (record.get_time(), record.values.get('ID', None)) + key = (record.get_time(), record.values.get("ID", None)) if key not in link_data: link_data[key] = {} field = record.get_field() link_data[key][field] = record.get_value() - link_data[key]['measurement'] = record.get_measurement() - link_data[key]['date'] = record.values.get('date', None) - link_data[key]['scheme_Type'] = record.values.get('scheme_Type', None) - link_data[key]['scheme_Name'] = record.values.get('scheme_Name', None) + link_data[key]["measurement"] = record.get_measurement() + link_data[key]["date"] = record.values.get("date", None) + link_data[key]["scheme_Type"] = record.values.get("scheme_Type", None) + link_data[key]["scheme_Name"] = record.values.get("scheme_Name", None) # 构建 Flux 查询语句,查询指定时间范围内的数据 - flux_query_node = f''' + flux_query_node 
= f""" from(bucket: "{bucket}") |> range(start: {start_time}, stop: {stop_time}) |> filter(fn: (r) => r["_measurement"] == "node" and r["scheme_Type"] == "{scheme_Type}" and r["scheme_Name"] == "{scheme_Name}") - ''' + """ # 执行查询 node_tables = query_api.query(flux_query_node) # 存储node类的数据 @@ -3394,41 +4295,81 @@ def export_scheme_simulation_result_to_csv_scheme(scheme_Type: str, scheme_Name: node_data = {} for table in node_tables: for record in table.records: - key = (record.get_time(), record.values.get('ID', None)) + key = (record.get_time(), record.values.get("ID", None)) if key not in node_data: node_data[key] = {} field = record.get_field() node_data[key][field] = record.get_value() - node_data[key]['measurement'] = record.get_measurement() - node_data[key]['date'] = record.values.get('date', None) - node_data[key]['scheme_Type'] = record.values.get('scheme_Type', None) - node_data[key]['scheme_Name'] = record.values.get('scheme_Name', None) + node_data[key]["measurement"] = record.get_measurement() + node_data[key]["date"] = record.values.get("date", None) + node_data[key]["scheme_Type"] = record.values.get("scheme_Type", None) + node_data[key]["scheme_Name"] = record.values.get("scheme_Name", None) for key in set(link_data.keys()): - row = {'time': key[0], "ID": key[1]} + row = {"time": key[0], "ID": key[1]} row.update(link_data.get(key, {})) link_rows.append(row) for key in set(node_data.keys()): - row = {'time': key[0], "ID": key[1]} + row = {"time": key[0], "ID": key[1]} row.update(node_data.get(key, {})) node_rows.append(row) # 动态生成 CSV 文件名 - csv_filename_link = f"scheme_simulation_link_result_{scheme_Name}_of_{scheme_Type}.csv" - csv_filename_node = f"scheme_simulation_node_result_{scheme_Name}_of_{scheme_Type}.csv" + csv_filename_link = ( + f"scheme_simulation_link_result_{scheme_Name}_of_{scheme_Type}.csv" + ) + csv_filename_node = ( + f"scheme_simulation_node_result_{scheme_Name}_of_{scheme_Type}.csv" + ) # 写入到 CSV 文件 - with open(csv_filename_link, 
mode='w', newline='') as file: - writer = csv.DictWriter(file, fieldnames=['time', 'measurement', 'date', 'scheme_Type', 'scheme_Name', 'ID', 'flow', 'leakage', 'velocity', 'headloss', 'status', 'setting', 'quality', 'friction', 'reaction']) + with open(csv_filename_link, mode="w", newline="") as file: + writer = csv.DictWriter( + file, + fieldnames=[ + "time", + "measurement", + "date", + "scheme_Type", + "scheme_Name", + "ID", + "flow", + "leakage", + "velocity", + "headloss", + "status", + "setting", + "quality", + "friction", + "reaction", + ], + ) writer.writeheader() writer.writerows(link_rows) - with open(csv_filename_node, mode='w', newline='') as file: - writer = csv.DictWriter(file, fieldnames=['time', 'measurement', 'date', 'scheme_Type', 'scheme_Name', 'ID', 'head', 'pressure', 'actualdemand', - 'demanddeficit', 'totalExternalOutflow', 'quality']) + with open(csv_filename_node, mode="w", newline="") as file: + writer = csv.DictWriter( + file, + fieldnames=[ + "time", + "measurement", + "date", + "scheme_Type", + "scheme_Name", + "ID", + "head", + "pressure", + "actualdemand", + "demanddeficit", + "totalExternalOutflow", + "quality", + ], + ) writer.writeheader() writer.writerows(node_rows) print(f"Data exported to {csv_filename_link} and {csv_filename_node} successfully.") client.close() -def upload_cleaned_SCADA_data_to_influxdb(file_path: str, bucket: str="SCADA_data") -> None: +def upload_cleaned_SCADA_data_to_influxdb( + file_path: str, bucket: str = "SCADA_data" +) -> None: """ 将清洗后的SCADA数据导入influxdb,有标准化导入格式 :param file_path: 导入数据的文件 @@ -3437,32 +4378,42 @@ def upload_cleaned_SCADA_data_to_influxdb(file_path: str, bucket: str="SCADA_dat """ data_list = [] - with open(file_path, mode='r', encoding='utf-8-sig') as csv_file: + with open(file_path, mode="r", encoding="utf-8-sig") as csv_file: csv_reader = csv.DictReader(csv_file) for row in csv_reader: # 解析日期和时间字段 - datetime_value = datetime.strptime(row['time'], '%Y-%m-%d %H:%M:%S%z') + 
datetime_value = datetime.strptime(row["time"], "%Y-%m-%d %H:%M:%S%z") # 处理datacleaning_value为空的情况 - datacleaning_value = float(row['datacleaning_value']) if row['datacleaning_value'] else None + datacleaning_value = ( + float(row["datacleaning_value"]) if row["datacleaning_value"] else None + ) # 处理monitored_value字段类型错误 try: - monitored_value = float(row['monitored_value']) if row['monitored_value'] else None + monitored_value = ( + float(row["monitored_value"]) if row["monitored_value"] else None + ) except ValueError: monitored_value = None # 如果转换失败,则设为None(或其他适当的默认值) - data_list.append({ - 'measurement': row['measurement'], - 'device_ID': row['device_ID'], - 'date': datetime_value.strftime('%Y-%m-%d'), - 'description': row['description'], - 'monitored_value': monitored_value, - 'datacleaning_value': datacleaning_value, - 'datetime': datetime_value - }) + data_list.append( + { + "measurement": row["measurement"], + "device_ID": row["device_ID"], + "date": datetime_value.strftime("%Y-%m-%d"), + "description": row["description"], + "monitored_value": monitored_value, + "datacleaning_value": datacleaning_value, + "datetime": datetime_value, + } + ) client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) try: write_api = client.write_api(write_options=SYNCHRONOUS) @@ -3471,13 +4422,15 @@ def upload_cleaned_SCADA_data_to_influxdb(file_path: str, bucket: str="SCADA_dat print(data) # 创建Point对象 point = ( - Point(data['measurement']) # measurement为mpointName - .tag("device_ID", data['device_ID']) # tag key为mpointId - .tag("date", data['date']) # 具体日期tag,方便查询 - .tag('description', data['description']) - .field("monitored_value", data['monitored_value']) # field key为dataValue - .field('datacleaning_value', data['datacleaning_value']) - .time(data['datetime']) # 
时间以datetime为准 + Point(data["measurement"]) # measurement为mpointName + .tag("device_ID", data["device_ID"]) # tag key为mpointId + .tag("date", data["date"]) # 具体日期tag,方便查询 + .tag("description", data["description"]) + .field( + "monitored_value", data["monitored_value"] + ) # field key为dataValue + .field("datacleaning_value", data["datacleaning_value"]) + .time(data["datetime"]) # 时间以datetime为准 ) write_api.write(bucket=bucket, record=point) @@ -3487,10 +4440,11 @@ def upload_cleaned_SCADA_data_to_influxdb(file_path: str, bucket: str="SCADA_dat except Exception as e: print(f"未知错误: {str(e)}") finally: - if 'write_api' in locals(): + if "write_api" in locals(): write_api.close() client.close() + # 2025/05/05 DingZQ # 删除某一天的数据 def delete_data(delete_date: str, bucket: str) -> None: @@ -3502,10 +4456,22 @@ def delete_data(delete_date: str, bucket: str) -> None: """ client = get_new_client() if not client.ping(): - print("{} -- Failed to connect to InfluxDB.".format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + print( + "{} -- Failed to connect to InfluxDB.".format( + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ) + ) - start_time = (datetime.strptime(delete_date, "%Y-%m-%d") - timedelta(days=1)).replace(hour=16, minute=0, second=0, tzinfo=timezone.utc).isoformat() - stop_time = datetime.strptime(delete_date, "%Y-%m-%d").replace(hour=15, minute=59, second=59, tzinfo=timezone.utc).isoformat() + start_time = ( + (datetime.strptime(delete_date, "%Y-%m-%d") - timedelta(days=1)) + .replace(hour=16, minute=0, second=0, tzinfo=timezone.utc) + .isoformat() + ) + stop_time = ( + datetime.strptime(delete_date, "%Y-%m-%d") + .replace(hour=15, minute=59, second=59, tzinfo=timezone.utc) + .isoformat() + ) # 构造删除谓词(InfluxDB Delete API 要求的 SQL-like 语句) # 注意:字段名用 _field,measurement 用 _measurement,标签直接写标签名 @@ -3652,13 +4618,11 @@ if __name__ == "__main__": # except Exception as e: # print(f"连接失败: {e}") - # step2: 先查询pg数据库中scada_info的信息,然后存储SCADA数据到SCADA_data这个bucket里 # 
query_pg_scada_info_realtime('bb') # query_pg_scada_info_non_realtime('bb') # query_corresponding_query_id_and_element_id('bb') - # 手动执行存储测试 # 示例1:store_realtime_SCADA_data_to_influxdb # store_realtime_SCADA_data_to_influxdb(get_real_value_time='2025-03-16T11:13:00+08:00') @@ -3765,7 +4729,7 @@ if __name__ == "__main__": # print(leakage) # 示例:upload_cleaned_SCADA_data_to_influxdb - upload_cleaned_SCADA_data_to_influxdb(file_path='./标准cleaned_demand_data.csv') + upload_cleaned_SCADA_data_to_influxdb(file_path="./标准cleaned_demand_data.csv") # 示例:delete_data # delete_data(delete_date='2025-05-04', bucket='SCADA_data') diff --git a/main.py b/main.py index 946fd32..b658bdc 100644 --- a/main.py +++ b/main.py @@ -6,7 +6,17 @@ from pydantic import BaseModel from starlette.responses import FileResponse, JSONResponse from starlette.middleware.base import BaseHTTPMiddleware from starlette.types import Receive -from fastapi import FastAPI, File, UploadFile, Response, status, Request, Body, HTTPException,Query +from fastapi import ( + FastAPI, + File, + UploadFile, + Response, + status, + Request, + Body, + HTTPException, + Query, +) from fastapi.responses import PlainTextResponse from fastapi.middleware.gzip import GZipMiddleware from tjnetwork import * @@ -16,7 +26,14 @@ import msgpack from run_simulation import run_simulation, run_simulation_ex from online_Analysis import * from fastapi.middleware.cors import CORSMiddleware -from influxdb_client import InfluxDBClient, BucketsApi, WriteApi, OrganizationsApi, Point, QueryApi +from influxdb_client import ( + InfluxDBClient, + BucketsApi, + WriteApi, + OrganizationsApi, + Point, + QueryApi, +) from typing import List, Dict from datetime import datetime, timedelta, timezone from dateutil import parser @@ -47,7 +64,7 @@ LINK_COUNT = 2 prjs = [] inpDir = "C:/inpfiles/" tmpDir = "C:/tmpfiles/" - +proj_name = project_info.name lockedPrjs = {} if not os.path.exists(inpDir): @@ -56,58 +73,65 @@ if not os.path.exists(inpDir): if not 
os.path.exists(tmpDir): os.mkdir(tmpDir) + # 全局依赖项 async def global_auth(request: Request): # 白名单跳过 # if request.url.path in WHITE_LIST: - # return + # return # 验证 token = request.headers.get("Authorization") if token != "Bearer 567e33c876a2" and token != "Bearer 38b3be72b8af": raise HTTPException(status_code=401, detail="Invalid token") + # 简易令牌验证(实际项目中应替换为 JWT/OAuth2 等) AUTH_TOKEN = "567e33c876a2" # 预设的有效令牌 + async def verify_token(authorization: Annotated[str, Header()] = None): # 检查请求头是否存在 if not authorization: raise HTTPException(status_code=401, detail="Authorization header missing") - + # 提取 Bearer 后的令牌 (格式: Bearer ) try: token_type, token = authorization.split(" ", 1) if token_type.lower() != "bearer": raise ValueError except ValueError: - raise HTTPException(status_code=401, detail="Invalid authorization format. Use: Bearer ") - + raise HTTPException( + status_code=401, detail="Invalid authorization format. Use: Bearer " + ) + # 验证令牌 if token != AUTH_TOKEN: raise HTTPException(status_code=403, detail="Invalid authentication token") - + return True + # 全局依赖项 # app = FastAPI(dependencies=[Depends(global_auth)]) app = FastAPI() access_tokens = [] + def generate_access_token(username: str, password: str) -> str: """ 根据用户名和密码生成JWT access token - + 参数: username: 用户名 password: 密码 - + 返回: JWT access token字符串 """ if username != "tjwater" or password != "tjwater@123": - raise ValueError("用户名或密码错误") + raise ValueError("用户名或密码错误") token = "567e33c876a2" return token @@ -119,21 +143,18 @@ def generate_access_token(username: str, password: str) -> str: def encode_datetime(obj): """将datetime转换为可序列化的字典结构""" if isinstance(obj, datetime): - return { - '__datetime__': True, - 'as_str': obj.strftime("%Y%m%dT%H:%M:%S.%f") - } + return {"__datetime__": True, "as_str": obj.strftime("%Y%m%dT%H:%M:%S.%f")} return obj + # 反序列化处理器 def decode_datetime(obj): """将字典还原为datetime对象""" - if '__datetime__' in obj: - return datetime.strptime( - obj['as_str'], "%Y%m%dT%H:%M:%S.%f" - ) + if 
"__datetime__" in obj: + return datetime.strptime(obj["as_str"], "%Y%m%dT%H:%M:%S.%f") return obj + # 初始化 Redis 连接 # 用redis 限制并发访u redis_client = redis.Redis(host="localhost", port=6379, db=0) @@ -155,23 +176,26 @@ app.add_middleware( ) # 定义一个共享变量 -lock_simulation = Value('i', 0) +lock_simulation = Value("i", 0) app.add_middleware(GZipMiddleware, minimum_size=1000) logger = logging.getLogger() logger.setLevel(logging.INFO) + @app.on_event("startup") async def startup_db(): - logger.info('**********************************************************') + logger.info("**********************************************************") logger.info(str(datetime.now())) logger.info("TJWater CloudService is starting...") - logger.info('**********************************************************') + logger.info("**********************************************************") + + # open proj_name by default + print(project_info.name) + open_project(project_info.name) + - # open 'szh' by default - open_project("szh") - ############################################################ # auth ############################################################ @@ -183,19 +207,22 @@ async def fastapi_login(username: str, password: str) -> str: ############################################################ # extension_data ############################################################ -@app.get('/getallextensiondatakeys/') +@app.get("/getallextensiondatakeys/") async def fastapi_get_all_extension_data_keys(network: str) -> list[str]: return get_all_extension_data_keys(network) -@app.get('/getallextensiondata/') + +@app.get("/getallextensiondata/") async def fastapi_get_all_extension_data(network: str) -> dict[str, Any]: return get_all_extension_data(network) -@app.get('/getextensiondata/') + +@app.get("/getextensiondata/") async def fastapi_get_extension_data(network: str, key: str) -> str | None: return get_extension_data(network, key) -@app.post('/setextensiondata',response_model=None) + 
+@app.post("/setextensiondata", response_model=None) async def fastapi_set_extension_data(network: str, req: Request) -> ChangeSet: props = await req.json() print(props) @@ -203,94 +230,106 @@ async def fastapi_set_extension_data(network: str, req: Request) -> ChangeSet: print(cs.operations[0]) return cs + ############################################################ # project ############################################################ -@app.get('/listprojects/') + +@app.get("/listprojects/") async def fastapi_list_projects() -> list[str]: return list_project() + @app.get("/haveproject/") async def fastapi_have_project(network: str): return have_project(network) + @app.post("/createproject/") async def fastapi_create_project(network: str): create_project(network) return network + @app.post("/deleteproject/") async def fastapi_delete_project(network: str): delete_project(network) return True + @app.get("/isprojectopen/") async def fastapi_is_project_open(network: str): return is_project_open(network) + @app.post("/openproject/") async def fastapi_open_project(network: str): open_project(network) return network + @app.post("/closeproject/") async def fastapi_close_project(network: str): close_project(network) return True + @app.post("/copyproject/") async def fastapi_copy_project(source: str, target: str): copy_project(source, target) return True + @app.post("/importinp/") async def fastapi_import_inp(network: str, req: Request): jo_root = await req.json() - inp_text = jo_root['inp'] - ps = { - "inp" : inp_text - } + inp_text = jo_root["inp"] + ps = {"inp": inp_text} ret = import_inp(network, ChangeSet(ps)) print(ret) return ret -@app.get("/exportinp/",response_model=None) + +@app.get("/exportinp/", response_model=None) async def fastapi_export_inp(network: str, version: str) -> ChangeSet: cs = export_inp(network, version) op = cs.operations[0] open_project(network) - op['vertex'] = json.dumps(get_all_vertices(network)) - op['scada'] = 
json.dumps(get_all_scada_elements(network)) - op['dma'] = json.dumps(get_all_district_metering_areas(network)) - op['sa'] = json.dumps(get_all_service_areas(network)) - op['vd'] = json.dumps(get_all_virtual_districts(network)) - op['legend'] = get_extension_data(network, 'legend') + op["vertex"] = json.dumps(get_all_vertices(network)) + op["scada"] = json.dumps(get_all_scada_elements(network)) + op["dma"] = json.dumps(get_all_district_metering_areas(network)) + op["sa"] = json.dumps(get_all_service_areas(network)) + op["vd"] = json.dumps(get_all_virtual_districts(network)) + op["legend"] = get_extension_data(network, "legend") - db = get_extension_data(network, 'scada_db') + db = get_extension_data(network, "scada_db") print(db) - scada_db = '' + scada_db = "" if db: scada_db = db print(scada_db) - op['scada_db'] = scada_db - + op["scada_db"] = scada_db + close_project(network) return cs + @app.post("/readinp/") async def fastapi_read_inp(network: str, inp: str) -> bool: read_inp(network, inp) return True + @app.get("/dumpinp/") async def fastapi_dump_inp(network: str, inp: str) -> bool: dump_inp(network, inp) return True + # 必须用这个PlainTextResponse,不然每个key都有引号 -@app.get("/runproject/", response_class = PlainTextResponse) +@app.get("/runproject/", response_class=PlainTextResponse) async def fastapi_run_project(network: str) -> str: lock_key = "exclusive_api_lock" timeout = 120 # 锁自动过期时间(秒) @@ -299,16 +338,17 @@ async def fastapi_run_project(network: str) -> str: acquired = redis_client.set(lock_key, "locked", nx=True, ex=timeout) if not acquired: - raise HTTPException(status_code=409, detail="is in simulation") + raise HTTPException(status_code=409, detail="is in simulation") else: try: return run_project(network) finally: # 手动释放锁(可选,依赖过期时间自动释放更安全) redis_client.delete(lock_key) - + + # DingZQ, 2025-02-04, 返回dict[str, Any] -# output 和 report +# output 和 report # output 是 json # report 是 text @app.get("/runprojectreturndict/") @@ -320,33 +360,38 @@ async def 
fastapi_run_project_return_dict(network: str) -> dict[str, Any]: acquired = redis_client.set(lock_key, "locked", nx=True, ex=timeout) if not acquired: - raise HTTPException(status_code=409, detail="is in simulation") + raise HTTPException(status_code=409, detail="is in simulation") else: try: return run_project_return_dict(network) finally: # 手动释放锁(可选,依赖过期时间自动释放更安全) - redis_client.delete(lock_key) - + redis_client.delete(lock_key) + + # put in inp folder, name without extension @app.get("/runinp/") async def fastapi_run_inp(network: str) -> str: return run_inp(network) + # path is absolute path @app.get("/dumpoutput/") async def fastapi_dump_output(output: str) -> str: return dump_output(output) + @app.get("/isprojectlocked/") async def fastapi_is_locked(network: str, req: Request): return str in lockedPrjs.keys() + @app.get("/isprojectlockedbyme/") async def fastapi_is_locked_by_me(network: str, req: Request): client_host = req.client.host return lockedPrjs.get(network) == client_host + # 0 successfully locked # 1 already locked by you # 2 locked by others @@ -362,6 +407,7 @@ async def fastapi_lock_project(network: str, req: Request): else: return 2 + @app.post("/unlockproject/") def fastapi_unlock_project(network: str, req: Request): client_host = req.client.host @@ -369,307 +415,380 @@ def fastapi_unlock_project(network: str, req: Request): print("delete key") del lockedPrjs[network] return True - + return False + ### operations -@app.get('/getcurrentoperationid/') + +@app.get("/getcurrentoperationid/") async def fastapi_get_current_operaiton_id(network: str) -> int: return get_current_operation(network) -@app.post('/undo/') + +@app.post("/undo/") async def fastapi_undo(network: str): return execute_undo(network) -@app.post('/redo/') + +@app.post("/redo/") async def fastapi_redo(network: str): return execute_redo(network) -@app.get('/getsnapshots/') + +@app.get("/getsnapshots/") def fastapi_list_snapshot(network: str) -> list[tuple[int, str]]: return 
list_snapshot(network) -@app.get('/havesnapshot/') + +@app.get("/havesnapshot/") async def fastapi_have_snapshot(network: str, tag: str) -> bool: return have_snapshot(network, tag) - -@app.get('/havesnapshotforoperation/') + + +@app.get("/havesnapshotforoperation/") async def fastapi_have_snapshot_for_operation(network: str, operation: int) -> bool: return have_snapshot_for_operation(network, operation) -@app.get('/havesnapshotforcurrentoperation/') + +@app.get("/havesnapshotforcurrentoperation/") async def fastapi_have_snapshot_for_current_operation(network: str) -> bool: return have_snapshot_for_current_operation(network) -@app.post('/takesnapshotforoperation/') -async def fastapi_take_snapshot_for_operation(network: str, operation: int, tag: str) -> None: + +@app.post("/takesnapshotforoperation/") +async def fastapi_take_snapshot_for_operation( + network: str, operation: int, tag: str +) -> None: return take_snapshot_for_operation(network, operation, tag) -@app.post('takenapshotforcurrentoperation') + +@app.post("takenapshotforcurrentoperation") async def fastapi_take_snapshot_for_current_operation(network: str, tag: str) -> None: return take_snapshot_for_current_operation(network, tag) - -@app.post('/takesnapshot/') + + +@app.post("/takesnapshot/") def fastapi_take_snapshot(network: str, tag: str) -> None: return take_snapshot(network, tag) -@app.post('/picksnapshot/',response_model=None) + +@app.post("/picksnapshot/", response_model=None) def fastapi_pick_snapshot(network: str, tag: str, discard: bool = False) -> ChangeSet: return pick_snapshot(network, tag, discard) -@app.post('/pickoperation/',response_model=None) -async def fastapi_pick_operation(network: str, operation: int, discard: bool = False) -> ChangeSet: + +@app.post("/pickoperation/", response_model=None) +async def fastapi_pick_operation( + network: str, operation: int, discard: bool = False +) -> ChangeSet: return pick_operation(network, operation, discard) 
-@app.get("/syncwithserver/",response_model=None) + +@app.get("/syncwithserver/", response_model=None) async def fastapi_sync_with_server(network: str, operation: int) -> ChangeSet: return sync_with_server(network, operation) -@app.post("/batch/",response_model=None) -async def fastapi_execute_batch_commands(network: str, req: Request)-> ChangeSet: + +@app.post("/batch/", response_model=None) +async def fastapi_execute_batch_commands(network: str, req: Request) -> ChangeSet: jo_root = await req.json() cs: ChangeSet = ChangeSet() - cs.operations = jo_root['operations'] + cs.operations = jo_root["operations"] rcs = execute_batch_commands(network, cs) return rcs -@app.post("/compressedbatch/",response_model=None) -async def fastapi_execute_compressed_batch_commands(network: str, req: Request)-> ChangeSet: + +@app.post("/compressedbatch/", response_model=None) +async def fastapi_execute_compressed_batch_commands( + network: str, req: Request +) -> ChangeSet: jo_root = await req.json() cs: ChangeSet = ChangeSet() - cs.operations = jo_root['operations'] + cs.operations = jo_root["operations"] return execute_batch_command(network, cs) + @app.get("/getrestoreoperation/") -async def fastapi_get_restore_operation(network : str) -> int: +async def fastapi_get_restore_operation(network: str) -> int: return get_restore_operation(network) + @app.post("/setrestoreoperation/") async def fastapi_set_restore_operation(network: str, operation: int) -> None: return set_restore_operation(network, operation) - + + ############################################################ # type ############################################################ -@app.get('/isnode/') + +@app.get("/isnode/") async def fastapi_is_node(network: str, node: str) -> bool: return is_node(network, node) -@app.get('/isjunction/') + +@app.get("/isjunction/") async def fastapi_is_junction(network: str, node: str) -> bool: return is_junction(network, node) -@app.get('/isreservoir/') + +@app.get("/isreservoir/") async 
def fastapi_is_reservoir(network: str, node: str) -> bool: return is_reservoir(network, node) -@app.get('/istank/') + +@app.get("/istank/") async def fastapi_is_tank(network: str, node: str) -> bool: return is_tank(network, node) -@app.get('/islink/') + +@app.get("/islink/") async def fastapi_is_link(network: str, link: str) -> bool: return is_link(network, link) -@app.get('/ispipe/') + +@app.get("/ispipe/") async def fastapi_is_pipe(network: str, link: str) -> bool: return is_pipe(network, link) -@app.get('/ispump/') + +@app.get("/ispump/") async def fastapi_is_pump(network: str, link: str) -> bool: return is_pump(network, link) -@app.get('/isvalve/') + +@app.get("/isvalve/") async def fastapi_is_valve(network: str, link: str) -> bool: return is_valve(network, link) -# DingZQ, 2025-02-05 -@app.get('/getnodetype/') -async def fastapi_get_node_type(network: str, node: str) -> str: - return get_node_type(network, node) -@app.get('/getlinktype/') +# DingZQ, 2025-02-05 +@app.get("/getnodetype/") +async def fastapi_get_node_type(network: str, node: str) -> str: + return get_node_type(network, node) + + +@app.get("/getlinktype/") async def fastapi_get_link_type(network: str, link: str) -> str: return get_link_type(network, link) -@app.get('/getelementtype/') + +@app.get("/getelementtype/") async def fastapi_get_element_type(network: str, element: str) -> str: return get_element_type(network, element) -@app.get('/getelementtypevalue/') + +@app.get("/getelementtypevalue/") async def fastapi_get_element_type_value(network: str, element: str) -> int: return get_element_type_value(network, element) -@app.get('/iscurve/') + +@app.get("/iscurve/") async def fastapi_is_curve(network: str, curve: str) -> bool: return is_curve(network, curve) -@app.get('/ispattern/') + +@app.get("/ispattern/") async def fastapi_is_pattern(network: str, pattern: str) -> bool: return is_pattern(network, pattern) + @app.get("/getnodes/") async def fastapi_get_nodes(network: str) -> list[str]: return 
get_nodes(network) + @app.get("/getlinks/") async def fastapi_get_links(network: str) -> list[str]: return get_links(network) + @app.get("/getcurves/") async def fastapi_get_curves(network: str) -> list[str]: return get_curves(network) + @app.get("/getpatterns/") async def fastapi_get_patterns(network: str) -> list[str]: return get_patterns(network) + @app.get("/getnodelinks/") def get_node_links(network: str, node: str) -> list[str]: return get_node_links(network, node) + ############################################################ # DingZQ, 2025-02-05 # 用统一的接口来获取 Node & Link properties, Node和Link的Id可以一样,不能进一步统一成获取Element 的 properties # Node & Link properties ############################################################ -@app.get('/getnodeproperties/') +@app.get("/getnodeproperties/") async def fast_get_node_properties(network: str, node: str) -> dict[str, Any]: return get_node_properties(network, node) -@app.get('/getlinkproperties/') + +@app.get("/getlinkproperties/") async def fast_get_link_properties(network: str, link: str) -> dict[str, Any]: return get_link_properties(network, link) -@app.get('/getscadaproperties/') + +@app.get("/getscadaproperties/") async def fast_get_scada_properties(network: str, scada: str) -> dict[str, Any]: return get_scada_info(network, scada) -@app.get('/getallscadaproperties/') + +@app.get("/getallscadaproperties/") async def fast_get_all_scada_properties(network: str) -> list[dict[str, Any]]: return get_all_scada_info(network) + # elementtype can be 'node' or 'link' or 'scada' -@app.get('/getelementpropertieswithtype/') -async def fast_get_element_properties_with_type(network: str, elementtype: str, element: str) -> dict[str, Any]: +@app.get("/getelementpropertieswithtype/") +async def fast_get_element_properties_with_type( + network: str, elementtype: str, element: str +) -> dict[str, Any]: return get_element_properties_with_type(network, elementtype, element) + # type can be 'node' or 'link' or 'scada' 
-@app.get('/getelementproperties/') +@app.get("/getelementproperties/") async def fast_get_element_properties(network: str, element: str) -> dict[str, Any]: return get_element_properties(network, element) + ############################################################ # title 1.[TITLE] ############################################################ -@app.get('/gettitleschema/') +@app.get("/gettitleschema/") async def fast_get_title_schema(network: str) -> dict[str, dict[str, Any]]: return get_title_schema(network) -@app.get('/gettitle/') + +@app.get("/gettitle/") async def fast_get_title(network: str) -> dict[str, Any]: return get_title(network) -@app.get('/settitle/',response_model=None) + +@app.get("/settitle/", response_model=None) async def fastapi_set_title(network: str, req: Request) -> ChangeSet: props = await req.json() return set_title(network, ChangeSet(props)) + ############################################################ # junction 2.[JUNCTIONS] ############################################################ -@app.get('/getjunctionschema') +@app.get("/getjunctionschema") async def fast_get_junction_schema(network: str) -> dict[str, dict[str, Any]]: return get_junction_schema(network) -@app.post("/addjunction/",response_model=None) -async def fastapi_add_junction(network: str, junction: str, x: float, y: float, z: float) -> ChangeSet: - ps = { 'id' : junction, - 'x' : x, - 'y' : y, - 'elevation' : z } + +@app.post("/addjunction/", response_model=None) +async def fastapi_add_junction( + network: str, junction: str, x: float, y: float, z: float +) -> ChangeSet: + ps = {"id": junction, "x": x, "y": y, "elevation": z} return add_junction(network, ChangeSet(ps)) -@app.post("/deletejunction/",response_model=None) + +@app.post("/deletejunction/", response_model=None) async def fastapi_delete_junction(network: str, junction: str) -> ChangeSet: - ps = {'id' : junction} + ps = {"id": junction} return delete_junction(network, ChangeSet(ps)) + 
@app.get("/getjunctionelevation/") async def fastapi_get_junction_elevation(network: str, junction: str) -> float: ps = get_junction(network, junction) - return ps['elevation'] + return ps["elevation"] + @app.get("/getjunctionx/") async def fastapi_get_junction_x(network: str, junction: str) -> float: ps = get_junction(network, junction) - return ps['x'] + return ps["x"] + @app.get("/getjunctiony/") async def fastapi_get_junction_x(network: str, junction: str) -> float: ps = get_junction(network, junction) - return ps['y'] + return ps["y"] + @app.get("/getjunctioncoord/") async def fastapi_get_junction_coord(network: str, junction: str) -> dict[str, float]: ps = get_junction(network, junction) - coord = { 'x' : ps['x'], - 'y' : ps['y'] } + coord = {"x": ps["x"], "y": ps["y"]} return coord + @app.get("/getjunctiondemand/") async def fastapi_get_junction_demand(network: str, junction: str) -> float: ps = get_junction(network, junction) - return ps['demand'] + return ps["demand"] + @app.get("/getjunctionpattern/") async def fastapi_get_junction_pattern(network: str, junction: str) -> str: ps = get_junction(network, junction) - return ps['pattern'] + return ps["pattern"] -@app.post("/setjunctionelevation/",response_model=None) -async def fastapi_set_junction_elevation(network: str, junction: str, elevation: float) -> ChangeSet: - ps = { 'id' : junction, - 'elevation' : elevation } + +@app.post("/setjunctionelevation/", response_model=None) +async def fastapi_set_junction_elevation( + network: str, junction: str, elevation: float +) -> ChangeSet: + ps = {"id": junction, "elevation": elevation} return set_junction(network, ChangeSet(ps)) -@app.post("/setjunctionx/",response_model=None) + +@app.post("/setjunctionx/", response_model=None) async def fastapi_set_junction_x(network: str, junction: str, x: float) -> ChangeSet: - ps = { 'id' : junction, - 'x' : x } + ps = {"id": junction, "x": x} return set_junction(network, ChangeSet(ps)) - 
-@app.post("/setjunctiony/",response_model=None) + + +@app.post("/setjunctiony/", response_model=None) async def fastapi_set_junction_y(network: str, junction: str, y: float) -> ChangeSet: - ps = { 'id' : junction, - 'y' : y } + ps = {"id": junction, "y": y} return set_junction(network, ChangeSet(ps)) -@app.post("/setjunctioncoord/",response_model=None) -async def fastapi_set_junction_coord(network: str, junction: str, x: float, y: float) -> ChangeSet: - ps = { 'id' : junction, - 'x' : x, - 'y' : y } - return set_junction(network, ChangeSet(ps)) - -@app.post("/setjunctiondemand/",response_model=None) -async def fastapi_set_junction_demand(network: str, junction: str, demand: float) -> ChangeSet: - ps = { 'id' : junction, - 'demand' : demand } + +@app.post("/setjunctioncoord/", response_model=None) +async def fastapi_set_junction_coord( + network: str, junction: str, x: float, y: float +) -> ChangeSet: + ps = {"id": junction, "x": x, "y": y} return set_junction(network, ChangeSet(ps)) -@app.post("/setjunctionpattern/",response_model=None) -async def fastapi_set_junction_pattern(network: str, junction: str, pattern: str) -> ChangeSet: - ps = { 'id' : junction, - 'pattern' : pattern } + +@app.post("/setjunctiondemand/", response_model=None) +async def fastapi_set_junction_demand( + network: str, junction: str, demand: float +) -> ChangeSet: + ps = {"id": junction, "demand": demand} return set_junction(network, ChangeSet(ps)) + +@app.post("/setjunctionpattern/", response_model=None) +async def fastapi_set_junction_pattern( + network: str, junction: str, pattern: str +) -> ChangeSet: + ps = {"id": junction, "pattern": pattern} + return set_junction(network, ChangeSet(ps)) + + @app.get("/getjunctionproperties/") -async def fastapi_get_junction_properties(network: str, junction: str) -> dict[str, Any]: +async def fastapi_get_junction_properties( + network: str, junction: str +) -> dict[str, Any]: return get_junction(network, junction) + # DingZQ, 2025-03-29 
@app.get("/getalljunctionproperties/") async def fastapi_get_all_junction_properties(network: str) -> list[dict[str, Any]]: @@ -688,95 +807,117 @@ async def fastapi_get_all_junction_properties(network: str) -> list[dict[str, An return results -@app.post("/setjunctionproperties/",response_model=None) -async def fastapi_set_junction_properties(network: str, junction: str, req: Request) -> ChangeSet: +@app.post("/setjunctionproperties/", response_model=None) +async def fastapi_set_junction_properties( + network: str, junction: str, req: Request +) -> ChangeSet: props = await req.json() - ps = { 'id' : junction } | props + ps = {"id": junction} | props return set_junction(network, ChangeSet(ps)) + ############################################################ # reservoir 3.[RESERVOIRS] ############################################################ -@app.get('/getreservoirschema') +@app.get("/getreservoirschema") async def fast_get_reservoir_schema(network: str) -> dict[str, dict[str, Any]]: return get_reservoir_schema(network) -@app.post("/addreservoir/",response_model=None) -async def fastapi_add_reservoir(network: str, reservoir: str, x: float, y: float, head: float) -> ChangeSet: - ps = { 'id' : reservoir, - 'x' : x, - 'y' : y, - 'head' : head } - return add_reservoir(network, ChangeSet(ps)) -@app.post("/deletereservoir/",response_model=None) +@app.post("/addreservoir/", response_model=None) +async def fastapi_add_reservoir( + network: str, reservoir: str, x: float, y: float, head: float +) -> ChangeSet: + ps = {"id": reservoir, "x": x, "y": y, "head": head} + return add_reservoir(network, ChangeSet(ps)) + + +@app.post("/deletereservoir/", response_model=None) async def fastapi_delete_reservoir(network: str, reservoir: str) -> ChangeSet: - ps = { 'id' : reservoir } + ps = {"id": reservoir} return delete_reservoir(network, ChangeSet(ps)) + @app.get("/getreservoirhead/") async def fastapi_get_reservoir_head(network: str, reservoir: str) -> float | None: ps = 
get_reservoir(network, reservoir)
-    return ps['head']
+    return ps["head"]
+

 @app.get("/getreservoirpattern/")
 async def fastapi_get_reservoir_pattern(network: str, reservoir: str) -> str | None:
     ps = get_reservoir(network, reservoir)
-    return ps['pattern']
+    return ps["pattern"]
+

 @app.get("/getreservoirx/")
-async def fastapi_get_reservoir_x(network: str, reservoir: str) -> dict[str, float] | None:
+async def fastapi_get_reservoir_x(
+    network: str, reservoir: str
+) -> float | None:
     ps = get_reservoir(network, reservoir)
-    return ps['x']
+    return ps["x"]
+

 @app.get("/getreservoiry/")
-async def fastapi_get_reservoir_y(network: str, reservoir: str) -> dict[str, float] | None:
+async def fastapi_get_reservoir_y(
+    network: str, reservoir: str
+) -> float | None:
     ps = get_reservoir(network, reservoir)
-    return ps['y']
+    return ps["y"]
+

 @app.get("/getreservoircoord/")
-async def fastapi_get_reservoir_y(network: str, reservoir: str) -> dict[str, float] | None:
+async def fastapi_get_reservoir_coord(
+    network: str, reservoir: str
+) -> dict[str, Any] | None:
     ps = get_reservoir(network, reservoir)
-    coord = { 'id' : reservoir,
-              'x' : ps['x'],
-              'y' : ps['y'] }
+    coord = {"id": reservoir, "x": ps["x"], "y": ps["y"]}
     return coord
 
-@app.post("/setreservoirhead/",response_model=None)
-async def fastapi_set_reservoir_head(network: str, reservoir: str, head: float) -> ChangeSet:
-    ps = { 'id' : reservoir,
-           'head' : head }
+
+@app.post("/setreservoirhead/", response_model=None)
+async def fastapi_set_reservoir_head(
+    network: str, reservoir: str, head: float
+) -> ChangeSet:
+    ps = {"id": reservoir, "head": head}
     return set_reservoir(network, ChangeSet(ps))
 
-@app.post("/setreservoirpattern/",response_model=None)
-async def fastapi_set_reservoir_pattern(network: str, reservoir: str, pattern: str) -> ChangeSet:
-    ps = { 'id' : reservoir,
-           'pattern' : pattern }
+
+@app.post("/setreservoirpattern/", response_model=None)
+async def 
fastapi_set_reservoir_pattern(
+    network: str, reservoir: str, pattern: str
+) -> ChangeSet:
+    ps = {"id": reservoir, "pattern": pattern}
     return set_reservoir(network, ChangeSet(ps))
 
-@app.post("/setreservoirx/",response_model=None)
+
+@app.post("/setreservoirx/", response_model=None)
 async def fastapi_set_reservoir_x(network: str, reservoir: str, x: float) -> ChangeSet:
-    ps = { 'id' : reservoir,
-           'x' : x }
+    ps = {"id": reservoir, "x": x}
     return set_reservoir(network, ChangeSet(ps))
 
-@app.post("/setreservoirx/",response_model=None)
+
+@app.post("/setreservoiry/", response_model=None)
 async def fastapi_set_reservoir_y(network: str, reservoir: str, y: float) -> ChangeSet:
-    ps = { 'id' : reservoir,
-           'y' : y }
+    ps = {"id": reservoir, "y": y}
     return set_reservoir(network, ChangeSet(ps))
 
-@app.post("/setreservoircoord/",response_model=None)
-async def fastapi_set_reservoir_y(network: str, reservoir: str, x: float, y: float) -> ChangeSet:
-    ps = { 'id' : reservoir,
-           'x' : x,
-           'y' : y }
+
+@app.post("/setreservoircoord/", response_model=None)
+async def fastapi_set_reservoir_coord(
+    network: str, reservoir: str, x: float, y: float
+) -> ChangeSet:
+    ps = {"id": reservoir, "x": x, "y": y}
     return set_reservoir(network, ChangeSet(ps))
+
 
 @app.get("/getreservoirproperties/")
-async def fastapi_get_reservoir_properties(network: str, reservoir: str) -> dict[str, Any]:
+async def fastapi_get_reservoir_properties(
+    network: str, reservoir: str
+) -> dict[str, Any]:
     return get_reservoir(network, reservoir)
+

 # DingZQ, 2025-03-29
 @app.get("/getallreservoirproperties/")
 async def fastapi_get_all_reservoir_properties(network: str) -> list[dict[str, Any]]:
@@ -795,168 +936,212 @@ async def fastapi_get_all_reservoir_properties(network: str) -> list[dict[str, A
     return results
 
-@app.post("/setreservoirproperties/",response_model=None)
-async def fastapi_set_reservoir_properties(network: str, reservoir: str
-                                           , req: Request) -> ChangeSet:
+@app.post("/setreservoirproperties/", 
response_model=None) +async def fastapi_set_reservoir_properties( + network: str, reservoir: str, req: Request +) -> ChangeSet: props = await req.json() - ps = { 'id' : reservoir } | props + ps = {"id": reservoir} | props return set_reservoir(network, ChangeSet(ps)) ############################################################ # tank 4.[TANKS] ############################################################ -@app.get('/gettankschema') +@app.get("/gettankschema") async def fast_get_tank_schema(network: str) -> dict[str, dict[str, Any]]: return get_tank_schema(network) -@app.post("/addtank/",response_model=None) -async def fastapi_add_tank(network: str, tank: str, x: float, y: float, elevation: float, init_level: float = 0, min_level: float = 0, max_level: float = 0, diameter: float = 0, min_vol: float = 0) -> ChangeSet: - ps = { 'id' : tank, - 'x' : x, - 'y' : y, - 'elevation' : elevation, - 'init_level' : init_level, - 'min_level' : min_level, - 'max_level' : max_level, - 'diameter' : diameter, - 'min_vol' : min_vol - } - return add_tank(network, ChangeSet(ps)) -@app.post("/deletetank/",response_model=None) +@app.post("/addtank/", response_model=None) +async def fastapi_add_tank( + network: str, + tank: str, + x: float, + y: float, + elevation: float, + init_level: float = 0, + min_level: float = 0, + max_level: float = 0, + diameter: float = 0, + min_vol: float = 0, +) -> ChangeSet: + ps = { + "id": tank, + "x": x, + "y": y, + "elevation": elevation, + "init_level": init_level, + "min_level": min_level, + "max_level": max_level, + "diameter": diameter, + "min_vol": min_vol, + } + return add_tank(network, ChangeSet(ps)) + + +@app.post("/deletetank/", response_model=None) async def fastapi_delete_tank(network: str, tank: str) -> ChangeSet: - ps = { 'id' : tank } + ps = {"id": tank} return delete_tank(network, ChangeSet(ps)) + @app.get("/gettankelevation/") async def fastapi_get_tank_elevation(network: str, tank: str) -> float | None: ps = get_tank(network, tank) - return 
ps['elevation'] + return ps["elevation"] + @app.get("/gettankinitlevel/") async def fastapi_get_tank_init_level(network: str, tank: str) -> float | None: ps = get_tank(network, tank) - return ps['init_level'] + return ps["init_level"] + @app.get("/gettankminlevel/") async def fastapi_get_tank_min_level(network: str, tank: str) -> float | None: ps = get_tank(network, tank) - return ps['min_level'] + return ps["min_level"] + @app.get("/gettankmaxlevel/") async def fastapi_get_tank_max_level(network: str, tank: str) -> float | None: ps = get_tank(network, tank) - return ps['max_level'] + return ps["max_level"] + @app.get("/gettankdiameter/") async def fastapi_get_tank_diameter(network: str, tank: str) -> float | None: ps = get_tank(network, tank) - return ps['diameter'] + return ps["diameter"] + @app.get("/gettankminvol/") async def fastapi_get_tank_min_vol(network: str, tank: str) -> float | None: ps = get_tank(network, tank) - return ps['min_vol'] + return ps["min_vol"] + @app.get("/gettankvolcurve/") async def fastapi_get_tank_vol_curve(network: str, tank: str) -> str | None: ps = get_tank(network, tank) - return ps['vol_curve'] + return ps["vol_curve"] + @app.get("/gettankoverflow/") async def fastapi_get_tank_overflow(network: str, tank: str) -> str | None: ps = get_tank(network, tank) - return ps['overflow'] + return ps["overflow"] + @app.get("/gettankx/") async def fastapi_get_tank_x(network: str, tank: str) -> float: ps = get_tank(network, tank) - return ps['x'] + return ps["x"] + @app.get("/gettanky/") async def fastapi_get_tank_x(network: str, tank: str) -> float: ps = get_tank(network, tank) - return ps['y'] + return ps["y"] + @app.get("/gettankcoord/") async def fastapi_get_tank_coord(network: str, tank: str) -> dict[str, float]: ps = get_tank(network, tank) - coord = { 'x' : ps['x'], - 'y' : ps['y'] } + coord = {"x": ps["x"], "y": ps["y"]} return coord -@app.post("/settankelevation/",response_model=None) -async def fastapi_set_tank_elevation(network: str, 
tank: str, elevation: float) -> ChangeSet:
-    ps = { 'id' : tank,
-           'elevation' : elevation }
+
+@app.post("/settankelevation/", response_model=None)
+async def fastapi_set_tank_elevation(
+    network: str, tank: str, elevation: float
+) -> ChangeSet:
+    ps = {"id": tank, "elevation": elevation}
     return set_tank(network, ChangeSet(ps))
 
-@app.post("/settankinitlevel/",response_model=None)
-async def fastapi_set_tank_init_level(network: str, tank: str, init_level: float) -> ChangeSet:
-    ps = { 'id' : tank,
-           'init_level' : init_level }
+
+@app.post("/settankinitlevel/", response_model=None)
+async def fastapi_set_tank_init_level(
+    network: str, tank: str, init_level: float
+) -> ChangeSet:
+    ps = {"id": tank, "init_level": init_level}
     return set_tank(network, ChangeSet(ps))
 
-@app.post("/settankminlevel/",response_model=None)
-async def fastapi_set_tank_min_level(network: str, tank: str, min_level: float) -> ChangeSet:
-    ps = { 'id' : tank,
-           'min_level' : min_level }
+
+@app.post("/settankminlevel/", response_model=None)
+async def fastapi_set_tank_min_level(
+    network: str, tank: str, min_level: float
+) -> ChangeSet:
+    ps = {"id": tank, "min_level": min_level}
     return set_tank(network, ChangeSet(ps))
 
-@app.post("/settankmaxlevel/",response_model=None)
-async def fastapi_set_tank_max_level(network: str, tank: str, max_level: float) -> ChangeSet:
-    ps = { 'id' : tank,
-           'max_level' : max_level }
+
+@app.post("/settankmaxlevel/", response_model=None)
+async def fastapi_set_tank_max_level(
+    network: str, tank: str, max_level: float
+) -> ChangeSet:
+    ps = {"id": tank, "max_level": max_level}
     return set_tank(network, ChangeSet(ps))
 
-@app.post("settankdiameter//",response_model=None)
-async def fastapi_set_tank_diameter(network: str, tank: str, diameter: float) -> ChangeSet:
-    ps = { 'id' : tank,
-           'diameter' : diameter }
+
+@app.post("/settankdiameter/", response_model=None)
+async def fastapi_set_tank_diameter(
+    network: str, tank: str, diameter: float
+) -> ChangeSet:
+    
ps = {"id": tank, "diameter": diameter} return set_tank(network, ChangeSet(ps)) -@app.post("/settankminvol/",response_model=None) -async def fastapi_set_tank_min_vol(network: str, tank: str, min_vol: float) -> ChangeSet: - ps = { 'id' : tank, - 'min_vol' : min_vol } + +@app.post("/settankminvol/", response_model=None) +async def fastapi_set_tank_min_vol( + network: str, tank: str, min_vol: float +) -> ChangeSet: + ps = {"id": tank, "min_vol": min_vol} return set_tank(network, ChangeSet(ps)) -@app.post("/settankvolcurve/",response_model=None) -async def fastapi_set_tank_vol_curve(network: str, tank: str, vol_curve: str) -> ChangeSet: - ps = { 'id' : tank, - 'vol_curve' : vol_curve} + +@app.post("/settankvolcurve/", response_model=None) +async def fastapi_set_tank_vol_curve( + network: str, tank: str, vol_curve: str +) -> ChangeSet: + ps = {"id": tank, "vol_curve": vol_curve} return set_tank(network, ChangeSet(ps)) -@app.post("/settankoverflow/",response_model=None) -async def fastapi_set_tank_overflow(network: str, tank: str, overflow: str) -> ChangeSet: - ps = { 'id' : tank, - 'overflow' : overflow } + +@app.post("/settankoverflow/", response_model=None) +async def fastapi_set_tank_overflow( + network: str, tank: str, overflow: str +) -> ChangeSet: + ps = {"id": tank, "overflow": overflow} return set_tank(network, ChangeSet(ps)) -@app.post("/settankx/",response_model=None) + +@app.post("/settankx/", response_model=None) async def fastapi_set_tank_x(network: str, tank: str, x: float) -> ChangeSet: - ps = { 'id' : tank, - 'x' : x } + ps = {"id": tank, "x": x} return set_tank(network, ChangeSet(ps)) -@app.post("/settanky/",response_model=None) + +@app.post("/settanky/", response_model=None) async def fastapi_set_tank_y(network: str, tank: str, y: float) -> ChangeSet: - ps = { 'id' : tank, - 'y' : y } + ps = {"id": tank, "y": y} return set_tank(network, ChangeSet(ps)) -@app.post("/settankcoord/",response_model=None) -async def fastapi_set_tank_coord(network: str, tank: 
str, x: float, y: float) -> ChangeSet: - ps = { 'id' : tank, - 'x' : x, - 'y' : y } + +@app.post("/settankcoord/", response_model=None) +async def fastapi_set_tank_coord( + network: str, tank: str, x: float, y: float +) -> ChangeSet: + ps = {"id": tank, "x": x, "y": y} return set_tank(network, ChangeSet(ps)) + @app.get("/gettankproperties/") async def fastapi_get_tank_properties(network: str, tank: str) -> dict[str, Any]: return get_tank(network, tank) + # DingZQ, 2025-03-29 @app.get("/getalltankproperties/") async def fastapi_get_all_tank_properties(network: str) -> list[dict[str, Any]]: @@ -974,112 +1159,142 @@ async def fastapi_get_all_tank_properties(network: str) -> list[dict[str, Any]]: return results -@app.post("/settankproperties/",response_model=None) -async def fastapi_set_tank_properties(network: str, tank: str, req: Request) -> ChangeSet: + +@app.post("/settankproperties/", response_model=None) +async def fastapi_set_tank_properties( + network: str, tank: str, req: Request +) -> ChangeSet: props = await req.json() - ps = { 'id' : tank } | props - return set_tank(network, ChangeSet(ps)) + ps = {"id": tank} | props + return set_tank(network, ChangeSet(ps)) + ############################################################ # pipe 4.[PIPES] ############################################################ -@app.get('/getpipeschema') +@app.get("/getpipeschema") async def fastapi_get_pipe_schema(network: str) -> dict[str, dict[str, Any]]: return get_pipe_schema(network) -@app.post("/addpipe/",response_model=None) -async def fastapi_add_pipe(network: str, pipe: str, node1: str, node2: str, length: float = 0, - diameter: float = 0, roughness: float = 0, minor_loss: float = 0, status: str = PIPE_STATUS_OPEN) -> ChangeSet: - ps = { 'id' : pipe, - 'node1' : node1, - 'node2' : node2, - 'length' : length, - 'diameter' : diameter, - 'roughness' : roughness, - 'minor_loss' : minor_loss, - 'status' : status } + +@app.post("/addpipe/", response_model=None) +async def 
fastapi_add_pipe( + network: str, + pipe: str, + node1: str, + node2: str, + length: float = 0, + diameter: float = 0, + roughness: float = 0, + minor_loss: float = 0, + status: str = PIPE_STATUS_OPEN, +) -> ChangeSet: + ps = { + "id": pipe, + "node1": node1, + "node2": node2, + "length": length, + "diameter": diameter, + "roughness": roughness, + "minor_loss": minor_loss, + "status": status, + } return add_pipe(network, ChangeSet(ps)) -@app.post("/deletepipe/",response_model=None) + +@app.post("/deletepipe/", response_model=None) async def fastapi_delete_pipe(network: str, pipe: str) -> ChangeSet: - ps = {'id' : pipe} + ps = {"id": pipe} return delete_pipe(network, ChangeSet(ps)) + @app.get("/getpipenode1/") async def fastapi_get_pipe_node1(network: str, pipe: str) -> str | None: ps = get_pipe(network, pipe) - return ps['node1'] + return ps["node1"] + @app.get("/getpipenode2/") async def fastapi_get_pipe_node2(network: str, pipe: str) -> str | None: ps = get_pipe(network, pipe) - return ps['node2'] + return ps["node2"] + @app.get("/getpipelength/") async def fastapi_get_pipe_length(network: str, pipe: str) -> float | None: ps = get_pipe(network, pipe) - return ps['length'] + return ps["length"] + @app.get("/getpipediameter/") async def fastapi_get_pipe_diameter(network: str, pipe: str) -> float | None: ps = get_pipe(network, pipe) - return ps['diameter'] + return ps["diameter"] + @app.get("/getpiperoughness/") async def fastapi_get_pipe_roughness(network: str, pipe: str) -> float | None: ps = get_pipe(network, pipe) - return ps['roughness'] + return ps["roughness"] + @app.get("/getpipeminorloss/") async def fastapi_get_pipe_minor_loss(network: str, pipe: str) -> float | None: ps = get_pipe(network, pipe) - return ps['minor_loss'] + return ps["minor_loss"] + @app.get("/getpipestatus/") async def fastapi_get_pipe_status(network: str, pipe: str) -> str | None: ps = get_pipe(network, pipe) - return ps['status'] + return ps["status"] 
-@app.post("/setpipenode1/",response_model=None) + +@app.post("/setpipenode1/", response_model=None) async def fastapi_set_pipe_node1(network: str, pipe: str, node1: str) -> ChangeSet: - ps = { 'id' : pipe, - 'node1' : node1 } + ps = {"id": pipe, "node1": node1} return set_pipe(network, ChangeSet(ps)) -@app.post("/setpipenode2/",response_model=None) + +@app.post("/setpipenode2/", response_model=None) async def fastapi_set_pipe_node2(network: str, pipe: str, node2: str) -> ChangeSet: - ps = { 'id' : pipe, - 'node2' : node2 } + ps = {"id": pipe, "node2": node2} return set_pipe(network, ChangeSet(ps)) -@app.post("/setpipelength/",response_model=None) + +@app.post("/setpipelength/", response_model=None) async def fastapi_set_pipe_length(network: str, pipe: str, length: float) -> ChangeSet: - ps = { 'id' : pipe, - 'length' : length } + ps = {"id": pipe, "length": length} return set_pipe(network, ChangeSet(ps)) -@app.post("/setpipediameter/",response_model=None) -async def fastapi_set_pipe_diameter(network: str, pipe: str, diameter: float) -> ChangeSet: - ps = { 'id' : pipe, - 'diameter' : diameter } + +@app.post("/setpipediameter/", response_model=None) +async def fastapi_set_pipe_diameter( + network: str, pipe: str, diameter: float +) -> ChangeSet: + ps = {"id": pipe, "diameter": diameter} return set_pipe(network, ChangeSet(ps)) -@app.post("/setpiperoughness/",response_model=None) -async def fastapi_set_pipe_roughness(network: str, pipe: str, roughness: float) -> ChangeSet: - ps = { 'id' : pipe, - 'roughness' : roughness } + +@app.post("/setpiperoughness/", response_model=None) +async def fastapi_set_pipe_roughness( + network: str, pipe: str, roughness: float +) -> ChangeSet: + ps = {"id": pipe, "roughness": roughness} return set_pipe(network, ChangeSet(ps)) -@app.post("/setpipeminorloss/",response_model=None) -async def fastapi_set_pipe_minor_loss(network: str, pipe: str, minor_loss: float) -> ChangeSet: - ps = { 'id' : pipe, - 'minor_loss' : minor_loss } + 
+@app.post("/setpipeminorloss/", response_model=None) +async def fastapi_set_pipe_minor_loss( + network: str, pipe: str, minor_loss: float +) -> ChangeSet: + ps = {"id": pipe, "minor_loss": minor_loss} return set_pipe(network, ChangeSet(ps)) -@app.post("/setpipestatus/",response_model=None) + +@app.post("/setpipestatus/", response_model=None) async def fastapi_set_pipe_status(network: str, pipe: str, status: str) -> ChangeSet: - ps = { 'id' : pipe, - 'status' : status } + ps = {"id": pipe, "status": status} print(status) print(ps) @@ -1088,12 +1303,14 @@ async def fastapi_set_pipe_status(network: str, pipe: str, status: str) -> Chang print(ret) return ret + @app.get("/getpipeproperties/") async def fastapi_get_pipe_properties(network: str, pipe: str) -> dict[str, Any]: return get_pipe(network, pipe) + # DingZQ, 2025-03-29 -@app.get('/getallpipeproperties/') +@app.get("/getallpipeproperties/") async def fastapi_get_all_pipe_properties(network: str) -> list[dict[str, Any]]: # 缓存查询结果提高性能 global redis_client @@ -1110,62 +1327,68 @@ async def fastapi_get_all_pipe_properties(network: str) -> list[dict[str, Any]]: return results -@app.post("/setpipeproperties/",response_model=None) -async def fastapi_set_pipe_properties(network: str, pipe: str, req: Request) -> ChangeSet: +@app.post("/setpipeproperties/", response_model=None) +async def fastapi_set_pipe_properties( + network: str, pipe: str, req: Request +) -> ChangeSet: props = await req.json() - ps = { 'id' : pipe } | props + ps = {"id": pipe} | props return set_pipe(network, ChangeSet(ps)) - + ############################################################ # pump 4.[PUMPS] ############################################################ -@app.get('/getpumpschema') +@app.get("/getpumpschema") async def fastapi_get_pump_schema(network: str) -> dict[str, dict[str, Any]]: return get_pump_schema(network) -@app.post("/addpump/",response_model=None) -async def fastapi_add_pump(network: str, pump: str, node1: str, node2: str, power: 
float = 0.0) -> ChangeSet: - ps = { 'id' : pump, - 'node1' : node1, - 'node2' : node2, - 'power' : power - } + +@app.post("/addpump/", response_model=None) +async def fastapi_add_pump( + network: str, pump: str, node1: str, node2: str, power: float = 0.0 +) -> ChangeSet: + ps = {"id": pump, "node1": node1, "node2": node2, "power": power} return add_pump(network, ChangeSet(ps)) -@app.post("/deletepump/",response_model=None) + +@app.post("/deletepump/", response_model=None) async def fastapi_delete_pump(network: str, pump: str) -> ChangeSet: - ps = { 'id' : pump } - return delete_pump(network, ChangeSet(ps)) + ps = {"id": pump} + return delete_pump(network, ChangeSet(ps)) + @app.get("/getpumpnode1/") async def fastapi_get_pump_node1(network: str, pump: str) -> str | None: ps = get_pump(network, pump) - return ps['node1'] + return ps["node1"] + @app.get("/getpumpnode2/") async def fastapi_get_pump_node2(network: str, pump: str) -> str | None: ps = get_pump(network, pump) - return ps['node2'] + return ps["node2"] -@app.post("/setpumpnode1/",response_model=None) + +@app.post("/setpumpnode1/", response_model=None) async def fastapi_set_pump_node1(network: str, pump: str, node1: str) -> ChangeSet: - ps = { 'id' : pump, - 'node1' : node1 } + ps = {"id": pump, "node1": node1} return set_pump(network, ChangeSet(ps)) -@app.post("/setpumpnode2/",response_model=None) + +@app.post("/setpumpnode2/", response_model=None) async def fastapi_set_pump_node2(network: str, pump: str, node2: str) -> ChangeSet: - ps = { 'id' : pump, - 'node2' : node2 } + ps = {"id": pump, "node2": node2} return set_pump(network, ChangeSet(ps)) + @app.get("/getpumpproperties/") async def fastapi_get_pump_properties(network: str, pump: str) -> dict[str, Any]: return get_pump(network, pump) + # DingZQ, 2025-03-29 -@app.get('/getallpumpproperties/') +@app.get("/getallpumpproperties/") async def fastapi_get_all_pump_properties(network: str) -> list[dict[str, Any]]: # 缓存查询结果提高性能 global redis_client @@ -1181,103 
+1404,131 @@ async def fastapi_get_all_pump_properties(network: str) -> list[dict[str, Any]]: return results -@app.post("/setpumpproperties/",response_model=None) -async def fastapi_set_pump_properties(network: str, pump: str, req: Request) -> ChangeSet: + +@app.post("/setpumpproperties/", response_model=None) +async def fastapi_set_pump_properties( + network: str, pump: str, req: Request +) -> ChangeSet: props = await req.json() - ps = { 'id' : pump } | props - return set_pump(network, ChangeSet(ps)) + ps = {"id": pump} | props + return set_pump(network, ChangeSet(ps)) ############################################################ # valve 4.[VALVES] ############################################################ -@app.get('/getvalveschema') +@app.get("/getvalveschema") async def fastapi_get_valve_schema(network: str) -> dict[str, dict[str, Any]]: return get_valve_schema(network) -@app.post("/addvalve/",response_model=None) -async def fastapi_add_valve(network: str, valve: str, node1: str, node2: str, diameter: float = 0, v_type: str = VALVES_TYPE_PRV, setting: float = 0, minor_loss: float = 0) -> ChangeSet: - ps = { 'id' : valve, - 'node1' : node1, - 'node2' : node2, - 'diameter' : diameter, - 'v_type' : v_type, - 'setting' : setting, - 'minor_loss' : minor_loss } - - return add_valve(network, ChangeSet(ps)) -@app.post("/deletevalve/",response_model=None) +@app.post("/addvalve/", response_model=None) +async def fastapi_add_valve( + network: str, + valve: str, + node1: str, + node2: str, + diameter: float = 0, + v_type: str = VALVES_TYPE_PRV, + setting: float = 0, + minor_loss: float = 0, +) -> ChangeSet: + ps = { + "id": valve, + "node1": node1, + "node2": node2, + "diameter": diameter, + "v_type": v_type, + "setting": setting, + "minor_loss": minor_loss, + } + + return add_valve(network, ChangeSet(ps)) + + +@app.post("/deletevalve/", response_model=None) async def fastapi_delete_valve(network: str, valve: str) -> ChangeSet: - ps = { 'id' : valve } - return 
delete_valve(network, ChangeSet(ps)) + ps = {"id": valve} + return delete_valve(network, ChangeSet(ps)) + @app.get("/getvalvenode1/") async def fastapi_get_valve_node1(network: str, valve: str) -> str | None: ps = get_valve(network, valve) - return ps['node1'] + return ps["node1"] + @app.get("/getvalvenode2/") async def fastapi_get_valve_node2(network: str, valve: str) -> str | None: ps = get_valve(network, valve) - return ps['node2'] + return ps["node2"] + @app.get("/getvalvediameter/") async def fastapi_get_valve_diameter(network: str, valve: str) -> float | None: ps = get_valve(network, valve) - return ps['diameter'] + return ps["diameter"] + @app.get("/getvalvetype/") async def fastapi_get_valve_type(network: str, valve: str) -> str | None: ps = get_valve(network, valve) - return ps['type'] + return ps["type"] + @app.get("/getvalvesetting/") async def fastapi_get_valve_setting(network: str, valve: str) -> float | None: ps = get_valve(network, valve) - return ps['setting'] + return ps["setting"] + @app.get("/getvalveminorloss/") async def fastapi_get_valve_minor_loss(network: str, valve: str) -> float | None: ps = get_valve(network, valve) - return ps['minor_loss'] + return ps["minor_loss"] -@app.post("/setvalvenode1/",response_model=None) + +@app.post("/setvalvenode1/", response_model=None) async def fastapi_set_valve_node1(network: str, valve: str, node1: str) -> ChangeSet: - ps = { 'id' : valve, - 'node1' : node1 } + ps = {"id": valve, "node1": node1} return set_valve(network, ChangeSet(ps)) -@app.post("/setvalvenode2/",response_model=None) + +@app.post("/setvalvenode2/", response_model=None) async def fastapi_set_valve_node2(network: str, valve: str, node2: str) -> ChangeSet: - ps = { 'id' : valve, - 'node2' : node2 } + ps = {"id": valve, "node2": node2} return set_valve(network, ChangeSet(ps)) -@app.post("/setvalvenodediameter/",response_model=None) -async def fastapi_set_valve_diameter(network: str, valve: str, diameter: float) -> ChangeSet: - ps = { 'id' 
: valve, - 'diameter' : diameter } + +@app.post("/setvalvenodediameter/", response_model=None) +async def fastapi_set_valve_diameter( + network: str, valve: str, diameter: float +) -> ChangeSet: + ps = {"id": valve, "diameter": diameter} return set_valve(network, ChangeSet(ps)) -@app.post("/setvalvetype/",response_model=None) + +@app.post("/setvalvetype/", response_model=None) async def fastapi_set_valve_type(network: str, valve: str, type: str) -> ChangeSet: - ps = { 'id' : valve, - 'type' : type } + ps = {"id": valve, "type": type} return set_valve(network, ChangeSet(ps)) -@app.post("/setvalvesetting/",response_model=None) -async def fastapi_set_valve_setting(network: str, valve: str, setting: float) -> ChangeSet: - ps = { 'id' : valve, - 'setting' : setting } + +@app.post("/setvalvesetting/", response_model=None) +async def fastapi_set_valve_setting( + network: str, valve: str, setting: float +) -> ChangeSet: + ps = {"id": valve, "setting": setting} return set_valve(network, ChangeSet(ps)) + @app.get("/getvalveproperties/") async def fastapi_get_valve_properties(network: str, valve: str) -> dict[str, Any]: return get_valve(network, valve) + # DingZQ, 2025-03-29 -@app.get('/getallvalveproperties/') +@app.get("/getallvalveproperties/") async def fastapi_get_all_valve_properties(network: str) -> list[dict[str, Any]]: # 缓存查询结果提高性能 global redis_client @@ -1293,27 +1544,31 @@ async def fastapi_get_all_valve_properties(network: str) -> list[dict[str, Any]] return results -@app.post("/setvalveproperties/",response_model=None) -async def fastapi_set_valve_properties(network: str, valve: str, req: Request) -> ChangeSet: + +@app.post("/setvalveproperties/", response_model=None) +async def fastapi_set_valve_properties( + network: str, valve: str, req: Request +) -> ChangeSet: props = await req.json() - ps = { 'id' : valve } | props - return set_valve(network, ChangeSet(ps)) - - + ps = {"id": valve} | props + return set_valve(network, ChangeSet(ps)) + + # node & link 
-@app.post("/deletenode/",response_model=None) +@app.post("/deletenode/", response_model=None) async def fastapi_delete_node(network: str, node: str) -> ChangeSet: - ps = {'id' : node} + ps = {"id": node} if is_junction(network, node): return delete_junction(network, ChangeSet(ps)) elif is_reservoir(network, node): return delete_reservoir(network, ChangeSet(ps)) elif is_tank(network, node): return delete_tank(network, ChangeSet(ps)) - -@app.post("/deletelink/",response_model=None) + + +@app.post("/deletelink/", response_model=None) async def fastapi_delete_link(network: str, link: str) -> ChangeSet: - ps = {'id' : link} + ps = {"id": link} if is_pipe(network, link): return delete_pipe(network, ChangeSet(ps)) elif is_pump(network, link): @@ -1321,6 +1576,7 @@ async def fastapi_delete_link(network: str, link: str) -> ChangeSet: elif is_valve(network, link): return delete_valve(network, ChangeSet(ps)) + ############################################################ # tag 8.[TAGS] ############################################################ @@ -1329,150 +1585,181 @@ async def fastapi_delete_link(network: str, link: str) -> ChangeSet: # TAG_TYPE_LINK = api.TAG_TYPE_LINK # -@app.get('/gettagschema/') + +@app.get("/gettagschema/") async def fastapi_get_tag_schema(network: str) -> dict[str, dict[str, Any]]: return get_tag_schema(network) -@app.get('/gettag/') + +@app.get("/gettag/") async def fastapi_get_tag(network: str, t_type: str, id: str) -> dict[str, Any]: return get_tag(network, t_type, id) -@app.get('/gettags/') + +@app.get("/gettags/") async def fastapi_get_tags(network: str) -> list[dict[str, Any]]: tags = get_tags(network) print(tags) return tags + # example: # set_tag(p, ChangeSet({'t_type': TAG_TYPE_NODE, 'id': 'j1', 'tag': 'j1t' })) # set_tag(p, ChangeSet({'t_type': TAG_TYPE_LINK, 'id': 'p0', 'tag': 'p0t' })) -@app.post('/settag/',response_model=None) +@app.post("/settag/", response_model=None) async def fastapi_set_tag(network: str, req: Request) -> ChangeSet: 
props = await req.json() return set_tag(network, ChangeSet(props)) - + + ############################################################ # demand 9.[DEMANDS] ############################################################ -@app.get('/getdemandschema') +@app.get("/getdemandschema") async def fastapi_get_demand_schema(network: str) -> dict[str, dict[str, Any]]: return get_demand_schema(network) + @app.get("/getdemandproperties/") async def fastapi_get_demand_properties(network: str, junction: str) -> dict[str, Any]: return get_demand(network, junction) + # example: set_demand(p, ChangeSet({'junction': 'j1', 'demands': [{'demand': 10.0, 'pattern': None, 'category': 'x'}, {'demand': 20.0, 'pattern': None, 'category': None}]})) -@app.post("/setdemandproperties/",response_model=None) -async def fastapi_set_demand_properties(network: str, junction: str, req: Request) -> ChangeSet: +@app.post("/setdemandproperties/", response_model=None) +async def fastapi_set_demand_properties( + network: str, junction: str, req: Request +) -> ChangeSet: props = await req.json() - ps = { 'junction' : junction } | props - return set_demand(network, ChangeSet(ps)) + ps = {"junction": junction} | props + return set_demand(network, ChangeSet(ps)) + ############################################################ # status 10.[STATUS] init_status ############################################################ -@app.get('/getstatusschema') +@app.get("/getstatusschema") async def fastapi_get_status_schema(network: str) -> dict[str, dict[str, Any]]: return get_status_schema(network) + @app.get("/getstatus/") async def fastapi_get_status(network: str, link: str) -> dict[str, Any]: return get_status(network, link) + # example: set_status(p, ChangeSet({'link': 'p0', 'status': LINK_STATUS_OPEN, 'setting': 10.0})) -@app.post("/setstatus/",response_model=None) -async def fastapi_set_status_properties(network: str, link: str, req: Request) -> ChangeSet: +@app.post("/setstatus/", response_model=None) +async def 
fastapi_set_status_properties( + network: str, link: str, req: Request +) -> ChangeSet: props = await req.json() - ps = { 'link' : link } | props - return set_status(network, ChangeSet(ps)) + ps = {"link": link} | props + return set_status(network, ChangeSet(ps)) + ############################################################ # pattern 11.[PATTERNS] ############################################################ -@app.get('/getpatternschema') +@app.get("/getpatternschema") async def fastapi_get_pattern_schema(network: str) -> dict[str, dict[str, Any]]: return get_pattern_schema(network) -@app.post("/addpattern/",response_model=None) + +@app.post("/addpattern/", response_model=None) async def fastapi_add_pattern(network: str, pattern: str, req: Request) -> ChangeSet: props = await req.json() - ps = { - 'id' : pattern, - } | props + ps = { + "id": pattern, + } | props return add_pattern(network, ChangeSet(ps)) -@app.post("/deletepattern/",response_model=None) + +@app.post("/deletepattern/", response_model=None) async def fastapi_delete_pattern(network: str, pattern: str) -> ChangeSet: - ps = { 'id' : pattern } + ps = {"id": pattern} return delete_pattern(network, ChangeSet(ps)) + @app.get("/getpatternproperties/") async def fastapi_get_pattern_properties(network: str, pattern: str) -> dict[str, Any]: return get_pattern(network, pattern) + # example: set_pattern(p, ChangeSet({'id' : 'p0', 'factors': [1.0, 2.0, 3.0]})) -@app.post("/setpatternproperties/",response_model=None) -async def fastapi_set_pattern_properties(network: str, pattern: str, req: Request) -> ChangeSet: +@app.post("/setpatternproperties/", response_model=None) +async def fastapi_set_pattern_properties( + network: str, pattern: str, req: Request +) -> ChangeSet: props = await req.json() - ps = { 'id' : pattern } | props - return set_pattern(network, ChangeSet(ps)) + ps = {"id": pattern} | props + return set_pattern(network, ChangeSet(ps)) + ############################################################ # 
curve 12.[CURVES] ############################################################ -@app.get('/getcurveschema') +@app.get("/getcurveschema") async def fastapi_get_curve_schema(network: str) -> dict[str, dict[str, Any]]: return get_curve_schema(network) -@app.post("/addcurve/",response_model=None) + +@app.post("/addcurve/", response_model=None) async def fastapi_add_curve(network: str, curve: str, req: Request) -> ChangeSet: props = await req.json() print(props) - ps = { - 'id' : curve, - } | props - - print(ps) - - return add_curve(network, ChangeSet(ps)) + ps = { + "id": curve, + } | props -@app.post("/deletecurve/",response_model=None) + print(ps) + + return add_curve(network, ChangeSet(ps)) + + +@app.post("/deletecurve/", response_model=None) async def fastapi_delete_curve(network: str, curve: str) -> ChangeSet: - ps = { 'id' : curve } + ps = {"id": curve} return delete_curve(network, ChangeSet(ps)) + @app.get("/getcurveproperties/") async def fastapi_get_curve_properties(network: str, curve: str) -> dict[str, Any]: return get_curve(network, curve) + # example: set_curve(p, ChangeSet({'id' : 'c0', 'c_type' : CURVE_TYPE_PUMP, 'coords': [{'x': 1.0, 'y': 2.0}, {'x': 2.0, 'y': 1.0}]})) -@app.post("/setcurveproperties/",response_model=None) -async def fastapi_set_curve_properties(network: str, curve: str, req: Request) -> ChangeSet: +@app.post("/setcurveproperties/", response_model=None) +async def fastapi_set_curve_properties( + network: str, curve: str, req: Request +) -> ChangeSet: props = await req.json() # c_type放到request中 - ps = { 'id' : curve } | props - return set_curve(network, ChangeSet(ps)) + ps = {"id": curve} | props + return set_curve(network, ChangeSet(ps)) ############################################################ # control 13.[CONTROLS] ############################################################ -@app.get('/getcontrolschema/') +@app.get("/getcontrolschema/") async def fastapi_get_control_schema(network: str) -> dict[str, dict[str, Any]]: return 
get_control_schema(network) + @app.get("/getcontrolproperties/") async def fastapi_get_control_properties(network: str) -> dict[str, Any]: return get_control(network) + # example: set_control(p, ChangeSet({'control': 'x'})) -@app.post("/setcontrolproperties/",response_model=None) +@app.post("/setcontrolproperties/", response_model=None) async def fastapi_set_control_properties(network: str, req: Request) -> ChangeSet: props = await req.json() - return set_control(network, ChangeSet(props)) + return set_control(network, ChangeSet(props)) + ############################################################ # rule 14.[RULES] @@ -1481,12 +1768,14 @@ async def fastapi_set_control_properties(network: str, req: Request) -> ChangeSe async def fastapi_get_rule_schema(network: str) -> dict[str, dict[str, Any]]: return get_rule_schema(network) + @app.get("/getruleproperties/") async def fastapi_get_rule_properties(network: str) -> dict[str, Any]: return get_rule(network) + # example: set_rule(p, ChangeSet({'rule': 'x'})) -@app.post("/setruleproperties/",response_model=None) +@app.post("/setruleproperties/", response_model=None) async def fastapi_set_rule_properties(network: str, req: Request) -> ChangeSet: props = await req.json() return set_rule(network, ChangeSet(props)) @@ -1499,133 +1788,159 @@ async def fastapi_set_rule_properties(network: str, req: Request) -> ChangeSet: async def fastapi_get_energy_schema(network: str) -> dict[str, dict[str, Any]]: return get_energy_schema(network) + @app.get("/getenergyproperties/") async def fastapi_get_energy_properties(network: str) -> dict[str, Any]: return get_energy(network) -@app.post("/setenergyproperties/",response_model=None) + +@app.post("/setenergyproperties/", response_model=None) async def fastapi_set_energy_properties(network: str, req: Request) -> ChangeSet: props = await req.json() return set_energy(network, ChangeSet(props)) + @app.get("/getpumpenergyschema/") async def fastapi_get_pump_energy_schema(network: str) -> 
dict[str, dict[str, Any]]: return get_pump_energy_schema(network) + @app.get("/getpumpenergyproperties//") async def fastapi_get_pump_energy_proeprties(network: str, pump: str) -> dict[str, Any]: return get_pump_energy(network, pump) -@app.get("/setpumpenergyproperties//",response_model=None) -async def fastapi_set_pump_energy_properties(network: str, pump: str, req: Request) -> ChangeSet: + +@app.get("/setpumpenergyproperties//", response_model=None) +async def fastapi_set_pump_energy_properties( + network: str, pump: str, req: Request +) -> ChangeSet: props = await req.json() - ps = { 'id' : pump } | props + ps = {"id": pump} | props return set_pump_energy(network, ChangeSet(ps)) + ############################################################ # emitter 16.[EMITTERS] ############################################################ -@app.get('/getemitterschema') +@app.get("/getemitterschema") async def fastapi_get_emitter_schema(network: str) -> dict[str, dict[str, Any]]: return get_emitter_schema(network) + @app.get("/getemitterproperties/") async def fastapi_get_emitter_properties(network: str, junction: str) -> dict[str, Any]: return get_emitter(network, junction) + # example: set_emitter(p, ChangeSet({'junction': 'j1', 'coefficient': 10.0})) -@app.post("/setemitterproperties/",response_model=None) -async def fastapi_set_emitter_properties(network: str, junction: str, req: Request) -> ChangeSet: +@app.post("/setemitterproperties/", response_model=None) +async def fastapi_set_emitter_properties( + network: str, junction: str, req: Request +) -> ChangeSet: props = await req.json() - ps = { 'junction' : junction } | props - return set_emitter(network, ChangeSet(ps)) + ps = {"junction": junction} | props + return set_emitter(network, ChangeSet(ps)) ############################################################ # quality 17.[QUALITY] ############################################################ -@app.get('/getqualityschema/') +@app.get("/getqualityschema/") async def 
fastapi_get_quality_schema(network: str) -> dict[str, dict[str, Any]]: return get_quality_schema(network) -@app.get('/getqualityproperties/') + +@app.get("/getqualityproperties/") async def fastapi_get_quality_properties(network: str, node: str) -> dict[str, Any]: return get_quality(network, node) + # example: set_quality(p, ChangeSet({'node': 'j1', 'quality': 10.0})) -@app.post("/setqualityproperties/",response_model=None) +@app.post("/setqualityproperties/", response_model=None) async def fastapi_set_quality_properties(network: str, req: Request) -> ChangeSet: props = await req.json() - return set_quality(network, ChangeSet(props)) - + return set_quality(network, ChangeSet(props)) + ############################################################ # source 18.[SOURCES] ############################################################ -@app.get('/getsourcechema/') +@app.get("/getsourcechema/") async def fastapi_get_source_schema(network: str) -> dict[str, dict[str, Any]]: return get_source_schema(network) -@app.get('/getsource/') + +@app.get("/getsource/") async def fastapi_get_source(network: str, node: str) -> dict[str, Any]: return get_source(network, node) -@app.post('/setsource/',response_model=None) + +@app.post("/setsource/", response_model=None) async def fastapi_set_source(network: str, req: Request) -> ChangeSet: props = await req.json() return set_source(network, ChangeSet(props)) + # example: add_source(p, ChangeSet({'node': 'j0', 's_type': SOURCE_TYPE_CONCEN, 'strength': 10.0, 'pattern': 'p0'})) -@app.post('/addsource/',response_model=None) +@app.post("/addsource/", response_model=None) async def fastapi_add_source(network: str, req: Request) -> ChangeSet: props = await req.json() return add_source(network, ChangeSet(props)) -@app.post('/deletesource/',response_model=None) + +@app.post("/deletesource/", response_model=None) async def fastapi_delete_source(network: str, node: str) -> ChangeSet: - props = { 'node': node } + props = {"node": node} return 
delete_source(network, ChangeSet(props)) ############################################################ # reaction 19.[REACTIONS] ############################################################ -@app.get('/getreactionschema/') +@app.get("/getreactionschema/") async def fastapi_get_reaction_schema(network: str) -> dict[str, dict[str, Any]]: return get_reaction_schema(network) -@app.get('/getreaction/') + +@app.get("/getreaction/") async def fastapi_get_reaction(network: str) -> dict[str, Any]: return get_reaction(network) -@app.post('/setreaction/',response_model=None) + +@app.post("/setreaction/", response_model=None) # set_reaction(p, ChangeSet({ 'ORDER BULK' : '10' })) async def fastapi_set_reaction(network: str, req: Request) -> ChangeSet: props = await req.json() return set_reaction(network, ChangeSet(props)) -@app.get('/getpipereactionschema/') + +@app.get("/getpipereactionschema/") async def fastapi_get_pipe_reaction_schema(network: str) -> dict[str, dict[str, Any]]: return get_pipe_reaction_schema(network) -@app.get('/getpipereaction/') + +@app.get("/getpipereaction/") async def fastapi_get_pipe_reaction(network: str, pipe: str) -> dict[str, Any]: return get_pipe_reaction(network, pipe) -@app.post('/setpipereaction/',response_model=None) + +@app.post("/setpipereaction/", response_model=None) async def fastapi_set_pipe_reaction(network: str, req: Request) -> ChangeSet: props = await req.json() return set_pipe_reaction(network, ChangeSet(props)) -@app.get('/gettankreactionschema/') + +@app.get("/gettankreactionschema/") async def fastapi_get_tank_reaction_schema(network: str) -> dict[str, dict[str, Any]]: return get_tank_reaction_schema(network) -@app.get('/gettankreaction/') + +@app.get("/gettankreaction/") async def fastapi_get_tank_reaction(network: str, tank: str) -> dict[str, Any]: return get_tank_reaction(network, tank) -@app.post('/settankreaction/',response_model=None) + +@app.post("/settankreaction/", response_model=None) async def 
fastapi_set_tank_reaction(network: str, req: Request) -> ChangeSet: props = await req.json() return set_tank_reaction(network, ChangeSet(props)) @@ -1634,26 +1949,30 @@ async def fastapi_set_tank_reaction(network: str, req: Request) -> ChangeSet: ############################################################ # mixing 20.[MIXING] ############################################################ -@app.get('/getmixingschema/') +@app.get("/getmixingschema/") async def fastapi_get_mixing_schema(network: str) -> dict[str, dict[str, Any]]: return get_mixing_schema(network) -@app.get('/getmixing/') + +@app.get("/getmixing/") async def fastapi_get_mixing(network: str, tank: str) -> dict[str, Any]: return get_mixing(network, tank) -@app.post('/setmixing/',response_model=None) + +@app.post("/setmixing/", response_model=None) async def fastapi_set_mixing(network: str, req: Request) -> ChangeSet: props = await req.json() return api.set_mixing(network, ChangeSet(props)) + # example: add_mixing(p, ChangeSet({'tank': 't0', 'model': MIXING_MODEL_MIXED, 'value': 10.0})) -@app.post('/addmixing/',response_model=None) +@app.post("/addmixing/", response_model=None) async def fastapi_add_mixing(network: str, req: Request) -> ChangeSet: props = await req.json() return add_mixing(network, ChangeSet(props)) -@app.post('/deletemixing/',response_model=None) + +@app.post("/deletemixing/", response_model=None) async def fastapi_delete_mixing(network: str, req: Request) -> ChangeSet: props = await req.json() return delete_mixing(network, ChangeSet(props)) @@ -1662,34 +1981,40 @@ async def fastapi_delete_mixing(network: str, req: Request) -> ChangeSet: ############################################################ # time 21.[TIME] ############################################################ -@app.get('/gettimeschema') +@app.get("/gettimeschema") async def fastapi_get_time_schema(network: str) -> dict[str, dict[str, Any]]: return get_time_schema(network) + @app.get("/gettimeproperties/") async def 
fastapi_get_time_properties(network: str) -> dict[str, Any]: return get_time(network) -@app.post("/settimeproperties/",response_model=None) + +@app.post("/settimeproperties/", response_model=None) async def fastapi_set_time_properties(network: str, req: Request) -> ChangeSet: props = await req.json() - return set_time(network, ChangeSet(props)) + return set_time(network, ChangeSet(props)) + ############################################################ # option 23.[OPTIONS] ############################################################ -@app.get('/getoptionschema/') +@app.get("/getoptionschema/") async def fastapi_get_option_schema(network: str) -> dict[str, dict[str, Any]]: return get_option_v3_schema(network) + @app.get("/getoptionproperties/") async def fastapi_get_option_properties(network: str) -> dict[str, Any]: return get_option_v3(network) -@app.post("/setoptionproperties/",response_model=None) + +@app.post("/setoptionproperties/", response_model=None) async def fastapi_set_option_properties(network: str, req: Request) -> ChangeSet: props = await req.json() - return set_option_v3(network, ChangeSet(props)) + return set_option_v3(network, ChangeSet(props)) + ############################################################ # coord 24.[COORDINATES] @@ -1698,6 +2023,7 @@ async def fastapi_set_option_properties(network: str, req: Request) -> ChangeSet async def fastapi_get_node_coord(network: str, node: str) -> dict[str, float] | None: return get_node_coord(network, node) + # DingZQ, 2025-01-27, get all node coord/links # nodes: id:type:x:y # links: id:type:node1:node2 @@ -1720,27 +2046,28 @@ async def fastapi_get_network_geometries(network: str) -> dict[str, Any] | None: for node_id, coord in coords.items(): nodes.append(f"{node_id}:{coord['type']}:{coord['x']}:{coord['y']}") links = get_network_link_nodes(network) - + # return list of scadas. 
scada : id, x, y # scadas = get_all_scada_elements(network) # data from WMH's scada info scadas = get_all_scada_info(network) - results = { 'nodes': nodes, - 'links': links, - 'scadas': scadas } + results = {"nodes": nodes, "links": links, "scadas": scadas} # 缓存查询结果提高性能 redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime)) - + return results + # DingZQ, 2024-12-31, get major node coord # id:type:x:y # type: junction, reservoir, tank @app.get("/getmajornodecoords/") -async def fastapi_get_major_node_coords(network: str, diameter: int) -> list[str] | None: +async def fastapi_get_major_node_coords( + network: str, diameter: int +) -> list[str] | None: start_time = time.time() coords = get_major_node_coords(network, diameter) end_time = time.time() @@ -1751,19 +2078,24 @@ async def fastapi_get_major_node_coords(network: str, diameter: int) -> list[str result.append(f"{node_id}:{coord['type']}:{coord['x']}:{coord['y']}") return result + # DingZQ, 2025-01-03, get network in extent @app.get("/getnetworkinextent/") -async def fastapi_get_network_in_extent(network: str, x1: float, y1: float, x2: float, y2: float) -> dict[str, Any] | None: +async def fastapi_get_network_in_extent( + network: str, x1: float, y1: float, x2: float, y2: float +) -> dict[str, Any] | None: nodes = api.get_nodes_in_extent(network, x1, y1, x2, y2) links = api.get_links_in_extent(network, x1, y1, x2, y2) - return { 'nodes': nodes, 'links': links } + return {"nodes": nodes, "links": links} -# DingZQ, 2024-12-08, get all links' start and end node + +# DingZQ, 2024-12-08, get all links' start and end node # link_id:link_type:node_id1:node_id2 @app.get("/getnetworklinknodes/") async def fastapi_get_network_link_nodes(network: str) -> list[str] | None: return get_network_link_nodes(network) + # DingZQ 2024-12-31 # 获取直径大于800的管道 @app.get("/getmajorpipenodes/") @@ -1774,63 +2106,77 @@ async def fastapi_get_major_pipe_nodes(network: str, diameter: int) -> list[str] 
logger.info("get_major_pipe_nodes: %s, time: %s", result, end_time - start_time) return result + ############################################################ # vertex 25.[VERTICES] ############################################################ -@app.get('/getvertexschema/') +@app.get("/getvertexschema/") async def fastapi_get_vertex_schema(network: str) -> dict[str, dict[str, Any]]: return get_vertex_schema(network) -@app.get('/getvertexproperties/') + +@app.get("/getvertexproperties/") async def fastapi_get_vertex_properties(network: str, link: str) -> dict[str, Any]: return get_vertex(network, link) + # set_vertex(p, ChangeSet({'link' : 'p0', 'coords': [{'x': 1.0, 'y': 2.0}, {'x': 2.0, 'y': 1.0}]})) -@app.post('/setvertexproperties/',response_model=None) +@app.post("/setvertexproperties/", response_model=None) async def fastapi_set_vertex_properties(network: str, req: Request) -> ChangeSet: props = await req.json() - return set_vertex(network, ChangeSet(props)) + return set_vertex(network, ChangeSet(props)) -@app.post('/addvertex/',response_model=None) + +@app.post("/addvertex/", response_model=None) async def fastapi_add_vertex(network: str, req: Request) -> ChangeSet: props = await req.json() - return add_vertex(network, ChangeSet(props)) + return add_vertex(network, ChangeSet(props)) -@app.post('/deletevertex/',response_model=None) + +@app.post("/deletevertex/", response_model=None) async def fastapi_delete_vertex(network: str, req: Request) -> ChangeSet: props = await req.json() return api.delete_vertex(network, ChangeSet(props)) -@app.get('/getallvertexlinks/', response_class = PlainTextResponse) + +@app.get("/getallvertexlinks/", response_class=PlainTextResponse) async def fastapi_get_all_vertex_links(network: str) -> list[str]: return json.dumps(get_all_vertex_links(network)) -@app.get('/getallvertices/', response_class = PlainTextResponse) + +@app.get("/getallvertices/", response_class=PlainTextResponse) async def fastapi_get_all_vertices(network: str) -> 
list[dict[str, Any]]: return json.dumps(get_all_vertices(network)) + ############################################################ # label 26.[LABELS] ############################################################ -@app.get('/getlabelschema/') +@app.get("/getlabelschema/") async def fastapi_get_label_schema(network: str) -> dict[str, dict[str, Any]]: return get_label_schema(network) -@app.get('/getlabelproperties/') -async def fastapi_get_label_properties(network: str, x: float, y: float) -> dict[str, Any]: + +@app.get("/getlabelproperties/") +async def fastapi_get_label_properties( + network: str, x: float, y: float +) -> dict[str, Any]: return get_label(network, x, y) -@app.post('/setlabelproperties/',response_model=None) + +@app.post("/setlabelproperties/", response_model=None) async def fastapi_set_label_properties(network: str, req: Request) -> ChangeSet: props = await req.json() return set_label(network, ChangeSet(props)) -@app.post('/addlabel/',response_model=None) + +@app.post("/addlabel/", response_model=None) async def fastapi_add_label(network: str, req: Request) -> ChangeSet: props = await req.json() return add_label(network, ChangeSet(props)) -@app.post('/deletelabel/',response_model=None) + +@app.post("/deletelabel/", response_model=None) async def fastapi_delete_label(network: str, req: Request) -> ChangeSet: props = await req.json() return delete_label(network, ChangeSet(props)) @@ -1839,54 +2185,64 @@ async def fastapi_delete_label(network: str, req: Request) -> ChangeSet: ############################################################ # backdrop 27.[BACKDROP] ############################################################ -@app.get('/getbackdropschema/') +@app.get("/getbackdropschema/") async def fastapi_get_backdrop_schema(network: str) -> dict[str, dict[str, Any]]: return get_backdrop_schema(network) -@app.get('/getbackdropproperties/') + +@app.get("/getbackdropproperties/") async def fastapi_get_backdrop_properties(network: str) -> dict[str, Any]: 
return get_backdrop(network) -@app.post('/setbackdropproperties/',response_model=None) + +@app.post("/setbackdropproperties/", response_model=None) async def fastapi_set_backdrop_properties(network: str, req: Request) -> ChangeSet: props = await req.json() return set_backdrop(network, ChangeSet(props)) + ############################################################ # scada_device 29 ############################################################ -@app.get('/getscadadeviceschema/') +@app.get("/getscadadeviceschema/") async def fastapi_get_scada_device_schema(network: str) -> dict[str, dict[str, Any]]: return get_scada_device_schema(network) -@app.get('/getscadadevice/') + +@app.get("/getscadadevice/") async def fastapi_get_scada_device(network: str, id: str) -> dict[str, Any]: return get_scada_device(network, id) -@app.post('/setscadadevice/',response_model=None) + +@app.post("/setscadadevice/", response_model=None) async def fastapi_set_scada_device(network: str, req: Request) -> ChangeSet: props = await req.json() return set_scada_device(network, ChangeSet(props)) -@app.post('/addscadadevice/',response_model=None) + +@app.post("/addscadadevice/", response_model=None) async def fastapi_add_scada_device(network: str, req: Request) -> ChangeSet: props = await req.json() return add_scada_device(network, ChangeSet(props)) -@app.post('/deletescadadevice/',response_model=None) + +@app.post("/deletescadadevice/", response_model=None) async def fastapi_delete_scada_device(network: str, req: Request) -> ChangeSet: props = await req.json() return delete_scada_device(network, ChangeSet(props)) -@app.post('/cleanscadadevice/',response_model=None) + +@app.post("/cleanscadadevice/", response_model=None) async def fastapi_clean_scada_device(network: str) -> ChangeSet: return clean_scada_device(network) -@app.get('/getallscadadeviceids/') + +@app.get("/getallscadadeviceids/") async def fastapi_get_all_scada_device_ids(network: str) -> list[str]: return 
get_all_scada_device_ids(network) -@app.get('/getallscadadevices/', response_class = PlainTextResponse) + +@app.get("/getallscadadevices/", response_class=PlainTextResponse) async def fastapi_get_all_scada_devices(network: str) -> list[dict[str, Any]]: return json.dumps(get_all_scada_devices(network)) @@ -1894,34 +2250,41 @@ async def fastapi_get_all_scada_devices(network: str) -> list[dict[str, Any]]: ############################################################ # scada_device_data 30 ############################################################ -@app.get('/getscadadevicedataschema/') -async def fastapi_get_scada_device_data_schema(network: str) -> dict[str, dict[str, Any]]: +@app.get("/getscadadevicedataschema/") +async def fastapi_get_scada_device_data_schema( + network: str, +) -> dict[str, dict[str, Any]]: return get_scada_device_data_schema(network) -@app.get('/getscadadevicedata/') + +@app.get("/getscadadevicedata/") async def fastapi_get_scada_device_data(network: str, id: str) -> dict[str, Any]: return get_scada_device_data(network, id) + # example: set_scada_device_data(p, ChangeSet({'device_id': 'sm_device', 'data': [{ 'time': '2023-02-10 00:02:22', 'value': 100.0 }, { 'time': '2023-02-10 00:03:22', 'value': 200.0 }]})) # time format must be 'YYYY-MM-DD HH:MM:SS' -@app.post('/setscadadevicedata/',response_model=None) +@app.post("/setscadadevicedata/", response_model=None) async def fastapi_set_scada_device_data(network: str, req: Request) -> ChangeSet: props = await req.json() return set_scada_device_data(network, ChangeSet(props)) + # example: add_scada_device_data(p, ChangeSet({'device_id': 'sm_device', 'time': '2023-02-10 00:02:22', 'value': 100.0})) -@app.post('/addscadadevicedata/',response_model=None) +@app.post("/addscadadevicedata/", response_model=None) async def fastapi_add_scada_device_data(network: str, req: Request) -> ChangeSet: props = await req.json() return add_scada_device_data(network, ChangeSet(props)) + # example: 
delete_scada_device_data(p, ChangeSet({'device_id': 'sm_device', 'time': '2023-02-12 00:02:22'})) -@app.post('/deletescadadevicedata/',response_model=None) +@app.post("/deletescadadevicedata/", response_model=None) async def fastapi_delete_scada_device_data(network: str, req: Request) -> ChangeSet: props = await req.json() return delete_scada_device_data(network, ChangeSet(props)) -@app.post('/cleanscadadevicedata/',response_model=None) + +@app.post("/cleanscadadevicedata/", response_model=None) async def fastapi_clean_scada_device_data(network: str) -> ChangeSet: return clean_scada_device_data(network) @@ -1929,255 +2292,336 @@ async def fastapi_clean_scada_device_data(network: str) -> ChangeSet: ############################################################ # scada_element 31 ############################################################ -@app.get('/getscadaelementschema/') +@app.get("/getscadaelementschema/") async def fastapi_get_scada_element_schema(network: str) -> dict[str, dict[str, Any]]: return get_scada_element_schema(network) -@app.get('/getscadaelements/') + +@app.get("/getscadaelements/") async def fastapi_get_scada_elements(network: str) -> list[str]: return get_all_scada_elements(network) -@app.get('/getscadaelement/') + +@app.get("/getscadaelement/") async def fastapi_get_scada_element(network: str, id: str) -> dict[str, Any]: return get_scada_element(network, id) -@app.post('/setscadaelement/',response_model=None) + +@app.post("/setscadaelement/", response_model=None) async def fastapi_set_scada_element(network: str, req: Request) -> ChangeSet: props = await req.json() return set_scada_element(network, ChangeSet(props)) -@app.post('/addscadaelement/',response_model=None) + +@app.post("/addscadaelement/", response_model=None) async def fastapi_add_scada_element(network: str, req: Request) -> ChangeSet: props = await req.json() return add_scada_element(network, ChangeSet(props)) -@app.post('/deletescadaelement/',response_model=None) + 
+@app.post("/deletescadaelement/", response_model=None) async def fastapi_delete_scada_element(network: str, req: Request) -> ChangeSet: props = await req.json() return delete_scada_element(network, ChangeSet(props)) -@app.post('/cleanscadaelement/',response_model=None) + +@app.post("/cleanscadaelement/", response_model=None) async def fastapi_clean_scada_element(network: str) -> ChangeSet: return clean_scada_element(network) + ############################################################ # general_region 32 ############################################################ -@app.get('/getregionschema/') +@app.get("/getregionschema/") async def fastapi_get_region_schema(network: str) -> dict[str, dict[str, Any]]: return get_region_schema(network) -@app.get('/getregion/') + +@app.get("/getregion/") async def fastapi_get_region(network: str, id: str) -> dict[str, Any]: return get_region(network, id) -@app.post('/setregion/',response_model=None) -async def fastapi_set_region(network : str, req: Request) -> ChangeSet: + +@app.post("/setregion/", response_model=None) +async def fastapi_set_region(network: str, req: Request) -> ChangeSet: props = await req.json() return set_region(network, ChangeSet(props)) + # example: add_region(p, ChangeSet({'id': 'r', 'boundary': [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 0.0)]})) -@app.post('/addregion/',response_model=None) +@app.post("/addregion/", response_model=None) async def fastapi_add_region(network: str, req: Request) -> ChangeSet: props = await req.json() return add_region(network, ChangeSet(props)) -@app.post('/deleteregion/',response_model=None) + +@app.post("/deleteregion/", response_model=None) async def fastapi_delete_region(network: str, req: Request) -> ChangeSet: props = await req.json() return delete_region(network, ChangeSet(props)) + ############################################################ # district_metering_area 33 ############################################################ 
-@app.get('/calculatedistrictmeteringareafornodes/') -async def fastapi_calculate_district_metering_area_for_nodes(network: str, req: Request) -> list[list[str]]: +@app.get("/calculatedistrictmeteringareafornodes/") +async def fastapi_calculate_district_metering_area_for_nodes( + network: str, req: Request +) -> list[list[str]]: props = await req.json() - nodes = props['nodes'] - part_count = props['part_count'] - part_type = props['part_type'] - return calculate_district_metering_area_for_nodes(network, nodes, part_count, part_type) + nodes = props["nodes"] + part_count = props["part_count"] + part_type = props["part_type"] + return calculate_district_metering_area_for_nodes( + network, nodes, part_count, part_type + ) -@app.get('/calculatedistrictmeteringareaforregion/') -async def fastapi_calculate_district_metering_area_for_region(network: str, req: Request) -> list[list[str]]: - props = await req.json() - region = props['region'] - part_count = props['part_count'] - part_type = props['part_type'] - return calculate_district_metering_area_for_region(network, region, part_count, part_type) -@app.get('/calculatedistrictmeteringareafornetwork/') -async def fastapi_calculate_district_metering_area_for_network(network: str, req: Request) -> list[list[str]]: +@app.get("/calculatedistrictmeteringareaforregion/") +async def fastapi_calculate_district_metering_area_for_region( + network: str, req: Request +) -> list[list[str]]: props = await req.json() - part_count = props['part_count'] - part_type = props['part_type'] + region = props["region"] + part_count = props["part_count"] + part_type = props["part_type"] + return calculate_district_metering_area_for_region( + network, region, part_count, part_type + ) + + +@app.get("/calculatedistrictmeteringareafornetwork/") +async def fastapi_calculate_district_metering_area_for_network( + network: str, req: Request +) -> list[list[str]]: + props = await req.json() + part_count = props["part_count"] + part_type = 
props["part_type"] return calculate_district_metering_area_for_network(network, part_count, part_type) -@app.get('/getdistrictmeteringareaschema/') -async def fastapi_get_district_metering_area_schema(network: str) -> dict[str, dict[str, Any]]: + +@app.get("/getdistrictmeteringareaschema/") +async def fastapi_get_district_metering_area_schema( + network: str, +) -> dict[str, dict[str, Any]]: return get_district_metering_area_schema(network) -@app.get('/getdistrictmeteringarea/') + +@app.get("/getdistrictmeteringarea/") async def fastapi_get_district_metering_area(network: str, id: str) -> dict[str, Any]: return get_district_metering_area(network, id) -@app.post('/setdistrictmeteringarea/',response_model=None) + +@app.post("/setdistrictmeteringarea/", response_model=None) async def fastapi_set_district_metering_area(network: str, req: Request) -> ChangeSet: props = await req.json() return set_district_metering_area(network, ChangeSet(props)) -@app.post('/adddistrictmeteringarea/',response_model=None) + +@app.post("/adddistrictmeteringarea/", response_model=None) async def fastapi_add_district_metering_area(network: str, req: Request) -> ChangeSet: props = await req.json() -# boundary should be [(x,y), (x,y)] - boundary = props['boundary'] + # boundary should be [(x,y), (x,y)] + boundary = props["boundary"] newBoundary = [] for pt in boundary: newBoundary.append((pt[0], pt[1])) - - props['boundary'] = newBoundary - + + props["boundary"] = newBoundary + return add_district_metering_area(network, ChangeSet(props)) -@app.post('/deletedistrictmeteringarea/',response_model=None) -async def fastapi_delete_district_metering_area(network: str, req: Request) -> ChangeSet: + +@app.post("/deletedistrictmeteringarea/", response_model=None) +async def fastapi_delete_district_metering_area( + network: str, req: Request +) -> ChangeSet: props = await req.json() return delete_district_metering_area(network, ChangeSet(props)) -@app.get('/getalldistrictmeteringareaids/') + 
+@app.get("/getalldistrictmeteringareaids/") async def fastapi_get_all_district_metering_area_ids(network: str) -> list[str]: return get_all_district_metering_area_ids(network) -@app.get('/getalldistrictmeteringareas/') + +@app.get("/getalldistrictmeteringareas/") async def getalldistrictmeteringareas(network: str) -> list[dict[str, Any]]: return get_all_district_metering_areas(network) -@app.post('/generatedistrictmeteringarea/',response_model=None) -async def fastapi_generate_district_metering_area(network: str, part_count: int, part_type: int, inflate_delta: float) -> ChangeSet: - return generate_district_metering_area(network, part_count, part_type, inflate_delta) -@app.post('/generatesubdistrictmeteringarea/',response_model=None) -async def fastapi_generate_sub_district_metering_area(network: str, dma: str, part_count: int, part_type: int, inflate_delta: float) -> ChangeSet: +@app.post("/generatedistrictmeteringarea/", response_model=None) +async def fastapi_generate_district_metering_area( + network: str, part_count: int, part_type: int, inflate_delta: float +) -> ChangeSet: + return generate_district_metering_area( + network, part_count, part_type, inflate_delta + ) + + +@app.post("/generatesubdistrictmeteringarea/", response_model=None) +async def fastapi_generate_sub_district_metering_area( + network: str, dma: str, part_count: int, part_type: int, inflate_delta: float +) -> ChangeSet: print(network) print(dma) print(part_count) print(part_type) print(inflate_delta) - return generate_sub_district_metering_area(network, dma, part_count, part_type, inflate_delta) + return generate_sub_district_metering_area( + network, dma, part_count, part_type, inflate_delta + ) + ############################################################ # service_area 34 ############################################################ -@app.get('/calculateservicearea/') -async def fastapi_calculate_service_area(network: str, time_index: int) -> dict[str, Any]: 
+@app.get("/calculateservicearea/") +async def fastapi_calculate_service_area( + network: str, time_index: int +) -> dict[str, Any]: return calculate_service_area(network, time_index) -@app.get('/getserviceareaschema/') + +@app.get("/getserviceareaschema/") async def fastapi_get_service_area_schema(network: str) -> dict[str, dict[str, Any]]: return get_service_area_schema(network) -@app.get('/getservicearea/') + +@app.get("/getservicearea/") async def fastapi_get_service_area(network: str, id: str) -> dict[str, Any]: return get_service_area(network, id) -@app.post('/setservicearea/',response_model=None) + +@app.post("/setservicearea/", response_model=None) async def fastapi_set_service_area(network: str, req: Request) -> ChangeSet: props = await req.json() return set_service_area(network, ChangeSet(props)) -@app.post('/addservicearea/',response_model=None) + +@app.post("/addservicearea/", response_model=None) async def fastapi_add_service_area(network: str, req: Request) -> ChangeSet: props = await req.json() return add_service_area(network, ChangeSet(props)) -@app.post('/deleteservicearea/',response_model=None) + +@app.post("/deleteservicearea/", response_model=None) async def fastapi_delete_service_area(network: str, req: Request) -> ChangeSet: props = await req.json() return delete_service_area(network, ChangeSet(props)) -@app.get('/getallserviceareas/') + +@app.get("/getallserviceareas/") async def fastapi_get_all_service_areas(network: str) -> list[dict[str, Any]]: return get_all_service_areas(network) -@app.post('/generateservicearea/',response_model=None) -async def fastapi_generate_service_area(network: str, inflate_delta: float) -> ChangeSet: + +@app.post("/generateservicearea/", response_model=None) +async def fastapi_generate_service_area( + network: str, inflate_delta: float +) -> ChangeSet: return generate_service_area(network, inflate_delta) ############################################################ # virtual_district 35 
############################################################ -@app.get('/calculatevirtualdistrict/') -async def fastapi_calculate_virtual_district(network: str, centers: list[str]) -> dict[str, list[Any]]: +@app.get("/calculatevirtualdistrict/") +async def fastapi_calculate_virtual_district( + network: str, centers: list[str] +) -> dict[str, list[Any]]: return calculate_virtual_district(network, centers) -@app.get('/getvirtualdistrictschema/') -async def fastapi_get_virtual_district_schema(network: str) -> dict[str, dict[str, Any]]: + +@app.get("/getvirtualdistrictschema/") +async def fastapi_get_virtual_district_schema( + network: str, +) -> dict[str, dict[str, Any]]: return get_virtual_district_schema(network) -@app.get('/getvirtualdistrict/') + +@app.get("/getvirtualdistrict/") async def fastapi_get_virtual_district(network: str, id: str) -> dict[str, Any]: return get_virtual_district(network, id) -@app.post('/setvirtualdistrict/',response_model=None) + +@app.post("/setvirtualdistrict/", response_model=None) async def fastapi_set_virtual_district(network: str, req: Request) -> ChangeSet: props = await req.json() return set_virtual_district(network, ChangeSet(props)) -@app.post('/addvirtualdistrict/',response_model=None) + +@app.post("/addvirtualdistrict/", response_model=None) async def fastapi_add_virtual_district(network: str, req: Request) -> ChangeSet: props = await req.json() return add_virtual_district(network, ChangeSet(props)) -@app.post('/deletevirtualdistrict/',response_model=None) + +@app.post("/deletevirtualdistrict/", response_model=None) async def fastapi_delete_virtual_district(network: str, req: Request) -> ChangeSet: props = await req.json() return delete_virtual_district(network, ChangeSet(props)) -@app.get('/getallvirtualdistrict/') + +@app.get("/getallvirtualdistrict/") async def fastapi_get_all_virtual_district(network: str) -> list[dict[str, Any]]: return get_all_virtual_districts(network) 
-@app.post('/generatevirtualdistrict/',response_model=None) -async def fastapi_generate_virtual_district(network: str, inflate_delta: float, req: Request) -> ChangeSet: + +@app.post("/generatevirtualdistrict/", response_model=None) +async def fastapi_generate_virtual_district( + network: str, inflate_delta: float, req: Request +) -> ChangeSet: props = await req.json() - return generate_virtual_district(network, props['centers'], inflate_delta) + return generate_virtual_district(network, props["centers"], inflate_delta) + ############################################################ # water_distribution_area 36 ############################################################ -@app.get('/calculatedemandtonodes/') -async def fastapi_calculate_demand_to_nodes(network: str, req: Request) -> dict[str, float]: +@app.get("/calculatedemandtonodes/") +async def fastapi_calculate_demand_to_nodes( + network: str, req: Request +) -> dict[str, float]: props = await req.json() - demand = props['demand'] - nodes = props['nodes'] + demand = props["demand"] + nodes = props["nodes"] return calculate_demand_to_nodes(network, demand, nodes) -@app.get('/calculatedemandtoregion/') -async def fastapi_calculate_demand_to_region(network: str, req: Request) -> dict[str, float]: + +@app.get("/calculatedemandtoregion/") +async def fastapi_calculate_demand_to_region( + network: str, req: Request +) -> dict[str, float]: props = await req.json() - demand = props['demand'] - region = props['region'] + demand = props["demand"] + region = props["region"] return calculate_demand_to_region(network, demand, region) -@app.get('/calculatedemandtonetwork/') -async def fastapi_calculate_demand_to_network(network: str, demand: float) -> dict[str, float]: + +@app.get("/calculatedemandtonetwork/") +async def fastapi_calculate_demand_to_network( + network: str, demand: float +) -> dict[str, float]: return calculate_demand_to_network(network, demand) ########################################################### # 
scada_info 38 || written by WMH ############################################################ -@app.get('/getscadainfoschema/') +@app.get("/getscadainfoschema/") async def fastapi_get_scada_info_schema(network: str) -> dict[str, dict[str, Any]]: - return get_scada_info_schema(network) + return get_scada_info_schema(network) -@app.get('/getscadainfo/') + +@app.get("/getscadainfo/") async def fastapi_get_scada_info(network: str, id: str) -> dict[str, float]: return get_scada_info(network, id) -@app.get('/getallscadainfo/') + +@app.get("/getallscadainfo/") async def fastapi_get_all_scada_info(network: str) -> list[dict[str, float]]: return get_all_scada_info(network) @@ -2185,31 +2629,35 @@ async def fastapi_get_all_scada_info(network: str) -> list[dict[str, float]]: ########################################################### # user 39 ########################################################### -@app.get('/getuserschema/') +@app.get("/getuserschema/") async def fastapi_get_user_schema(network: str) -> dict[str, dict[Any, Any]]: - return get_user_schema(network) + return get_user_schema(network) -@app.get('/getuser/') + +@app.get("/getuser/") async def fastapi_get_user(network: str, user_name: str) -> dict[Any, Any]: return get_user(network, user_name) -@app.get('/getallusers/') + +@app.get("/getallusers/") async def fastapi_get_all_users(network: str) -> list[dict[Any, Any]]: return get_all_users(network) ############################################################ # scheme 40 -############################################################ -@app.get('/getschemeschema/') +############################################################ +@app.get("/getschemeschema/") async def fastapi_get_scheme_schema(network: str) -> dict[str, dict[Any, Any]]: - return get_scheme_schema(network) + return get_scheme_schema(network) -@app.get('/getscheme/') + +@app.get("/getscheme/") async def fastapi_get_scheme(network: str, schema_name: str) -> dict[Any, Any]: return get_scheme(network, 
schema_name) -@app.get('/getallschemes/') + +@app.get("/getallschemes/") async def fastapi_get_all_schemes(network: str) -> list[dict[Any, Any]]: return get_all_schemes(network) @@ -2217,23 +2665,35 @@ async def fastapi_get_all_schemes(network: str) -> list[dict[Any, Any]]: ############################################################ # pipe_risk_probability 41 ############################################################ -@app.get('/getpiperiskprobabilitynow/') -async def fastapi_get_pipe_risk_probability_now(network: str, pipe_id: str) -> dict[str, Any]: +@app.get("/getpiperiskprobabilitynow/") +async def fastapi_get_pipe_risk_probability_now( + network: str, pipe_id: str +) -> dict[str, Any]: return get_pipe_risk_probability_now(network, pipe_id) -@app.get('/getpiperiskprobability/') -async def fastapi_get_pipe_risk_probability(network: str, pipe_id: str) -> dict[str, Any]: + +@app.get("/getpiperiskprobability/") +async def fastapi_get_pipe_risk_probability( + network: str, pipe_id: str +) -> dict[str, Any]: return get_pipe_risk_probability(network, pipe_id) -@app.get('/getpipesriskprobability/') -async def fastapi_get_pipes_risk_probability(network: str, pipe_ids: str) -> list[dict[str, Any]]: - pipeids = pipe_ids.split(',') + +@app.get("/getpipesriskprobability/") +async def fastapi_get_pipes_risk_probability( + network: str, pipe_ids: str +) -> list[dict[str, Any]]: + pipeids = pipe_ids.split(",") return get_pipes_risk_probability(network, pipeids) -@app.get('/getnetworkpiperiskprobabilitynow/') -async def fastapi_get_network_pipe_risk_probability_now(network: str) -> list[dict[str, Any]]: + +@app.get("/getnetworkpiperiskprobabilitynow/") +async def fastapi_get_network_pipe_risk_probability_now( + network: str, +) -> list[dict[str, Any]]: return get_network_pipe_risk_probability_now(network) + # 返回一个字典,key 是管道的 id,value 是管道的几何信息 # 几何信息是一个字典,包含 start 和 end 两个 key,value 是管道的起点和终点的坐标 # 例如: @@ -2247,7 +2707,7 @@ async def 
fastapi_get_network_pipe_risk_probability_now(network: str) -> list[di # 29.814950582 # ] # }, -@app.get('/getpiperiskprobabilitygeometries/') +@app.get("/getpiperiskprobabilitygeometries/") async def fastapi_get_pipe_risk_probability_geometries(network: str) -> dict[str, Any]: return get_pipe_risk_probability_geometries(network) @@ -2255,7 +2715,7 @@ async def fastapi_get_pipe_risk_probability_geometries(network: str) -> dict[str ############################################################ # sensor_placement 42 ############################################################ -@app.get('/getallsensorplacements/') +@app.get("/getallsensorplacements/") async def fastapi_get_all_sensor_placements(network: str) -> list[dict[Any, Any]]: return get_all_sensor_placements(network) @@ -2263,83 +2723,72 @@ async def fastapi_get_all_sensor_placements(network: str) -> list[dict[Any, Any] ############################################################ # burst_locate_result 43 ############################################################ -@app.get('/getallburstlocateresults/') +@app.get("/getallburstlocateresults/") async def fastapi_get_all_burst_locate_results(network: str) -> list[dict[Any, Any]]: return get_all_burst_locate_results(network) - - - - - - - - - - - - - - - # inp file @app.post("/uploadinp/", status_code=status.HTTP_200_OK) -async def fastapi_upload_inp(afile: bytes, name: str ): +async def fastapi_upload_inp(afile: bytes, name: str): filePath = inpDir + str(name) - f = open(filePath, 'wb') + f = open(filePath, "wb") f.write(afile) f.close() return True + @app.get("/downloadinp/", status_code=status.HTTP_200_OK) async def fastapi_download_inp(name: str, response: Response): - filePath = inpDir + name + filePath = inpDir + name if os.path.exists(filePath): - return FileResponse(filePath, media_type='application/octet-stream', filename="inp.inp") + return FileResponse( + filePath, media_type="application/octet-stream", filename="inp.inp" + ) else: response.status_code = 
status.HTTP_400_BAD_REQUEST return True - - + + # DingZQ, 2024-12-28, convert v3 to v2 -@app.get("/convertv3tov2/",response_model=None) +@app.get("/convertv3tov2/", response_model=None) async def fastapi_convert_v3_to_v2(req: Request) -> ChangeSet: - network = 'v3Tov2' + network = "v3Tov2" jo_root = await req.json() - inp = jo_root['inp'] + inp = jo_root["inp"] cs = convert_inp_v3_to_v2(inp) op = cs.operations[0] open_project(network) - op['vertex'] = json.dumps(get_all_vertices(network)) - op['scada'] = json.dumps(get_all_scada_elements(network)) - op['dma'] = json.dumps(get_all_district_metering_areas(network)) - op['sa'] = json.dumps(get_all_service_areas(network)) - op['vd'] = json.dumps(get_all_virtual_districts(network)) - op['legend'] = get_extension_data(network, 'legend') + op["vertex"] = json.dumps(get_all_vertices(network)) + op["scada"] = json.dumps(get_all_scada_elements(network)) + op["dma"] = json.dumps(get_all_district_metering_areas(network)) + op["sa"] = json.dumps(get_all_service_areas(network)) + op["vd"] = json.dumps(get_all_virtual_districts(network)) + op["legend"] = get_extension_data(network, "legend") - db = get_extension_data(network, 'scada_db') + db = get_extension_data(network, "scada_db") print(db) - scada_db = '' + scada_db = "" if db: scada_db = db print(scada_db) - op['scada_db'] = scada_db - + op["scada_db"] = scada_db + close_project(network) return cs - + + @app.get("/getjson/") async def fastapi_get_json(): return JSONResponse( - status_code = status.HTTP_400_BAD_REQUEST, + status_code=status.HTTP_400_BAD_REQUEST, content={ - 'code': 400, - 'message': "this is message", - 'data': 123, - } + "code": 400, + "message": "this is message", + "data": 123, + }, ) @@ -2352,36 +2801,103 @@ async def fastapi_get_realtimedata(): data = [random.randint(0, 100) for _ in range(100)] return data + @app.get("/getsimulationresult/") async def fastapi_get_simulationresult(): data = [random.randint(0, 100) for _ in range(100)] return data + # 
下面几个query 函数,都是从 influxdb 中查询的,不与 network 绑定,用固定的network 名字 + # DingZQ 2025-01-31 # def query_latest_record_by_ID(ID: str, type: str, bucket: str="realtime_data", client: InfluxDBClient=client) -> dict: @app.get("/querynodelatestrecordbyid/") async def fastapi_query_node_latest_record_by_id(id: str): - return influxdb_api.query_latest_record_by_ID(id, type='node') + return influxdb_api.query_latest_record_by_ID(id, type="node") + @app.get("/querylinklatestrecordbyid/") async def fastapi_query_link_latest_record_by_id(id: str): - return influxdb_api.query_latest_record_by_ID(id, type='link') + return influxdb_api.query_latest_record_by_ID(id, type="link") + # query scada @app.get("/queryscadalatestrecordbyid/") async def fastapi_query_scada_latest_record_by_id(id: str): - return influxdb_api.query_latest_record_by_ID(id, type='scada') - + return influxdb_api.query_latest_record_by_ID(id, type="scada") # def query_all_record_by_time(query_time: str, bucket: str="realtime_data", client: InfluxDBClient=client) -> tuple: @app.get("/queryallrecordsbytime/") async def fastapi_query_all_records_by_time(querytime: str) -> dict[str, list]: results: tuple = influxdb_api.query_all_records_by_time(query_time=querytime) - return { "nodes": results[0], - "links": results[1] } + return {"nodes": results[0], "links": results[1]} + + +# def query_all_record_by_time_property(querytime: str, type: str, property: str, bucket: str = "realtime_simulation_result") -> tuple: +@app.get("/queryallrecordsbytimeproperty/") +async def fastapi_query_all_record_by_time_property( + querytime: str, type: str, property: str, bucket: str = "realtime_simulation_result" +) -> dict[str, list]: + results: tuple = influxdb_api.query_all_record_by_time_property( + query_time=querytime, type=type, property=property, bucket=bucket + ) + return {"results": results} + + +@app.get("/queryallschemerecordsbytimeproperty/") +async def fastapi_query_all_scheme_record_by_time_property( + querytime: str, + type: str, 
+ property: str, + schemename: str, + bucket: str = "scheme_simulation_result", +) -> dict[str, list]: + """ + 查询指定方案某一时刻的所有记录,查询 'node' 或 'link' 的某一属性值 + + :param querytime: 查询时间,格式为 '2024-11-24T17:30:00+08:00' + :param type: 查询类型 'node' 或 'link' + :param property: 查询的属性字段名 + :param schemename: 方案名称,如 "FANGAN1761124840355" + :param bucket: 数据存储的bucket名称 + :return: 包含查询结果的字典 + """ + results: list = influxdb_api.query_all_scheme_record_by_time_property( + query_time=querytime, + type=type, + property=property, + scheme_name=schemename, + bucket=bucket, + ) + return {"results": results} + + +@app.get("/querysimulationrecordsbyidtime/") +async def fastapi_query_simulation_record_by_ids_time( + id: str, querytime: str, type: str, bucket: str = "realtime_simulation_result" +) -> dict[str, list]: + results: tuple = influxdb_api.query_simulation_result_by_ID_time( + ID=id, type=type, query_time=querytime, bucket=bucket + ) + return {"results": results} + + +@app.get("/queryschemesimulationrecordsbyidtime/") +async def fastapi_query_scheme_simulation_record_by_ids_time( + scheme_name: str, + id: str, + querytime: str, + type: str, + bucket: str = "scheme_simulation_result", +) -> dict[str, list]: + results: tuple = influxdb_api.query_scheme_simulation_result_by_ID_time( + scheme_name=scheme_name, ID=id, type=type, query_time=querytime, bucket=bucket + ) + return {"results": results} + @app.get("/queryallrecordsbydate/") async def fastapi_query_all_records_by_date(querydate: str) -> dict: @@ -2406,8 +2922,7 @@ async def fastapi_query_all_records_by_date(querydate: str) -> dict: logger.info(f"query from influxdb") nodes_links: tuple = influxdb_api.query_all_records_by_date(query_date=querydate) - results = { "nodes": nodes_links[0], - "links": nodes_links[1] } + results = {"nodes": nodes_links[0], "links": nodes_links[1]} # 今天的不要去缓存 if not is_today_or_future: @@ -2418,8 +2933,11 @@ async def fastapi_query_all_records_by_date(querydate: str) -> dict: return results + 
@app.get("/queryallrecordsbytimerange/") -async def fastapi_query_all_records_by_time_range(starttime: str, endtime: str) -> dict[str, list]: +async def fastapi_query_all_records_by_time_range( + starttime: str, endtime: str +) -> dict[str, list]: # 缓存查询结果提高性能 global redis_client @@ -2432,9 +2950,10 @@ async def fastapi_query_all_records_by_time_range(starttime: str, endtime: str) loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) return loaded_dict - nodes_links: tuple = influxdb_api.query_all_records_by_time_range(starttime=starttime, endtime=endtime) - results = { "nodes": nodes_links[0], - "links": nodes_links[1] } + nodes_links: tuple = influxdb_api.query_all_records_by_time_range( + starttime=starttime, endtime=endtime + ) + results = {"nodes": nodes_links[0], "links": nodes_links[1]} # 今天的不要去缓存 if not time_api.is_today_or_future(starttime): @@ -2442,9 +2961,12 @@ async def fastapi_query_all_records_by_time_range(starttime: str, endtime: str) return results -#2025-03-15, DingZQ + +# 2025-03-15, DingZQ @app.get("/queryallrecordsbydatewithtype/") -async def fastapi_query_all_records_by_date_with_type(querydate: str, querytype: str) -> list: +async def fastapi_query_all_records_by_date_with_type( + querydate: str, querytype: str +) -> list: # 缓存查询结果提高性能 global redis_client cache_key = f"queryallrecordsbydatewithtype_{querydate}_{querytype}" @@ -2454,15 +2976,20 @@ async def fastapi_query_all_records_by_date_with_type(querydate: str, querytype: loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) return loaded_dict - results = influxdb_api.query_all_records_by_date_with_type(query_date=querydate, query_type=querytype) + results = influxdb_api.query_all_records_by_date_with_type( + query_date=querydate, query_type=querytype + ) - packed = msgpack.packb(results, default=encode_datetime) + packed = msgpack.packb(results, default=encode_datetime) redis_client.set(cache_key, packed) return results + @app.get("/queryallrecordsbyidsdatetype/") 
-async def fastapi_query_all_records_by_ids_date_type(ids:str, querydate: str, querytype: str) -> list: +async def fastapi_query_all_records_by_ids_date_type( + ids: str, querydate: str, querytype: str +) -> list: # 缓存查询结果提高性能 global redis_client cache_key = f"queryallrecordsbydatewithtype_{querydate}_{querytype}" @@ -2472,13 +2999,15 @@ async def fastapi_query_all_records_by_ids_date_type(ids:str, querydate: str, qu # 使用自定义的反序列化函数 results = msgpack.unpackb(data, object_hook=decode_datetime) else: - results = influxdb_api.query_all_records_by_date_with_type(query_date=querydate, query_type=querytype) - packed = msgpack.packb(results, default=encode_datetime) + results = influxdb_api.query_all_records_by_date_with_type( + query_date=querydate, query_type=querytype + ) + packed = msgpack.packb(results, default=encode_datetime) redis_client.set(cache_key, packed) query_ids = ids.split(",") e_results = py_linq.Enumerable(results) - lst_results = e_results.where(lambda x: x['ID'] in query_ids).to_list() + lst_results = e_results.where(lambda x: x["ID"] in query_ids).to_list() return lst_results @@ -2486,7 +3015,9 @@ async def fastapi_query_all_records_by_ids_date_type(ids:str, querydate: str, qu # 查询指定日期、类型、属性的所有记录 # 返回 [{'time': '2024-01-01T00:00:00Z', 'ID': '1', 'value': 1.0}, {'time': '2024-01-01T00:00:00Z', 'ID': '2', 'value': 2.0}] @app.get("/queryallrecordsbydateproperty/") -async def fastapi_query_all_records_by_date_property(querydate: str, querytype: str, property: str) -> list[dict]: +async def fastapi_query_all_records_by_date_property( + querydate: str, querytype: str, property: str +) -> list[dict]: # 缓存查询结果提高性能 global redis_client cache_key = f"queryallrecordsbydateproperty_{querydate}_{querytype}_{property}" @@ -2496,8 +3027,10 @@ async def fastapi_query_all_records_by_date_property(querydate: str, querytype: loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) return loaded_dict - result_dict = 
influxdb_api.query_all_record_by_date_property(query_date=querydate, type=querytype, property=property) - packed = msgpack.packb(result_dict, default=encode_datetime) + result_dict = influxdb_api.query_all_record_by_date_property( + query_date=querydate, type=querytype, property=property + ) + packed = msgpack.packb(result_dict, default=encode_datetime) redis_client.set(cache_key, packed) return result_dict @@ -2505,81 +3038,113 @@ async def fastapi_query_all_records_by_date_property(querydate: str, querytype: # def query_curve_by_ID_property_daterange(ID: str, type: str, property: str, start_date: str, end_date: str, bucket: str="realtime_data", client: InfluxDBClient=client) -> list: @app.get("/querynodecurvebyidpropertydaterange/") -async def fastapi_query_node_curve_by_id_property_daterange(id: str, prop: str, startdate: str, enddate: str): - return influxdb_api.query_curve_by_ID_property_daterange(id, type='node', property=prop, start_date=startdate, end_date=enddate) +async def fastapi_query_node_curve_by_id_property_daterange( + id: str, prop: str, startdate: str, enddate: str +): + return influxdb_api.query_curve_by_ID_property_daterange( + id, type="node", property=prop, start_date=startdate, end_date=enddate + ) + @app.get("/querylinkcurvebyidpropertydaterange/") -async def fastapi_query_link_curve_by_id_property_daterange(id: str, prop: str, startdate: str, enddate: str): - return influxdb_api.query_curve_by_ID_property_daterange(id, type='link', property=prop, start_date=startdate, end_date=enddate) +async def fastapi_query_link_curve_by_id_property_daterange( + id: str, prop: str, startdate: str, enddate: str +): + return influxdb_api.query_curve_by_ID_property_daterange( + id, type="link", property=prop, start_date=startdate, end_date=enddate + ) + # ids 用,隔开 # 返回 { 'id': value1, 'id2': value2 } # def query_SCADA_data_by_device_ID_and_time(query_ids_list: List[str], query_time: str, bucket: str="SCADA_data", client: InfluxDBClient=client) -> Dict[str, 
float]: @app.get("/queryscadadatabydeviceidandtime/") async def fastapi_query_scada_data_by_device_id_and_time(ids: str, querytime: str): - query_ids = ids.split(',') + query_ids = ids.split(",") logger.info(querytime) - return influxdb_api.query_SCADA_data_by_device_ID_and_time(query_ids_list=query_ids, query_time=querytime) + return influxdb_api.query_SCADA_data_by_device_ID_and_time( + query_ids_list=query_ids, query_time=querytime + ) + # 2025/05/04 DingZQ # 对于SCAD的曲线数据,我们需要有4 套数据值 -# 1. 原始数据 +# 1. 原始数据 # 2. 补充的数据 (补充前面第一步缺失的数据) # 3. 清洗后的数据点 (用五角星表示) -# 4. 模拟曲线 +# 4. 模拟曲线 + # 查询到的SCADA原始数据 # 数据1 @app.get("/queryscadadatabydeviceidandtimerange/") -async def fastapi_query_scada_data_by_device_id_and_time_range(ids: str, starttime: str, endtime: str): +async def fastapi_query_scada_data_by_device_id_and_time_range( + ids: str, starttime: str, endtime: str +): - print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}") + print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}") + + query_ids = ids.split(",") + return influxdb_api.query_SCADA_data_by_device_ID_and_timerange( + query_ids_list=query_ids, start_time=starttime, end_time=endtime + ) - query_ids = ids.split(',') - return influxdb_api.query_SCADA_data_by_device_ID_and_timerange(query_ids_list=query_ids, start_time=starttime, end_time=endtime) # 查询到的SCADA补充的数据 # 数据2 # 注意: 这里的id是 scada_info中的 api_query_id @app.get("/queryfillingscadadatabydeviceidandtimerange/") -async def fastapi_query_filling_scada_data_by_device_id_and_time_range(ids: str, starttime: str, endtime: str): +async def fastapi_query_filling_scada_data_by_device_id_and_time_range( + ids: str, starttime: str, endtime: str +): - print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}") + print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}") + + query_ids = ids.split(",") + return influxdb_api.query_filling_SCADA_data_by_device_ID_and_timerange( + query_ids_list=query_ids, 
start_time=starttime, end_time=endtime + ) - query_ids = ids.split(',') - return influxdb_api.query_filling_SCADA_data_by_device_ID_and_timerange(query_ids_list=query_ids, start_time=starttime, end_time=endtime) # 查询到的SCADA清洗后的数据点 # 数据3 # 注意: 这里的id是 scada_info中的 api_query_id @app.get("/querycleaningscadadatabydeviceidandtimerange/") -async def fastapi_query_cleaning_scada_data_by_device_id_and_time_range(ids: str, starttime: str, endtime: str): - - print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}") - - query_ids = ids.split(',') - return influxdb_api.query_cleaning_SCADA_data_by_device_ID_and_timerange(query_ids_list=query_ids, start_time=starttime, end_time=endtime) +async def fastapi_query_cleaning_scada_data_by_device_id_and_time_range( + ids: str, starttime: str, endtime: str +): + print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}") + query_ids = ids.split(",") + return influxdb_api.query_cleaning_SCADA_data_by_device_ID_and_timerange( + query_ids_list=query_ids, start_time=starttime, end_time=endtime + ) # 查询指定时间范围内,多个SCADA设备的清洗后的数据 # DingZQ, 2025-04-19 # 2025/05/04 DingZQ 这个是将原始数据跟清洗后的数据合并到一起,暂时不需要用这个API @app.get("/querycleanedscadadatabydeviceidandtimerange/") -async def fastapi_query_cleaned_scada_data_by_device_id_and_time_range(ids: str, starttime: str, endtime: str): +async def fastapi_query_cleaned_scada_data_by_device_id_and_time_range( + ids: str, starttime: str, endtime: str +): - print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}") - - query_ids = ids.split(',') - return influxdb_api.query_cleaned_SCADA_data_by_device_ID_and_timerange(query_ids_list=query_ids, start_time=starttime, end_time=endtime) + print(f"query_ids: {ids}, starttime: {starttime}, endtime: {endtime}") + query_ids = ids.split(",") + return influxdb_api.query_cleaned_SCADA_data_by_device_ID_and_timerange( + query_ids_list=query_ids, start_time=starttime, end_time=endtime + ) @app.get("/queryscadadatabydeviceidanddate/") async 
def fastapi_query_scada_data_by_device_id_and_date(ids: str, querydate: str): - query_ids = ids.split(',') - return influxdb_api.query_SCADA_data_by_device_ID_and_date(query_ids_list=query_ids, query_date=querydate) + query_ids = ids.split(",") + return influxdb_api.query_SCADA_data_by_device_ID_and_date( + query_ids_list=query_ids, query_date=querydate + ) + # DingZQ, 2025-03-08 # 返回所有SCADA设备在指定日期的所有记录 @@ -2606,17 +3171,20 @@ async def fastapi_query_all_scada_records_by_date(querydate: str): # 今天的不要去缓存 if not is_today_or_future: logger.info(f"save to cache redis") - packed = msgpack.packb(result_dict, default=encode_datetime) + packed = msgpack.packb(result_dict, default=encode_datetime) redis_client.set(cache_key, packed) logger.info(f"return results") return result_dict + # DingZQ, 2025-03-15 # Scheme @app.get("/queryallschemeallrecords/") -async def fastapi_query_all_scheme_all_records(schemetype: str, schemename: str, querydate: str) -> tuple: +async def fastapi_query_all_scheme_all_records( + schemetype: str, schemename: str, querydate: str +) -> tuple: # 缓存查询结果提高性能 global redis_client cache_key = f"queryallschemeallrecords_{schemetype}_{schemename}_{querydate}" @@ -2626,16 +3194,21 @@ async def fastapi_query_all_scheme_all_records(schemetype: str, schemename: str, loaded_dict = msgpack.unpackb(data, object_hook=decode_datetime) return loaded_dict - results = influxdb_api.query_scheme_all_record(scheme_Type=schemetype, scheme_Name=schemename, query_date=querydate) - packed = msgpack.packb(results, default=encode_datetime) + results = influxdb_api.query_scheme_all_record( + scheme_Type=schemetype, scheme_Name=schemename, query_date=querydate + ) + packed = msgpack.packb(results, default=encode_datetime) redis_client.set(cache_key, packed) return results + # DingZQ, 2025-03-21 # 缓存是用的queryallschemeallrecords的缓存 @app.get("/queryschemeallrecordsproperty/") -async def fastapi_query_all_scheme_all_records_property(schemetype: str, schemename: str, querydate: str, 
querytype: str, queryproperty: str) -> list: +async def fastapi_query_all_scheme_all_records_property( + schemetype: str, schemename: str, querydate: str, querytype: str, queryproperty: str +) -> list: # 缓存查询结果提高性能 global redis_client cache_key = f"queryallschemeallrecords_{schemetype}_{schemename}_{querydate}" @@ -2645,23 +3218,27 @@ async def fastapi_query_all_scheme_all_records_property(schemetype: str, schemen # 使用自定义的反序列化函数 all_results = msgpack.unpackb(data, object_hook=decode_datetime) else: - all_results = influxdb_api.query_scheme_all_record(scheme_Type=schemetype, scheme_Name=schemename, query_date=querydate) - packed = msgpack.packb(all_results, default=encode_datetime) + all_results = influxdb_api.query_scheme_all_record( + scheme_Type=schemetype, scheme_Name=schemename, query_date=querydate + ) + packed = msgpack.packb(all_results, default=encode_datetime) redis_client.set(cache_key, packed) results = None - if querytype == 'node': + if querytype == "node": results = all_results[0] elif querytype == "link": results = all_results[1] return results - + + @app.post("/clearrediskey/") async def fastapi_clear_redis_key(key: str): redis_client.delete(key) return True + @app.post("/clearrediskeys/") async def fastapi_clear_redis_keys(keys: str): # delete keys contains the key @@ -2670,11 +3247,13 @@ async def fastapi_clear_redis_keys(keys: str): return True + @app.post("/clearallredis/") async def fastapi_clear_all_redis(): redis_client.flushdb() return True + @app.get("/queryredis/") async def fastapi_query_redis(): return redis_client.keys("*") @@ -2684,12 +3263,12 @@ async def fastapi_query_redis(): async def fastapi_query_influxdb_buckets(): return influxdb_api.query_buckets() + @app.get("/queryinfluxdbbucketmeasurements/") async def fastapi_query_influxdb_bucket_measurements(bucket: str): return influxdb_api.query_measurements(bucket=bucket) - # DingZQ, 2024-12-31, generate openapi.json def generate_openapi_json(): openapi_json_path = "openapi.json" @@ 
-2697,8 +3276,6 @@ def generate_openapi_json(): json.dump(app.openapi(), file, indent=4) - - ############################################################ # real_time api 37 # example: http://127.0.0.1:8000/runsimulation?network=beibeizone&start_time=2024-04-01T08:00:00Z @@ -2740,47 +3317,71 @@ def generate_openapi_json(): # #os.rename(filename2, filename) # return result + # DingZQ, 2025-05-17 class Download_History_Data_Manually(BaseModel): """ download_date:样式如 datetime(2025, 5, 4) """ + download_date: datetime + # DingZQ, 2025-05-17 @app.post("/download_history_data_manually/") -async def fastapi_download_history_data_manually(data: Download_History_Data_Manually) -> None: +async def fastapi_download_history_data_manually( + data: Download_History_Data_Manually, +) -> None: item = data.dict() # 创建东八区时区对象 tz = timezone(timedelta(hours=8)) - begin_dt = datetime.combine(item['download_date'].date(), time.min).replace(tzinfo=tz) - end_dt = datetime.combine(item['download_date'].date(), time(23, 59, 59)).replace(tzinfo=tz) + begin_dt = datetime.combine(item["download_date"].date(), time.min).replace( + tzinfo=tz + ) + end_dt = datetime.combine(item["download_date"].date(), time(23, 59, 59)).replace( + tzinfo=tz + ) # 2. 
转为字符串 begin_time = begin_dt.isoformat() end_time = end_dt.isoformat() - - influxdb_api.download_history_data_manually(begin_time=begin_time, end_time=end_time) + + influxdb_api.download_history_data_manually( + begin_time=begin_time, end_time=end_time + ) # DingZQ, 2025-05-17 +# 新增开始时间和持续时间参数 class Run_Simulation_Manually_by_Date(BaseModel): """ name:数据库名称 simulation_date:样式如 2025-05-04 + start_time:开始时间,样式如 08:00:00 + duration:持续时间,单位为分钟 """ + name: str simulation_date: str + start_time: str + duration: int -def run_simulation_manually_by_date(network_name: str, base_date: datetime) -> None: - # 循环生成96个时间点(15分钟间隔) - for i in range(96): - # 计算当前时间偏移 - time_offset = timedelta(minutes=15 * i) - # 生成完整时间对象 - current_time = base_date + time_offset +def run_simulation_manually_by_date( + network_name: str, base_date: datetime, start_time: str, duration: int +) -> None: + # 解析开始时间 + start_hour, start_minute, start_second = map(int, start_time.split(":")) + start_datetime = base_date.replace( + hour=start_hour, minute=start_minute, second=start_second + ) + # 计算结束时间 + end_datetime = start_datetime + timedelta(minutes=duration) + + # 生成时间点,每15分钟一个 + current_time = start_datetime + while current_time < end_datetime: # 格式化成ISO8601带时区格式 iso_time = current_time.strftime("%Y-%m-%dT%H:%M:%S") + "+08:00" @@ -2788,39 +3389,71 @@ def run_simulation_manually_by_date(network_name: str, base_date: datetime) -> N simulation.run_simulation( name=network_name, simulation_type="realtime", - modify_pattern_start_time=iso_time + modify_pattern_start_time=iso_time, ) + # 增加15分钟 + current_time += timedelta(minutes=15) + + @app.post("/runsimulationmanuallybydate/") -async def fastapi_run_simulation_manually_by_date(data: Run_Simulation_Manually_by_Date) -> None: +async def fastapi_run_simulation_manually_by_date( + data: Run_Simulation_Manually_by_Date, +) -> dict[str, str]: item = data.dict() print(f"item: {item}") - filename = 'c:/lock.simulation' - filename2 = 'c:/lock.simulation2' + 
filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" if os.path.exists(filename2): - print('file exists') + print("file exists") raise HTTPException(status_code=409, detail="is in simulation") else: - print('file doesnt exists') - simulation.query_corresponding_element_id_and_query_id(item['name']) - simulation.query_corresponding_pattern_id_and_query_id(item['name']) - region_result = simulation.query_non_realtime_region(item['name']) + print("file doesnt exists") + try: + simulation.query_corresponding_element_id_and_query_id(item["name"]) + simulation.query_corresponding_pattern_id_and_query_id(item["name"]) + region_result = simulation.query_non_realtime_region(item["name"]) - globals.source_outflow_region_id = simulation.get_source_outflow_region_id(item['name'], region_result) - globals.realtime_region_pipe_flow_and_demand_id = simulation.query_realtime_region_pipe_flow_and_demand_id( - item['name'], region_result) - globals.pipe_flow_region_patterns = simulation.query_pipe_flow_region_patterns(item['name']) + globals.source_outflow_region_id = simulation.get_source_outflow_region_id( + item["name"], region_result + ) + globals.realtime_region_pipe_flow_and_demand_id = ( + simulation.query_realtime_region_pipe_flow_and_demand_id( + item["name"], region_result + ) + ) + globals.pipe_flow_region_patterns = ( + simulation.query_pipe_flow_region_patterns(item["name"]) + ) - globals.non_realtime_region_patterns = simulation.query_non_realtime_region_patterns(item['name'], region_result) - globals.source_outflow_region_patterns, globals.realtime_region_pipe_flow_and_demand_patterns = \ - simulation.get_realtime_region_patterns( - item['name'], globals.source_outflow_region_id, globals.realtime_region_pipe_flow_and_demand_id) + globals.non_realtime_region_patterns = ( + simulation.query_non_realtime_region_patterns( + item["name"], region_result + ) + ) + ( + globals.source_outflow_region_patterns, + 
globals.realtime_region_pipe_flow_and_demand_patterns, + ) = simulation.get_realtime_region_patterns( + item["name"], + globals.source_outflow_region_id, + globals.realtime_region_pipe_flow_and_demand_id, + ) - base_date = datetime.strptime(item['simulation_date'], "%Y-%m-%d") + base_date = datetime.strptime(item["simulation_date"], "%Y-%m-%d") - thread = threading.Thread(target=lambda: run_simulation_manually_by_date(item['name'], base_date)) - thread.start() + thread = threading.Thread( + target=lambda: run_simulation_manually_by_date( + item["name"], base_date, item["start_time"], item["duration"] + ) + ) + thread.start() + thread.join() # 等待线程完成 + + return {"status": "success"} + except Exception as e: + return {"status": "error", "message": str(e)} # thread.join() # DingZQ 08152025 @@ -2866,11 +3499,11 @@ async def fastapi_run_simulation_manually_by_date(data: Run_Simulation_Manually_ # #os.rename(filename2, filename) # return result - + ############################################################ # burst analysis api 38 -#example:http://127.0.0.1:8000/burst_analysis?network=beibeizone&start_time=2024-04-01T08:00:00Z&burst_ID=ZBBGXSZW000001&burst_size=200&duration=1800 -############################################################ +# example:http://127.0.0.1:8000/burst_analysis?network=beibeizone&start_time=2024-04-01T08:00:00Z&burst_ID=ZBBGXSZW000001&burst_size=200&duration=1800 +############################################################ # @app.get("/burst_analysis/", response_class = PlainTextResponse) # async def fastapi_burst_analysis(network: str,start_time:str,burst_ID:str,burst_size:float,burst_flow:float=None,duration:int=None) -> str: @@ -2914,28 +3547,30 @@ class BurstAnalysis(BaseModel): modify_valve_opening: Optional[dict[str, float]] = None scheme_Name: Optional[str] = None + @app.post("/burst_analysis/") async def fastapi_burst_analysis(data: BurstAnalysis) -> str: item = data.dict() - filename = 'c:/lock.simulation' - filename2 = 
'c:/lock.simulation2' + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" if os.path.exists(filename2): - print('file exists') + print("file exists") raise HTTPException(status_code=409, detail="is in simulation") else: - print('file doesnt exists') - #os.rename(filename, filename2) - burst_analysis(name=item['name'], - modify_pattern_start_time=item['modify_pattern_start_time'], - burst_ID=item['burst_ID'], - burst_size=item['burst_size'], - modify_total_duration=item['modify_total_duration'], - modify_fixed_pump_pattern=item['modify_fixed_pump_pattern'], - modify_variable_pump_pattern=item['modify_variable_pump_pattern'], - modify_valve_opening=item['modify_valve_opening'], - scheme_Name=item['scheme_Name'] + print("file doesnt exists") + # os.rename(filename, filename2) + burst_analysis( + name=item["name"], + modify_pattern_start_time=item["modify_pattern_start_time"], + burst_ID=item["burst_ID"], + burst_size=item["burst_size"], + modify_total_duration=item["modify_total_duration"], + modify_fixed_pump_pattern=item["modify_fixed_pump_pattern"], + modify_variable_pump_pattern=item["modify_variable_pump_pattern"], + modify_valve_opening=item["modify_valve_opening"], + scheme_Name=item["scheme_Name"], ) - #os.rename(filename2, filename) + # os.rename(filename2, filename) """ # 将 时间转换成日期,然后缓存这个计算结果 @@ -2959,83 +3594,120 @@ async def fastapi_burst_analysis(data: BurstAnalysis) -> str: return "success" + ############################################################ # valve close analysis api 39 -#example:http://127.0.0.1:8000/valve_close_analysis?network=beibeizone&start_time=2024-04-01T08:00:00Z&valves=GSD2307192058577780A3287D78&valves=GSD2307192058572E953B707226(S2)&duration=1800 -############################################################ +# example:http://127.0.0.1:8000/valve_close_analysis?network=beibeizone&start_time=2024-04-01T08:00:00Z&valves=GSD2307192058577780A3287D78&valves=GSD2307192058572E953B707226(S2)&duration=1800 
+############################################################ -@app.get("/valve_close_analysis/", response_class = PlainTextResponse) -async def fastapi_valve_close_analysis(network: str,start_time:str,valves:Annotated[list[str], Query()],duration:int=None) -> str: - filename = 'c:/lock.simulation' - filename2 = 'c:/lock.simulation2' + +@app.get("/valve_close_analysis/", response_class=PlainTextResponse) +async def fastapi_valve_close_analysis( + network: str, + start_time: str, + valves: Annotated[list[str], Query()], + duration: int = None, +) -> str: + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" if os.path.exists(filename2): - print('file exists') - raise HTTPException(status_code=409, detail="is in simulation") + print("file exists") + raise HTTPException(status_code=409, detail="is in simulation") else: - print('file doesnt exists') - #os.rename(filename, filename2) - result = valve_close_analysis(network,start_time,valves,duration) - #os.rename(filename2, filename) + print("file doesnt exists") + # os.rename(filename, filename2) + result = valve_close_analysis(network, start_time, valves, duration) + # os.rename(filename2, filename) return result + ############################################################ # pipe flushing analysis api 40 -#example:http://127.0.0.1:8000/flushing_analysis?network=beibeizone&start_time=2024-04-01T08:00:00Z&valves=GSD230719205857733F8F5214FF&valves=GSD230719205857C0AF65B6A170&valves_k=0.5&valves_k=0.5&drainage_node_ID=GSD2307192058570DEDF28E4F73&flush_flow=0&duration=1800 -############################################################ +# example:http://127.0.0.1:8000/flushing_analysis?network=beibeizone&start_time=2024-04-01T08:00:00Z&valves=GSD230719205857733F8F5214FF&valves=GSD230719205857C0AF65B6A170&valves_k=0.5&valves_k=0.5&drainage_node_ID=GSD2307192058570DEDF28E4F73&flush_flow=0&duration=1800 +############################################################ -@app.get("/flushing_analysis/", 
response_class = PlainTextResponse) -async def fastapi_flushing_analysis(network: str,start_time:str,valves:Annotated[list[str], Query()],valves_k:Annotated[list[float], Query()],drainage_node_ID:str,flush_flow:float=0,duration:int=None) -> str: - filename = 'c:/lock.simulation' - filename2 = 'c:/lock.simulation2' + +@app.get("/flushing_analysis/", response_class=PlainTextResponse) +async def fastapi_flushing_analysis( + network: str, + start_time: str, + valves: Annotated[list[str], Query()], + valves_k: Annotated[list[float], Query()], + drainage_node_ID: str, + flush_flow: float = 0, + duration: int = None, +) -> str: + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" if os.path.exists(filename2): - print('file exists') - raise HTTPException(status_code=409, detail="is in simulation") + print("file exists") + raise HTTPException(status_code=409, detail="is in simulation") else: - print('file doesnt exists') - #os.rename(filename, filename2) - result = flushing_analysis(network,start_time,valves,valves_k,drainage_node_ID,flush_flow,duration) - #os.rename(filename2, filename) + print("file doesnt exists") + # os.rename(filename, filename2) + result = flushing_analysis( + network, + start_time, + valves, + valves_k, + drainage_node_ID, + flush_flow, + duration, + ) + # os.rename(filename2, filename) return result - ############################################################ # contaminant_simulation api 41 -#example:http://127.0.0.1:8000/contaminant_simulation?network=beibeizone&start_time=2024-04-01T08:00:00Z&source=ZBBDTZDP002677&concentration=100&duration=1800 -############################################################ +# example:http://127.0.0.1:8000/contaminant_simulation?network=beibeizone&start_time=2024-04-01T08:00:00Z&source=ZBBDTZDP002677&concentration=100&duration=1800 +############################################################ -@app.get("/contaminant_simulation/", response_class = PlainTextResponse) -async def 
fastapi_contaminant_simulation(network: str,start_time:str,source:str,concentration:float,duration:int=900,pattern:str=None) -> str: - filename = 'c:/lock.simulation' - filename2 = 'c:/lock.simulation2' + +@app.get("/contaminant_simulation/", response_class=PlainTextResponse) +async def fastapi_contaminant_simulation( + network: str, + start_time: str, + source: str, + concentration: float, + duration: int = 900, + pattern: str = None, +) -> str: + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" if os.path.exists(filename2): - print('file exists') - raise HTTPException(status_code=409, detail="is in simulation") + print("file exists") + raise HTTPException(status_code=409, detail="is in simulation") else: - print('file doesnt exists') - #os.rename(filename, filename2) - result = contaminant_simulation(network,start_time,source,concentration,duration,pattern) - #os.rename(filename2, filename) + print("file doesnt exists") + # os.rename(filename, filename2) + result = contaminant_simulation( + network, start_time, source, concentration, duration, pattern + ) + # os.rename(filename2, filename) return result + ############################################################ # age analysis api 42 -#example:http://127.0.0.1:8000/age_analysis/?network=bb&start_time=2024-04-01T00:00:00Z&end_time=2024-04-01T08:00:00Z&duration=28800 +# example:http://127.0.0.1:8000/age_analysis/?network=bb&start_time=2024-04-01T00:00:00Z&end_time=2024-04-01T08:00:00Z&duration=28800 ############################################################ -@app.get("/age_analysis/", response_class = PlainTextResponse) -async def fastapi_age_analysis(network: str, start_time:str, end_time:str, duration:int) -> str: - filename = 'c:/lock.simulation' - filename2 = 'c:/lock.simulation2' +@app.get("/age_analysis/", response_class=PlainTextResponse) +async def fastapi_age_analysis( + network: str, start_time: str, end_time: str, duration: int +) -> str: + filename = "c:/lock.simulation" + 
filename2 = "c:/lock.simulation2" if os.path.exists(filename2): - print('file exists') + print("file exists") raise HTTPException(status_code=409, detail="is in simulation") else: - print('file doesnt exists') - #os.rename(filename, filename2) - result = age_analysis(network,start_time,end_time,duration) - #os.rename(filename2, filename) + print("file doesnt exists") + # os.rename(filename, filename2) + result = age_analysis(network, start_time, end_time, duration) + # os.rename(filename2, filename) return result @@ -3056,18 +3728,23 @@ class SchedulingAnalysis(BaseModel): @app.post("/scheduling_analysis/") async def fastapi_scheduling_analysis(data: SchedulingAnalysis) -> str: data = data.dict() - filename = 'c:/lock.simulation' - filename2 = 'c:/lock.simulation2' + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" if os.path.exists(filename2): - print('file exists') + print("file exists") raise HTTPException(status_code=409, detail="is in simulation") else: - print('file doesnt exists') - #os.rename(filename, filename2) - result = scheduling_simulation(data['network'], data['start_time'], - data['pump_control'], data['tank_id'], - data['water_plant_output_id'], data['time_delta']) - #os.rename(filename2, filename) + print("file doesnt exists") + # os.rename(filename, filename2) + result = scheduling_simulation( + data["network"], + data["start_time"], + data["pump_control"], + data["tank_id"], + data["water_plant_output_id"], + data["time_delta"], + ) + # os.rename(filename2, filename) return result @@ -3094,19 +3771,21 @@ class PressureRegulation(BaseModel): @app.post("/pressure_regulation/") async def fastapi_pressure_regulation(data: PressureRegulation) -> str: item = data.dict() - filename = 'c:/lock.simulation' - filename2 = 'c:/lock.simulation2' + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" if os.path.exists(filename2): - print('file exists') + print("file exists") raise HTTPException(status_code=409, detail="is in 
simulation") else: - print('file doesnt exists') - #os.rename(filename, filename2) - result = pressure_regulation(prj_name=item['network'], - start_datetime=item['start_time'], - pump_control=item['pump_control'], - tank_initial_level_control=item['tank_init_level']) - #os.rename(filename2, filename) + print("file doesnt exists") + # os.rename(filename, filename2) + result = pressure_regulation( + prj_name=item["network"], + start_datetime=item["start_time"], + pump_control=item["pump_control"], + tank_initial_level_control=item["tank_init_level"], + ) + # os.rename(filename2, filename) return result @@ -3136,20 +3815,22 @@ class ProjectManagement(BaseModel): @app.post("/project_management/") async def fastapi_project_management(data: ProjectManagement) -> str: item = data.dict() - filename = 'c:/lock.simulation' - filename2 = 'c:/lock.simulation2' + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" if os.path.exists(filename2): - print('file exists') + print("file exists") raise HTTPException(status_code=409, detail="is in simulation") else: - print('file doesnt exists') - #os.rename(filename, filename2) - result = project_management(prj_name=item['network'], - start_datetime=item['start_time'], - pump_control=item['pump_control'], - tank_initial_level_control=item['tank_init_level'], - region_demand_control=item['region_demand']) - #os.rename(filename2, filename) + print("file doesnt exists") + # os.rename(filename, filename2) + result = project_management( + prj_name=item["network"], + start_datetime=item["start_time"], + pump_control=item["pump_control"], + tank_initial_level_control=item["tank_init_level"], + region_demand_control=item["region_demand"], + ) + # os.rename(filename2, filename) return result @@ -3164,25 +3845,25 @@ async def fastapi_project_management(data: ProjectManagement) -> str: @app.post("/network_project/") async def fastapi_network_project(file: UploadFile = File()) -> str: - temp_file_path = './inp/' + temp_file_path = 
"./inp/" if not os.path.exists(temp_file_path): os.mkdir(temp_file_path) temp_file_name = f'network_project_{datetime.now().strftime("%Y%m%d")}' - temp_file_path = f'{temp_file_path}{temp_file_name}.inp' + temp_file_path = f"{temp_file_path}{temp_file_name}.inp" with open(temp_file_path, "wb") as buffer: shutil.copyfileobj(file.file, buffer) buffer.close() - filename = 'c:/lock.simulation' - filename2 = 'c:/lock.simulation2' + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" if os.path.exists(filename2): - print('file exists') + print("file exists") raise HTTPException(status_code=409, detail="is in simulation") else: - print('file doesnt exists') + print("file doesnt exists") result = run_inp(temp_file_name) - #os.rename(filename2, filename) + # os.rename(filename2, filename) return result @@ -3204,18 +3885,23 @@ class DailySchedulingAnalysis(BaseModel): @app.post("/daily_scheduling_analysis/") async def fastapi_daily_scheduling_analysis(data: DailySchedulingAnalysis) -> str: data = data.dict() - filename = 'c:/lock.simulation' - filename2 = 'c:/lock.simulation2' + filename = "c:/lock.simulation" + filename2 = "c:/lock.simulation2" if os.path.exists(filename2): - print('file exists') + print("file exists") raise HTTPException(status_code=409, detail="is in simulation") else: - print('file doesnt exists') - #os.rename(filename, filename2) - result = daily_scheduling_simulation(data['network'], data['start_time'], - data['pump_control'], data['reservoir_id'], data['tank_id'], - data['water_plant_output_id']) - #os.rename(filename2, filename) + print("file doesnt exists") + # os.rename(filename, filename2) + result = daily_scheduling_simulation( + data["network"], + data["start_time"], + data["pump_control"], + data["reservoir_id"], + data["tank_id"], + data["water_plant_output_id"], + ) + # os.rename(filename2, filename) return result @@ -3227,7 +3913,7 @@ async def fastapi_daily_scheduling_analysis(data: DailySchedulingAnalysis) -> st 
@app.post("/network_update/") async def fastapi_network_update(file: UploadFile = File()) -> str: # 默认文件夹 - default_folder = './' + default_folder = "./" # 使用当前时间生成临时文件名 temp_file_name = f'network_update_{datetime.now().strftime("%Y%m%d")}' @@ -3264,32 +3950,43 @@ class PumpFailureState(BaseModel): async def fastapi_pump_failure(data: PumpFailureState) -> str: item = data.dict() - with open('./pump_failure_message.txt', 'a', encoding='utf-8-sig') as f1: - f1.write('[{}] {}\n'.format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'), item)) # save message + with open("./pump_failure_message.txt", "a", encoding="utf-8-sig") as f1: + f1.write( + "[{}] {}\n".format(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), item) + ) # save message status_info = item.copy() - with open('./pump_failure_status.txt', 'r', encoding='utf-8-sig') as f2: + with open("./pump_failure_status.txt", "r", encoding="utf-8-sig") as f2: lines = f2.readlines() first_stage_pump_status_dict = json.loads(json.dumps(eval(lines[0]))) - second_stage_pump_status_dict = json.loads(json.dumps(eval(lines[-1]))) # read local file - pump_status_dict = {'first': first_stage_pump_status_dict, # first-stage pump - 'second': second_stage_pump_status_dict} # second-stage pump - for pump_type in status_info['pump_status'].keys(): # 'first' or 'second' + second_stage_pump_status_dict = json.loads( + json.dumps(eval(lines[-1])) + ) # read local file + pump_status_dict = { + "first": first_stage_pump_status_dict, # first-stage pump + "second": second_stage_pump_status_dict, + } # second-stage pump + for pump_type in status_info["pump_status"].keys(): # 'first' or 'second' if pump_type in pump_status_dict.keys(): # the type of pumps exists - if all(pump_id in pump_status_dict[pump_type].keys() - for pump_id in status_info['pump_status'][pump_type].keys()): # all pump IDs exist - for pump_id in status_info['pump_status'][pump_type].keys(): + if all( + pump_id in pump_status_dict[pump_type].keys() + for pump_id in 
status_info["pump_status"][pump_type].keys() + ): # all pump IDs exist + for pump_id in status_info["pump_status"][pump_type].keys(): pump_status_dict[pump_type][pump_id] = int( - status_info['pump_status'][pump_type][pump_id]) # modify status dict + status_info["pump_status"][pump_type][pump_id] + ) # modify status dict else: - return json.dumps('ERROR: Wrong Pump ID') + return json.dumps("ERROR: Wrong Pump ID") else: - return json.dumps('ERROR: Wrong Pump Type') + return json.dumps("ERROR: Wrong Pump Type") - with open('./pump_failure_status.txt', 'w', encoding='utf-8-sig') as f2_: - f2_.write('{}\n{}'.format(pump_status_dict['first'], pump_status_dict['second'])) # save local file + with open("./pump_failure_status.txt", "w", encoding="utf-8-sig") as f2_: + f2_.write( + "{}\n{}".format(pump_status_dict["first"], pump_status_dict["second"]) + ) # save local file - return json.dumps('SUCCESS') + return json.dumps("SUCCESS") ############################################################ @@ -3305,20 +4002,19 @@ class Pressure_Sensor_Placement_Sensitivity(BaseModel): @app.post("/pressure_sensor_placement_sensitivity/") -async def fastapi_pressure_sensor_placement_sensitivity(data: Pressure_Sensor_Placement_Sensitivity) -> None: +async def fastapi_pressure_sensor_placement_sensitivity( + data: Pressure_Sensor_Placement_Sensitivity, +) -> None: item = data.dict() - pressure_sensor_placement_sensitivity(name=item['name'], - scheme_name=item['scheme_name'], - sensor_number=item['sensor_number'], - min_diameter=item['min_diameter'], - username=item['username'] + pressure_sensor_placement_sensitivity( + name=item["name"], + scheme_name=item["scheme_name"], + sensor_number=item["sensor_number"], + min_diameter=item["min_diameter"], + username=item["username"], ) - - - - class Item(BaseModel): str_info: str dict_info: Optional[dict] = None @@ -3331,8 +4027,12 @@ async def get_dict(item: Item): if __name__ == "__main__": - #uvicorn.run(app, host="0.0.0.0", port=8000) - 
#url='http://127.0.0.1:8000/valve_close_analysis?network=beibeizone&start_time=2024-04-01T08:00:00Z&valve_IDs=GSD2307192058577780A3287D78&valve_IDs=GSD2307192058572E953B707226(S2)&duration=1800' - url='http://127.0.0.1:8000/burst_analysis?network=beibeizone&start_time=2024-04-01T08:00:00Z&burst_ID=ZBBGXSZW000001&duration=1800' - Request.get(url,) + # uvicorn.run(app, host="0.0.0.0", port=8000) + # url='http://127.0.0.1:8000/valve_close_analysis?network=beibeizone&start_time=2024-04-01T08:00:00Z&valve_IDs=GSD2307192058577780A3287D78&valve_IDs=GSD2307192058572E953B707226(S2)&duration=1800' + # url='http://127.0.0.1:8000/burst_analysis?network=beibeizone&start_time=2024-04-01T08:00:00Z&burst_ID=ZBBGXSZW000001&duration=1800' + url = "http://192.168.1.36:8000/queryallschemeallrecords/?schemename=Fangan0817114448&querydate=2025-08-13&schemetype=burst_Analysis" + # response = Request.get(url) + import requests + + response = requests.get(url)