Change hard-coded project name 'bb' to project_info.name

This commit is contained in:
DingZQ
2025-08-13 18:12:05 +08:00
parent d745e6f011
commit e4b1648041
9 changed files with 34 additions and 29 deletions

View File

@@ -3,6 +3,7 @@ import auto_store_non_realtime_SCADA_data
import asyncio import asyncio
import influxdb_api import influxdb_api
import influxdb_info import influxdb_info
import project_info
# 为了让多个任务并发运行,我们可以用 asyncio.to_thread 分别启动它们 # 为了让多个任务并发运行,我们可以用 asyncio.to_thread 分别启动它们
async def main(): async def main():
@@ -16,8 +17,8 @@ if __name__ == "__main__":
token = influxdb_info.token token = influxdb_info.token
org_name = influxdb_info.org org_name = influxdb_info.org
influxdb_api.query_pg_scada_info_realtime('bb') influxdb_api.query_pg_scada_info_realtime(project_info.name)
influxdb_api.query_pg_scada_info_non_realtime('bb') influxdb_api.query_pg_scada_info_non_realtime(project_info.name)
# 用 asyncio 并发启动两个任务 # 用 asyncio 并发启动两个任务
asyncio.run(main()) asyncio.run(main())

View File

@@ -10,6 +10,7 @@ import shutil
from influxdb_client import InfluxDBClient, BucketsApi, WriteApi, OrganizationsApi, Point, QueryApi from influxdb_client import InfluxDBClient, BucketsApi, WriteApi, OrganizationsApi, Point, QueryApi
import simulation import simulation
import influxdb_info import influxdb_info
import project_info
def setup_logger(): def setup_logger():
# 创建日志目录 # 创建日志目录
@@ -103,19 +104,19 @@ def run_simulation_job() -> None:
if current_time.minute % 15 == 0: if current_time.minute % 15 == 0:
print(f"{current_time.strftime('%Y-%m-%d %H:%M:%S')} -- Start simulation task.") print(f"{current_time.strftime('%Y-%m-%d %H:%M:%S')} -- Start simulation task.")
# 计算前获取scada_info中的信息按照设定的方法修改pg数据库 # 计算前获取scada_info中的信息按照设定的方法修改pg数据库
simulation.query_corresponding_element_id_and_query_id("bb") simulation.query_corresponding_element_id_and_query_id(project_info.name)
simulation.query_corresponding_pattern_id_and_query_id('bb') simulation.query_corresponding_pattern_id_and_query_id(project_info.name)
region_result = simulation.query_non_realtime_region('bb') region_result = simulation.query_non_realtime_region(project_info.name)
globals.source_outflow_region_id = simulation.get_source_outflow_region_id('bb', region_result) globals.source_outflow_region_id = simulation.get_source_outflow_region_id(project_info.name, region_result)
globals.realtime_region_pipe_flow_and_demand_id = simulation.query_realtime_region_pipe_flow_and_demand_id('bb', region_result) globals.realtime_region_pipe_flow_and_demand_id = simulation.query_realtime_region_pipe_flow_and_demand_id(project_info.name, region_result)
globals.pipe_flow_region_patterns = simulation.query_pipe_flow_region_patterns('bb') globals.pipe_flow_region_patterns = simulation.query_pipe_flow_region_patterns(project_info.name)
globals.non_realtime_region_patterns = simulation.query_non_realtime_region_patterns('bb', region_result) globals.non_realtime_region_patterns = simulation.query_non_realtime_region_patterns(project_info.name, region_result)
globals.source_outflow_region_patterns, realtime_region_pipe_flow_and_demand_patterns = simulation.get_realtime_region_patterns('bb', globals.source_outflow_region_patterns, realtime_region_pipe_flow_and_demand_patterns = simulation.get_realtime_region_patterns(project_info.name,
globals.source_outflow_region_id, globals.source_outflow_region_id,
globals.realtime_region_pipe_flow_and_demand_id) globals.realtime_region_pipe_flow_and_demand_id)
modify_pattern_start_time: str = get_next_15minute_time() # 获取下一个15分钟时间点 modify_pattern_start_time: str = get_next_15minute_time() # 获取下一个15分钟时间点
# print(modify_pattern_start_time) # print(modify_pattern_start_time)
simulation.run_simulation(name='bb', simulation_type="realtime", modify_pattern_start_time=modify_pattern_start_time) simulation.run_simulation(name=project_info.name, simulation_type="realtime", modify_pattern_start_time=modify_pattern_start_time)
logger.info('{} -- Successfully run simulation and store realtime simulation result.'.format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) logger.info('{} -- Successfully run simulation and store realtime simulation result.'.format(datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
else: else:
@@ -150,6 +151,6 @@ if __name__ == "__main__":
client = InfluxDBClient(url=url, token=token) client = InfluxDBClient(url=url, token=token)
# step2: 先查询pg数据库中scada_info的信息然后存储SCADA数据到SCADA_data这个bucket里 # step2: 先查询pg数据库中scada_info的信息然后存储SCADA数据到SCADA_data这个bucket里
influxdb_api.query_pg_scada_info_realtime('bb') influxdb_api.query_pg_scada_info_realtime(project_info.name)
# 自动执行 # 自动执行
realtime_task() realtime_task()

View File

@@ -8,6 +8,7 @@ from logging.handlers import TimedRotatingFileHandler
import time import time
from influxdb_client import InfluxDBClient, BucketsApi, WriteApi, OrganizationsApi, Point, QueryApi from influxdb_client import InfluxDBClient, BucketsApi, WriteApi, OrganizationsApi, Point, QueryApi
import influxdb_info import influxdb_info
import project_info
def setup_logger(): def setup_logger():
# 创建日志目录 # 创建日志目录
@@ -133,6 +134,6 @@ if __name__ == "__main__":
client = InfluxDBClient(url=url, token=token) client = InfluxDBClient(url=url, token=token)
# step2: 先查询pg数据库中scada_info的信息然后存储SCADA数据到SCADA_data这个bucket里 # step2: 先查询pg数据库中scada_info的信息然后存储SCADA数据到SCADA_data这个bucket里
influxdb_api.query_pg_scada_info_non_realtime('bb') influxdb_api.query_pg_scada_info_non_realtime(project_info.name)
# 自动执行 # 自动执行
store_non_realtime_SCADA_data_task() store_non_realtime_SCADA_data_task()

View File

@@ -17,6 +17,7 @@ import pandas as pd
import openpyxl import openpyxl
import pytz import pytz
import influxdb_info import influxdb_info
import project_info
import time_api import time_api
# influxdb数据库连接信息 # influxdb数据库连接信息

View File

@@ -705,7 +705,7 @@ def network_update(file_path: str) -> None:
print(f"history_patterns_flows文件存在开始处理...") print(f"history_patterns_flows文件存在开始处理...")
# 连接到 PostgreSQL 数据库(这里是数据库 "bb" # 连接到 PostgreSQL 数据库(这里是数据库 "bb"
with psycopg.connect("dbname=bb host=127.0.0.1") as conn: with psycopg.connect(f"dbname={project_info.name} host=127.0.0.1") as conn:
with conn.cursor() as cur: with conn.cursor() as cur:
with open(csv_path, newline='', encoding='utf-8-sig') as csvfile: with open(csv_path, newline='', encoding='utf-8-sig') as csvfile:
reader = csv.DictReader(csvfile) reader = csv.DictReader(csvfile)

View File

@@ -8,7 +8,7 @@ import json
import pytz import pytz
import requests import requests
import time import time
import project_info
url_path = 'http://10.101.15.16:9000/loong' # 内网 url_path = 'http://10.101.15.16:9000/loong' # 内网
# url_path = 'http://183.64.62.100:9057/loong' # 外网 # url_path = 'http://183.64.62.100:9057/loong' # 外网
@@ -848,7 +848,7 @@ if __name__ == '__main__':
# run_simulation("beibeizone","2024-04-01T08:00:00Z") # run_simulation("beibeizone","2024-04-01T08:00:00Z")
# read_inp('bb_server', 'model20_en.inp') # read_inp('bb_server', 'model20_en.inp')
run_simulation_ex( run_simulation_ex(
name='bb', simulation_type='extended', start_datetime='2024-11-09T02:30:00Z', name=project_info.name, simulation_type='extended', start_datetime='2024-11-09T02:30:00Z',
# end_datetime='2024-05-30T16:00:00Z', # end_datetime='2024-05-30T16:00:00Z',
# duration=0, # duration=0,
# pump_control={'PU00006': [45, 40]} # pump_control={'PU00006': [45, 40]}

View File

@@ -19,7 +19,7 @@ from spopt.region import Skater
from shapely.geometry import Point from shapely.geometry import Point
import geopandas as gpd import geopandas as gpd
from sklearn.metrics import pairwise_distances from sklearn.metrics import pairwise_distances
import project_info
# 2025/03/12 # 2025/03/12
# Step1: 获取节点坐标 # Step1: 获取节点坐标
@@ -639,7 +639,7 @@ def get_ID(name: str, sensor_num: int, min_diameter: int) -> list[str]:
if __name__ == '__main__': if __name__ == '__main__':
sensorindex = get_ID(name='bb', sensor_num=20, min_diameter=300) sensorindex = get_ID(name=project_info.name, sensor_num=20, min_diameter=300)
print(sensorindex) print(sensorindex)
# 将 sensor_coord 字典转换为 DataFrame # 将 sensor_coord 字典转换为 DataFrame

View File

@@ -12,7 +12,7 @@ from wntr.epanet.toolkit import EpanetException
from numpy.linalg import slogdet from numpy.linalg import slogdet
import random import random
from tjnetwork import * from tjnetwork import *
import project_info
# 2025/03/12 # 2025/03/12
# Step1: 获取节点坐标 # Step1: 获取节点坐标
@@ -472,7 +472,7 @@ def get_sensor_coord(name: str, sensor_num: int) -> dict[str, float]:
if __name__ == '__main__': if __name__ == '__main__':
sensor_coord = get_sensor_coord(name='bb', sensor_num=20) sensor_coord = get_sensor_coord(name=project_info.name, sensor_num=20)
print(sensor_coord) print(sensor_coord)
# ''' # '''
# 初始测压点布置根据灵敏度来布置计算初始情况下的校准过程的error # 初始测压点布置根据灵敏度来布置计算初始情况下的校准过程的error

View File

@@ -18,6 +18,7 @@ import psycopg
import logging import logging
import globals import globals
import uuid import uuid
import project_info
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
@@ -929,16 +930,16 @@ def run_simulation(name: str, simulation_type: str, modify_pattern_start_time: s
if __name__ == "__main__": if __name__ == "__main__":
# 计算前获取scada_info中的信息按照设定的方法修改pg数据库 # 计算前获取scada_info中的信息按照设定的方法修改pg数据库
query_corresponding_element_id_and_query_id("bb") query_corresponding_element_id_and_query_id(project_info.name)
query_corresponding_pattern_id_and_query_id('bb') query_corresponding_pattern_id_and_query_id(project_info.name)
region_result = query_non_realtime_region('bb') region_result = query_non_realtime_region(project_info.name)
globals.source_outflow_region_id = get_source_outflow_region_id('bb', region_result) globals.source_outflow_region_id = get_source_outflow_region_id(project_info.name, region_result)
globals.realtime_region_pipe_flow_and_demand_id = query_realtime_region_pipe_flow_and_demand_id('bb', region_result) globals.realtime_region_pipe_flow_and_demand_id = query_realtime_region_pipe_flow_and_demand_id(project_info.name, region_result)
globals.pipe_flow_region_patterns = query_pipe_flow_region_patterns('bb') globals.pipe_flow_region_patterns = query_pipe_flow_region_patterns(project_info.name)
globals.non_realtime_region_patterns = query_non_realtime_region_patterns('bb', region_result) globals.non_realtime_region_patterns = query_non_realtime_region_patterns(project_info.name, region_result)
globals.source_outflow_region_patterns, globals.realtime_region_pipe_flow_and_demand_patterns = get_realtime_region_patterns('bb', globals.source_outflow_region_id, globals.realtime_region_pipe_flow_and_demand_id) globals.source_outflow_region_patterns, globals.realtime_region_pipe_flow_and_demand_patterns = get_realtime_region_patterns(project_info.name, globals.source_outflow_region_id, globals.realtime_region_pipe_flow_and_demand_id)
# 基础日期和时间(日期部分保持不变) # 基础日期和时间(日期部分保持不变)
base_date = datetime(2025, 5, 4) base_date = datetime(2025, 5, 4)
@@ -956,7 +957,7 @@ if __name__ == "__main__":
# 执行函数调用 # 执行函数调用
run_simulation( run_simulation(
name='bb', name=project_info.name,
simulation_type="realtime", simulation_type="realtime",
modify_pattern_start_time=iso_time modify_pattern_start_time=iso_time
) )