还原旧依赖文件;使用事务 Delete And Copy 加速数据的存储/覆盖

This commit is contained in:
JIANG
2025-12-10 11:06:09 +08:00
parent 77cc7236fc
commit 8a9345dfcc
5 changed files with 260 additions and 250 deletions

Binary file not shown.

View File

@@ -8,20 +8,11 @@ if __name__ == "__main__":
if sys.platform == "win32": if sys.platform == "win32":
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
# 创建配置对象 # 用 uvicorn.run 支持 workers 参数
config = uvicorn.Config( uvicorn.run(
"main:app", "main:app",
host="0.0.0.0", host="0.0.0.0",
port=8000, port=8000,
loop="asyncio", # 强制使用 asyncio 事件循环 workers=2, # 这里可以设置多进程
workers=2, # 如果需要多进程,可以取消注释 loop="asyncio",
) )
server = uvicorn.Server(config)
# 创建并设置事件循环
if sys.platform == "win32":
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
# 运行服务器
asyncio.run(server.serve())

View File

@@ -1197,7 +1197,7 @@ def run_simulation(
cs.append(valve_status) cs.append(valve_status)
set_status(name_c, cs) set_status(name_c, cs)
# 运行并返回结果 # 运行并返回结果
result = run_project(name_c) run_project(name_c)
time_cost_end = time.perf_counter() time_cost_end = time.perf_counter()
print( print(
"{} -- Hydraulic simulation finished, cost time: {:.2f} s.".format( "{} -- Hydraulic simulation finished, cost time: {:.2f} s.".format(
@@ -1227,6 +1227,7 @@ def run_simulation(
# print(num_periods_result) # print(num_periods_result)
# print(node_result) # print(node_result)
# 存储 # 存储
starttime = time.time()
if simulation_type.upper() == "REALTIME": if simulation_type.upper() == "REALTIME":
TimescaleInternalStorage.store_realtime_simulation( TimescaleInternalStorage.store_realtime_simulation(
node_result, link_result, modify_pattern_start_time node_result, link_result, modify_pattern_start_time
@@ -1240,6 +1241,8 @@ def run_simulation(
modify_pattern_start_time, modify_pattern_start_time,
num_periods_result, num_periods_result,
) )
endtime = time.time()
logging.info("store time: %f", endtime - starttime)
# 暂不需要再次存储 SCADA 模拟信息 # 暂不需要再次存储 SCADA 模拟信息
# TimescaleInternalStorage.fill_scheme_simulation_result_to_SCADA(scheme_Type=scheme_Type, scheme_Name=scheme_Name) # TimescaleInternalStorage.fill_scheme_simulation_result_to_SCADA(scheme_Type=scheme_Type, scheme_Name=scheme_Name)

View File

@@ -12,27 +12,28 @@ class RealtimeRepository:
@staticmethod @staticmethod
async def insert_links_batch(conn: AsyncConnection, data: List[dict]): async def insert_links_batch(conn: AsyncConnection, data: List[dict]):
"""Batch insert for realtime.link_simulation using INSERT for performance.""" """Batch insert for realtime.link_simulation using DELETE then COPY for performance."""
if not data: if not data:
return return
query = """
INSERT INTO realtime.link_simulation (time, id, flow, friction, headloss, quality, reaction, setting, status, velocity) # 假设同一批次的数据时间是相同的
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, id) DO UPDATE SET
flow = EXCLUDED.flow, # 使用事务确保原子性
friction = EXCLUDED.friction, async with conn.transaction():
headloss = EXCLUDED.headloss,
quality = EXCLUDED.quality,
reaction = EXCLUDED.reaction,
setting = EXCLUDED.setting,
status = EXCLUDED.status,
velocity = EXCLUDED.velocity
"""
async with conn.cursor() as cur: async with conn.cursor() as cur:
await cur.executemany( # 1. 先删除该时间点的旧数据
query, await cur.execute(
[ "DELETE FROM realtime.link_simulation WHERE time = %s",
( (target_time,)
)
# 2. 使用 COPY 快速写入新数据
async with cur.copy(
"COPY realtime.link_simulation (time, id, flow, friction, headloss, quality, reaction, setting, status, velocity) FROM STDIN"
) as copy:
for item in data:
await copy.write_row((
item["time"], item["time"],
item["id"], item["id"],
item.get("flow"), item.get("flow"),
@@ -43,34 +44,32 @@ class RealtimeRepository:
item.get("setting"), item.get("setting"),
item.get("status"), item.get("status"),
item.get("velocity"), item.get("velocity"),
) ))
for item in data
],
)
@staticmethod @staticmethod
def insert_links_batch_sync(conn: Connection, data: List[dict]): def insert_links_batch_sync(conn: Connection, data: List[dict]):
"""Batch insert for realtime.link_simulation using INSERT for performance (sync version).""" """Batch insert for realtime.link_simulation using DELETE then COPY for performance (sync version)."""
if not data: if not data:
return return
query = """
INSERT INTO realtime.link_simulation (time, id, flow, friction, headloss, quality, reaction, setting, status, velocity) # 假设同一批次的数据时间是相同的
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, id) DO UPDATE SET
flow = EXCLUDED.flow, # 使用事务确保原子性
friction = EXCLUDED.friction, with conn.transaction():
headloss = EXCLUDED.headloss,
quality = EXCLUDED.quality,
reaction = EXCLUDED.reaction,
setting = EXCLUDED.setting,
status = EXCLUDED.status,
velocity = EXCLUDED.velocity
"""
with conn.cursor() as cur: with conn.cursor() as cur:
cur.executemany( # 1. 先删除该时间点的旧数据
query, cur.execute(
[ "DELETE FROM realtime.link_simulation WHERE time = %s",
( (target_time,)
)
# 2. 使用 COPY 快速写入新数据
with cur.copy(
"COPY realtime.link_simulation (time, id, flow, friction, headloss, quality, reaction, setting, status, velocity) FROM STDIN"
) as copy:
for item in data:
copy.write_row((
item["time"], item["time"],
item["id"], item["id"],
item.get("flow"), item.get("flow"),
@@ -81,10 +80,7 @@ class RealtimeRepository:
item.get("setting"), item.get("setting"),
item.get("status"), item.get("status"),
item.get("velocity"), item.get("velocity"),
) ))
for item in data
],
)
@staticmethod @staticmethod
async def get_link_by_time_range( async def get_link_by_time_range(
@@ -213,59 +209,63 @@ class RealtimeRepository:
async def insert_nodes_batch(conn: AsyncConnection, data: List[dict]): async def insert_nodes_batch(conn: AsyncConnection, data: List[dict]):
if not data: if not data:
return return
query = """
INSERT INTO realtime.node_simulation (time, id, actual_demand, total_head, pressure, quality) # 假设同一批次的数据时间是相同的
VALUES (%s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, id) DO UPDATE SET
actual_demand = EXCLUDED.actual_demand, # 使用事务确保原子性
total_head = EXCLUDED.total_head, async with conn.transaction():
pressure = EXCLUDED.pressure,
quality = EXCLUDED.quality
"""
async with conn.cursor() as cur: async with conn.cursor() as cur:
await cur.executemany( # 1. 先删除该时间点的旧数据
query, await cur.execute(
[ "DELETE FROM realtime.node_simulation WHERE time = %s",
( (target_time,)
)
# 2. 使用 COPY 快速写入新数据
async with cur.copy(
"COPY realtime.node_simulation (time, id, actual_demand, total_head, pressure, quality) FROM STDIN"
) as copy:
for item in data:
await copy.write_row((
item["time"], item["time"],
item["id"], item["id"],
item.get("actual_demand"), item.get("actual_demand"),
item.get("total_head"), item.get("total_head"),
item.get("pressure"), item.get("pressure"),
item.get("quality"), item.get("quality"),
) ))
for item in data
],
)
@staticmethod @staticmethod
def insert_nodes_batch_sync(conn: Connection, data: List[dict]): def insert_nodes_batch_sync(conn: Connection, data: List[dict]):
if not data: if not data:
return return
query = """
INSERT INTO realtime.node_simulation (time, id, actual_demand, total_head, pressure, quality) # 假设同一批次的数据时间是相同的
VALUES (%s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, id) DO UPDATE SET
actual_demand = EXCLUDED.actual_demand, # 使用事务确保原子性
total_head = EXCLUDED.total_head, with conn.transaction():
pressure = EXCLUDED.pressure,
quality = EXCLUDED.quality
"""
with conn.cursor() as cur: with conn.cursor() as cur:
cur.executemany( # 1. 先删除该时间点的旧数据
query, cur.execute(
[ "DELETE FROM realtime.node_simulation WHERE time = %s",
( (target_time,)
)
# 2. 使用 COPY 快速写入新数据
with cur.copy(
"COPY realtime.node_simulation (time, id, actual_demand, total_head, pressure, quality) FROM STDIN"
) as copy:
for item in data:
copy.write_row((
item["time"], item["time"],
item["id"], item["id"],
item.get("actual_demand"), item.get("actual_demand"),
item.get("total_head"), item.get("total_head"),
item.get("pressure"), item.get("pressure"),
item.get("quality"), item.get("quality"),
) ))
for item in data
],
)
@staticmethod @staticmethod
async def get_node_by_time_range( async def get_node_by_time_range(

View File

@@ -13,26 +13,30 @@ class SchemeRepository:
@staticmethod @staticmethod
async def insert_links_batch(conn: AsyncConnection, data: List[dict]): async def insert_links_batch(conn: AsyncConnection, data: List[dict]):
"""Batch insert for scheme.link_simulation using INSERT for performance.""" """Batch insert for scheme.link_simulation using DELETE then COPY for performance."""
if not data: if not data:
return return
query = """
INSERT INTO scheme.link_simulation (time, scheme_type, scheme_name, id, flow, friction, headloss, quality, reaction, setting, status, velocity) # 假设同一批次的数据时间、scheme_type、scheme_name 是相同的
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, scheme_type, scheme_name, id) DO UPDATE SET target_scheme_type = data[0]["scheme_type"]
flow = EXCLUDED.flow, target_scheme_name = data[0]["scheme_name"]
friction = EXCLUDED.friction,
headloss = EXCLUDED.headloss, # 使用事务确保原子性
quality = EXCLUDED.quality, async with conn.transaction():
reaction = EXCLUDED.reaction,
setting = EXCLUDED.setting,
status = EXCLUDED.status,
velocity = EXCLUDED.velocity
"""
async with conn.cursor() as cur: async with conn.cursor() as cur:
await cur.executemany( # 1. 先删除该时间点、scheme_type、scheme_name 的旧数据
query, await cur.execute(
[ "DELETE FROM scheme.link_simulation WHERE time = %s AND scheme_type = %s AND scheme_name = %s",
(target_time, target_scheme_type, target_scheme_name),
)
# 2. 使用 COPY 快速写入新数据
async with cur.copy(
"COPY scheme.link_simulation (time, scheme_type, scheme_name, id, flow, friction, headloss, quality, reaction, setting, status, velocity) FROM STDIN"
) as copy:
for item in data:
await copy.write_row(
( (
item["time"], item["time"],
item["scheme_type"], item["scheme_type"],
@@ -47,32 +51,34 @@ class SchemeRepository:
item.get("status"), item.get("status"),
item.get("velocity"), item.get("velocity"),
) )
for item in data
],
) )
@staticmethod @staticmethod
def insert_links_batch_sync(conn: Connection, data: List[dict]): def insert_links_batch_sync(conn: Connection, data: List[dict]):
"""Batch insert for scheme.link_simulation using INSERT for performance (sync version).""" """Batch insert for scheme.link_simulation using DELETE then COPY for performance (sync version)."""
if not data: if not data:
return return
query = """
INSERT INTO scheme.link_simulation (time, scheme_type, scheme_name, id, flow, friction, headloss, quality, reaction, setting, status, velocity) # 假设同一批次的数据时间、scheme_type、scheme_name 是相同的
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, scheme_type, scheme_name, id) DO UPDATE SET target_scheme_type = data[0]["scheme_type"]
flow = EXCLUDED.flow, target_scheme_name = data[0]["scheme_name"]
friction = EXCLUDED.friction,
headloss = EXCLUDED.headloss, # 使用事务确保原子性
quality = EXCLUDED.quality, with conn.transaction():
reaction = EXCLUDED.reaction,
setting = EXCLUDED.setting,
status = EXCLUDED.status,
velocity = EXCLUDED.velocity
"""
with conn.cursor() as cur: with conn.cursor() as cur:
cur.executemany( # 1. 先删除该时间点、scheme_type、scheme_name 的旧数据
query, cur.execute(
[ "DELETE FROM scheme.link_simulation WHERE time = %s AND scheme_type = %s AND scheme_name = %s",
(target_time, target_scheme_type, target_scheme_name),
)
# 2. 使用 COPY 快速写入新数据
with cur.copy(
"COPY scheme.link_simulation (time, scheme_type, scheme_name, id, flow, friction, headloss, quality, reaction, setting, status, velocity) FROM STDIN"
) as copy:
for item in data:
copy.write_row(
( (
item["time"], item["time"],
item["scheme_type"], item["scheme_type"],
@@ -87,8 +93,6 @@ class SchemeRepository:
item.get("status"), item.get("status"),
item.get("velocity"), item.get("velocity"),
) )
for item in data
],
) )
@staticmethod @staticmethod
@@ -239,19 +243,27 @@ class SchemeRepository:
async def insert_nodes_batch(conn: AsyncConnection, data: List[dict]): async def insert_nodes_batch(conn: AsyncConnection, data: List[dict]):
if not data: if not data:
return return
query = """
INSERT INTO scheme.node_simulation (time, scheme_type, scheme_name, id, actual_demand, total_head, pressure, quality) # 假设同一批次的数据时间、scheme_type、scheme_name 是相同的
VALUES (%s, %s, %s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, scheme_type, scheme_name, id) DO UPDATE SET target_scheme_type = data[0]["scheme_type"]
actual_demand = EXCLUDED.actual_demand, target_scheme_name = data[0]["scheme_name"]
total_head = EXCLUDED.total_head,
pressure = EXCLUDED.pressure, # 使用事务确保原子性
quality = EXCLUDED.quality async with conn.transaction():
"""
async with conn.cursor() as cur: async with conn.cursor() as cur:
await cur.executemany( # 1. 先删除该时间点、scheme_type、scheme_name 的旧数据
query, await cur.execute(
[ "DELETE FROM scheme.node_simulation WHERE time = %s AND scheme_type = %s AND scheme_name = %s",
(target_time, target_scheme_type, target_scheme_name),
)
# 2. 使用 COPY 快速写入新数据
async with cur.copy(
"COPY scheme.node_simulation (time, scheme_type, scheme_name, id, actual_demand, total_head, pressure, quality) FROM STDIN"
) as copy:
for item in data:
await copy.write_row(
( (
item["time"], item["time"],
item["scheme_type"], item["scheme_type"],
@@ -262,27 +274,33 @@ class SchemeRepository:
item.get("pressure"), item.get("pressure"),
item.get("quality"), item.get("quality"),
) )
for item in data
],
) )
@staticmethod @staticmethod
def insert_nodes_batch_sync(conn: Connection, data: List[dict]): def insert_nodes_batch_sync(conn: Connection, data: List[dict]):
if not data: if not data:
return return
query = """
INSERT INTO scheme.node_simulation (time, scheme_type, scheme_name, id, actual_demand, total_head, pressure, quality) # 假设同一批次的数据时间、scheme_type、scheme_name 是相同的
VALUES (%s, %s, %s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, scheme_type, scheme_name, id) DO UPDATE SET target_scheme_type = data[0]["scheme_type"]
actual_demand = EXCLUDED.actual_demand, target_scheme_name = data[0]["scheme_name"]
total_head = EXCLUDED.total_head,
pressure = EXCLUDED.pressure, # 使用事务确保原子性
quality = EXCLUDED.quality with conn.transaction():
"""
with conn.cursor() as cur: with conn.cursor() as cur:
cur.executemany( # 1. 先删除该时间点、scheme_type、scheme_name 的旧数据
query, cur.execute(
[ "DELETE FROM scheme.node_simulation WHERE time = %s AND scheme_type = %s AND scheme_name = %s",
(target_time, target_scheme_type, target_scheme_name),
)
# 2. 使用 COPY 快速写入新数据
with cur.copy(
"COPY scheme.node_simulation (time, scheme_type, scheme_name, id, actual_demand, total_head, pressure, quality) FROM STDIN"
) as copy:
for item in data:
copy.write_row(
( (
item["time"], item["time"],
item["scheme_type"], item["scheme_type"],
@@ -293,8 +311,6 @@ class SchemeRepository:
item.get("pressure"), item.get("pressure"),
item.get("quality"), item.get("quality"),
) )
for item in data
],
) )
@staticmethod @staticmethod