还原旧依赖文件;使用事务 Delete And Copy 加速数据的存储/覆盖

This commit is contained in:
JIANG
2025-12-10 11:06:09 +08:00
parent 77cc7236fc
commit 8a9345dfcc
5 changed files with 260 additions and 250 deletions

Binary file not shown.

View File

@@ -8,20 +8,11 @@ if __name__ == "__main__":
if sys.platform == "win32": if sys.platform == "win32":
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
# 创建配置对象 # 用 uvicorn.run 支持 workers 参数
config = uvicorn.Config( uvicorn.run(
"main:app", "main:app",
host="0.0.0.0", host="0.0.0.0",
port=8000, port=8000,
loop="asyncio", # 强制使用 asyncio 事件循环 workers=2, # 这里可以设置多进程
workers=2, # 如果需要多进程,可以取消注释 loop="asyncio",
) )
server = uvicorn.Server(config)
# 创建并设置事件循环
if sys.platform == "win32":
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
# 运行服务器
asyncio.run(server.serve())

View File

@@ -1197,7 +1197,7 @@ def run_simulation(
cs.append(valve_status) cs.append(valve_status)
set_status(name_c, cs) set_status(name_c, cs)
# 运行并返回结果 # 运行并返回结果
result = run_project(name_c) run_project(name_c)
time_cost_end = time.perf_counter() time_cost_end = time.perf_counter()
print( print(
"{} -- Hydraulic simulation finished, cost time: {:.2f} s.".format( "{} -- Hydraulic simulation finished, cost time: {:.2f} s.".format(
@@ -1227,6 +1227,7 @@ def run_simulation(
# print(num_periods_result) # print(num_periods_result)
# print(node_result) # print(node_result)
# 存储 # 存储
starttime = time.time()
if simulation_type.upper() == "REALTIME": if simulation_type.upper() == "REALTIME":
TimescaleInternalStorage.store_realtime_simulation( TimescaleInternalStorage.store_realtime_simulation(
node_result, link_result, modify_pattern_start_time node_result, link_result, modify_pattern_start_time
@@ -1240,6 +1241,8 @@ def run_simulation(
modify_pattern_start_time, modify_pattern_start_time,
num_periods_result, num_periods_result,
) )
endtime = time.time()
logging.info("store time: %f", endtime - starttime)
# 暂不需要再次存储 SCADA 模拟信息 # 暂不需要再次存储 SCADA 模拟信息
# TimescaleInternalStorage.fill_scheme_simulation_result_to_SCADA(scheme_Type=scheme_Type, scheme_Name=scheme_Name) # TimescaleInternalStorage.fill_scheme_simulation_result_to_SCADA(scheme_Type=scheme_Type, scheme_Name=scheme_Name)

View File

@@ -12,79 +12,75 @@ class RealtimeRepository:
@staticmethod @staticmethod
async def insert_links_batch(conn: AsyncConnection, data: List[dict]): async def insert_links_batch(conn: AsyncConnection, data: List[dict]):
"""Batch insert for realtime.link_simulation using INSERT for performance.""" """Batch insert for realtime.link_simulation using DELETE then COPY for performance."""
if not data: if not data:
return return
query = """
INSERT INTO realtime.link_simulation (time, id, flow, friction, headloss, quality, reaction, setting, status, velocity) # 假设同一批次的数据时间是相同的
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, id) DO UPDATE SET
flow = EXCLUDED.flow, # 使用事务确保原子性
friction = EXCLUDED.friction, async with conn.transaction():
headloss = EXCLUDED.headloss, async with conn.cursor() as cur:
quality = EXCLUDED.quality, # 1. 先删除该时间点的旧数据
reaction = EXCLUDED.reaction, await cur.execute(
setting = EXCLUDED.setting, "DELETE FROM realtime.link_simulation WHERE time = %s",
status = EXCLUDED.status, (target_time,)
velocity = EXCLUDED.velocity )
"""
async with conn.cursor() as cur: # 2. 使用 COPY 快速写入新数据
await cur.executemany( async with cur.copy(
query, "COPY realtime.link_simulation (time, id, flow, friction, headloss, quality, reaction, setting, status, velocity) FROM STDIN"
[ ) as copy:
( for item in data:
item["time"], await copy.write_row((
item["id"], item["time"],
item.get("flow"), item["id"],
item.get("friction"), item.get("flow"),
item.get("headloss"), item.get("friction"),
item.get("quality"), item.get("headloss"),
item.get("reaction"), item.get("quality"),
item.get("setting"), item.get("reaction"),
item.get("status"), item.get("setting"),
item.get("velocity"), item.get("status"),
) item.get("velocity"),
for item in data ))
],
)
@staticmethod @staticmethod
def insert_links_batch_sync(conn: Connection, data: List[dict]): def insert_links_batch_sync(conn: Connection, data: List[dict]):
"""Batch insert for realtime.link_simulation using INSERT for performance (sync version).""" """Batch insert for realtime.link_simulation using DELETE then COPY for performance (sync version)."""
if not data: if not data:
return return
query = """
INSERT INTO realtime.link_simulation (time, id, flow, friction, headloss, quality, reaction, setting, status, velocity) # 假设同一批次的数据时间是相同的
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, id) DO UPDATE SET
flow = EXCLUDED.flow, # 使用事务确保原子性
friction = EXCLUDED.friction, with conn.transaction():
headloss = EXCLUDED.headloss, with conn.cursor() as cur:
quality = EXCLUDED.quality, # 1. 先删除该时间点的旧数据
reaction = EXCLUDED.reaction, cur.execute(
setting = EXCLUDED.setting, "DELETE FROM realtime.link_simulation WHERE time = %s",
status = EXCLUDED.status, (target_time,)
velocity = EXCLUDED.velocity )
"""
with conn.cursor() as cur: # 2. 使用 COPY 快速写入新数据
cur.executemany( with cur.copy(
query, "COPY realtime.link_simulation (time, id, flow, friction, headloss, quality, reaction, setting, status, velocity) FROM STDIN"
[ ) as copy:
( for item in data:
item["time"], copy.write_row((
item["id"], item["time"],
item.get("flow"), item["id"],
item.get("friction"), item.get("flow"),
item.get("headloss"), item.get("friction"),
item.get("quality"), item.get("headloss"),
item.get("reaction"), item.get("quality"),
item.get("setting"), item.get("reaction"),
item.get("status"), item.get("setting"),
item.get("velocity"), item.get("status"),
) item.get("velocity"),
for item in data ))
],
)
@staticmethod @staticmethod
async def get_link_by_time_range( async def get_link_by_time_range(
@@ -213,59 +209,63 @@ class RealtimeRepository:
async def insert_nodes_batch(conn: AsyncConnection, data: List[dict]): async def insert_nodes_batch(conn: AsyncConnection, data: List[dict]):
if not data: if not data:
return return
query = """
INSERT INTO realtime.node_simulation (time, id, actual_demand, total_head, pressure, quality) # 假设同一批次的数据时间是相同的
VALUES (%s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, id) DO UPDATE SET
actual_demand = EXCLUDED.actual_demand, # 使用事务确保原子性
total_head = EXCLUDED.total_head, async with conn.transaction():
pressure = EXCLUDED.pressure, async with conn.cursor() as cur:
quality = EXCLUDED.quality # 1. 先删除该时间点的旧数据
""" await cur.execute(
async with conn.cursor() as cur: "DELETE FROM realtime.node_simulation WHERE time = %s",
await cur.executemany( (target_time,)
query, )
[
( # 2. 使用 COPY 快速写入新数据
item["time"], async with cur.copy(
item["id"], "COPY realtime.node_simulation (time, id, actual_demand, total_head, pressure, quality) FROM STDIN"
item.get("actual_demand"), ) as copy:
item.get("total_head"), for item in data:
item.get("pressure"), await copy.write_row((
item.get("quality"), item["time"],
) item["id"],
for item in data item.get("actual_demand"),
], item.get("total_head"),
) item.get("pressure"),
item.get("quality"),
))
@staticmethod @staticmethod
def insert_nodes_batch_sync(conn: Connection, data: List[dict]): def insert_nodes_batch_sync(conn: Connection, data: List[dict]):
if not data: if not data:
return return
query = """
INSERT INTO realtime.node_simulation (time, id, actual_demand, total_head, pressure, quality) # 假设同一批次的数据时间是相同的
VALUES (%s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, id) DO UPDATE SET
actual_demand = EXCLUDED.actual_demand, # 使用事务确保原子性
total_head = EXCLUDED.total_head, with conn.transaction():
pressure = EXCLUDED.pressure, with conn.cursor() as cur:
quality = EXCLUDED.quality # 1. 先删除该时间点的旧数据
""" cur.execute(
with conn.cursor() as cur: "DELETE FROM realtime.node_simulation WHERE time = %s",
cur.executemany( (target_time,)
query, )
[
( # 2. 使用 COPY 快速写入新数据
item["time"], with cur.copy(
item["id"], "COPY realtime.node_simulation (time, id, actual_demand, total_head, pressure, quality) FROM STDIN"
item.get("actual_demand"), ) as copy:
item.get("total_head"), for item in data:
item.get("pressure"), copy.write_row((
item.get("quality"), item["time"],
) item["id"],
for item in data item.get("actual_demand"),
], item.get("total_head"),
) item.get("pressure"),
item.get("quality"),
))
@staticmethod @staticmethod
async def get_node_by_time_range( async def get_node_by_time_range(

View File

@@ -13,83 +13,87 @@ class SchemeRepository:
@staticmethod @staticmethod
async def insert_links_batch(conn: AsyncConnection, data: List[dict]): async def insert_links_batch(conn: AsyncConnection, data: List[dict]):
"""Batch insert for scheme.link_simulation using INSERT for performance.""" """Batch insert for scheme.link_simulation using DELETE then COPY for performance."""
if not data: if not data:
return return
query = """
INSERT INTO scheme.link_simulation (time, scheme_type, scheme_name, id, flow, friction, headloss, quality, reaction, setting, status, velocity) # 假设同一批次的数据时间、scheme_type、scheme_name 是相同的
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, scheme_type, scheme_name, id) DO UPDATE SET target_scheme_type = data[0]["scheme_type"]
flow = EXCLUDED.flow, target_scheme_name = data[0]["scheme_name"]
friction = EXCLUDED.friction,
headloss = EXCLUDED.headloss, # 使用事务确保原子性
quality = EXCLUDED.quality, async with conn.transaction():
reaction = EXCLUDED.reaction, async with conn.cursor() as cur:
setting = EXCLUDED.setting, # 1. 先删除该时间点、scheme_type、scheme_name 的旧数据
status = EXCLUDED.status, await cur.execute(
velocity = EXCLUDED.velocity "DELETE FROM scheme.link_simulation WHERE time = %s AND scheme_type = %s AND scheme_name = %s",
""" (target_time, target_scheme_type, target_scheme_name),
async with conn.cursor() as cur: )
await cur.executemany(
query, # 2. 使用 COPY 快速写入新数据
[ async with cur.copy(
( "COPY scheme.link_simulation (time, scheme_type, scheme_name, id, flow, friction, headloss, quality, reaction, setting, status, velocity) FROM STDIN"
item["time"], ) as copy:
item["scheme_type"], for item in data:
item["scheme_name"], await copy.write_row(
item["id"], (
item.get("flow"), item["time"],
item.get("friction"), item["scheme_type"],
item.get("headloss"), item["scheme_name"],
item.get("quality"), item["id"],
item.get("reaction"), item.get("flow"),
item.get("setting"), item.get("friction"),
item.get("status"), item.get("headloss"),
item.get("velocity"), item.get("quality"),
) item.get("reaction"),
for item in data item.get("setting"),
], item.get("status"),
) item.get("velocity"),
)
)
@staticmethod @staticmethod
def insert_links_batch_sync(conn: Connection, data: List[dict]): def insert_links_batch_sync(conn: Connection, data: List[dict]):
"""Batch insert for scheme.link_simulation using INSERT for performance (sync version).""" """Batch insert for scheme.link_simulation using DELETE then COPY for performance (sync version)."""
if not data: if not data:
return return
query = """
INSERT INTO scheme.link_simulation (time, scheme_type, scheme_name, id, flow, friction, headloss, quality, reaction, setting, status, velocity) # 假设同一批次的数据时间、scheme_type、scheme_name 是相同的
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, scheme_type, scheme_name, id) DO UPDATE SET target_scheme_type = data[0]["scheme_type"]
flow = EXCLUDED.flow, target_scheme_name = data[0]["scheme_name"]
friction = EXCLUDED.friction,
headloss = EXCLUDED.headloss, # 使用事务确保原子性
quality = EXCLUDED.quality, with conn.transaction():
reaction = EXCLUDED.reaction, with conn.cursor() as cur:
setting = EXCLUDED.setting, # 1. 先删除该时间点、scheme_type、scheme_name 的旧数据
status = EXCLUDED.status, cur.execute(
velocity = EXCLUDED.velocity "DELETE FROM scheme.link_simulation WHERE time = %s AND scheme_type = %s AND scheme_name = %s",
""" (target_time, target_scheme_type, target_scheme_name),
with conn.cursor() as cur: )
cur.executemany(
query, # 2. 使用 COPY 快速写入新数据
[ with cur.copy(
( "COPY scheme.link_simulation (time, scheme_type, scheme_name, id, flow, friction, headloss, quality, reaction, setting, status, velocity) FROM STDIN"
item["time"], ) as copy:
item["scheme_type"], for item in data:
item["scheme_name"], copy.write_row(
item["id"], (
item.get("flow"), item["time"],
item.get("friction"), item["scheme_type"],
item.get("headloss"), item["scheme_name"],
item.get("quality"), item["id"],
item.get("reaction"), item.get("flow"),
item.get("setting"), item.get("friction"),
item.get("status"), item.get("headloss"),
item.get("velocity"), item.get("quality"),
) item.get("reaction"),
for item in data item.get("setting"),
], item.get("status"),
) item.get("velocity"),
)
)
@staticmethod @staticmethod
async def get_link_by_scheme_and_time_range( async def get_link_by_scheme_and_time_range(
@@ -239,63 +243,75 @@ class SchemeRepository:
async def insert_nodes_batch(conn: AsyncConnection, data: List[dict]): async def insert_nodes_batch(conn: AsyncConnection, data: List[dict]):
if not data: if not data:
return return
query = """
INSERT INTO scheme.node_simulation (time, scheme_type, scheme_name, id, actual_demand, total_head, pressure, quality) # 假设同一批次的数据时间、scheme_type、scheme_name 是相同的
VALUES (%s, %s, %s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, scheme_type, scheme_name, id) DO UPDATE SET target_scheme_type = data[0]["scheme_type"]
actual_demand = EXCLUDED.actual_demand, target_scheme_name = data[0]["scheme_name"]
total_head = EXCLUDED.total_head,
pressure = EXCLUDED.pressure, # 使用事务确保原子性
quality = EXCLUDED.quality async with conn.transaction():
""" async with conn.cursor() as cur:
async with conn.cursor() as cur: # 1. 先删除该时间点、scheme_type、scheme_name 的旧数据
await cur.executemany( await cur.execute(
query, "DELETE FROM scheme.node_simulation WHERE time = %s AND scheme_type = %s AND scheme_name = %s",
[ (target_time, target_scheme_type, target_scheme_name),
( )
item["time"],
item["scheme_type"], # 2. 使用 COPY 快速写入新数据
item["scheme_name"], async with cur.copy(
item["id"], "COPY scheme.node_simulation (time, scheme_type, scheme_name, id, actual_demand, total_head, pressure, quality) FROM STDIN"
item.get("actual_demand"), ) as copy:
item.get("total_head"), for item in data:
item.get("pressure"), await copy.write_row(
item.get("quality"), (
) item["time"],
for item in data item["scheme_type"],
], item["scheme_name"],
) item["id"],
item.get("actual_demand"),
item.get("total_head"),
item.get("pressure"),
item.get("quality"),
)
)
@staticmethod @staticmethod
def insert_nodes_batch_sync(conn: Connection, data: List[dict]): def insert_nodes_batch_sync(conn: Connection, data: List[dict]):
if not data: if not data:
return return
query = """
INSERT INTO scheme.node_simulation (time, scheme_type, scheme_name, id, actual_demand, total_head, pressure, quality) # 假设同一批次的数据时间、scheme_type、scheme_name 是相同的
VALUES (%s, %s, %s, %s, %s, %s, %s, %s) target_time = data[0]["time"]
ON CONFLICT (time, scheme_type, scheme_name, id) DO UPDATE SET target_scheme_type = data[0]["scheme_type"]
actual_demand = EXCLUDED.actual_demand, target_scheme_name = data[0]["scheme_name"]
total_head = EXCLUDED.total_head,
pressure = EXCLUDED.pressure, # 使用事务确保原子性
quality = EXCLUDED.quality with conn.transaction():
""" with conn.cursor() as cur:
with conn.cursor() as cur: # 1. 先删除该时间点、scheme_type、scheme_name 的旧数据
cur.executemany( cur.execute(
query, "DELETE FROM scheme.node_simulation WHERE time = %s AND scheme_type = %s AND scheme_name = %s",
[ (target_time, target_scheme_type, target_scheme_name),
( )
item["time"],
item["scheme_type"], # 2. 使用 COPY 快速写入新数据
item["scheme_name"], with cur.copy(
item["id"], "COPY scheme.node_simulation (time, scheme_type, scheme_name, id, actual_demand, total_head, pressure, quality) FROM STDIN"
item.get("actual_demand"), ) as copy:
item.get("total_head"), for item in data:
item.get("pressure"), copy.write_row(
item.get("quality"), (
) item["time"],
for item in data item["scheme_type"],
], item["scheme_name"],
) item["id"],
item.get("actual_demand"),
item.get("total_head"),
item.get("pressure"),
item.get("quality"),
)
)
@staticmethod @staticmethod
async def get_node_by_scheme_and_time_range( async def get_node_by_scheme_and_time_range(