Fix bugs and refine; end of 2025

This commit is contained in:
xinzish
2025-12-31 16:11:28 +08:00
parent 38fb35a333
commit 32bbe3ddcd
23 changed files with 3180 additions and 68 deletions

View File

@@ -1,6 +1,6 @@
from .project import list_project, have_project, create_project, delete_project, clean_project
from .project import is_project_open, open_project, close_project
from .project import copy_project
from .project_backup import list_project, have_project, create_project, delete_project, clean_project
from .project_backup import is_project_open, open_project, close_project
from .project_backup import copy_project
#DingZQ, 2024-12-28, convert inp v3 to v2
from .inp_in import read_inp, import_inp, convert_inp_v3_to_v2

View File

@@ -1,6 +1,6 @@
import datetime
import os
from .project import *
from .project_backup import *
from .database import ChangeSet, write
from .sections import *
from .s0_base import get_region_type

View File

@@ -1,5 +1,5 @@
import os
from .project import *
from .project_backup import *
from .database import ChangeSet
from .sections import *
from .s1_title import inp_out_title

36
api/postgresql_info.py Normal file
View File

@@ -0,0 +1,36 @@
from dotenv import load_dotenv
import os
# Load variables from a local .env file into the process environment
# (no-op if the file is missing).
load_dotenv()
# PostgreSQL connection settings, read once at import time.
# NOTE(review): os.getenv returns None for any variable that is unset —
# confirm the .env file always defines all five DB_* entries.
pg_name = os.getenv("DB_NAME")
pg_host = os.getenv("DB_HOST")
pg_port = os.getenv("DB_PORT")
pg_user = os.getenv("DB_USER")
pg_password = os.getenv("DB_PASSWORD")
def get_pgconn_string(
    db_name=None,
    db_host=None,
    db_port=None,
    db_user=None,
    db_password=None,
):
    """Return a libpq-style PostgreSQL connection string.

    Each argument falls back to the corresponding DB_* environment value
    (pg_name, pg_host, ...) when omitted or None.  Fields that resolve to
    None are left out of the string entirely — the original rendered them
    as the literal text "None" (e.g. "password=None"), which libpq would
    then try to use as a real value.
    """
    fields = [
        ("dbname", db_name if db_name is not None else pg_name),
        ("host", db_host if db_host is not None else pg_host),
        ("port", db_port if db_port is not None else pg_port),
        ("user", db_user if db_user is not None else pg_user),
        ("password", db_password if db_password is not None else pg_password),
    ]
    return " ".join(f"{key}={value}" for key, value in fields if value is not None)
def get_pg_config():
    """Return the PostgreSQL settings (without the password) as a dict."""
    config = dict(
        name=pg_name,
        host=pg_host,
        port=pg_port,
        user=pg_user,
    )
    return config
def get_pg_password():
    """Return the database password (handle with care; None if DB_PASSWORD is unset)."""
    return pg_password

View File

@@ -2,142 +2,157 @@ import os
import psycopg as pg
from psycopg.rows import dict_row
from .connection import g_conn_dict as conn
from .postgresql_info import get_pgconn_string, get_pg_config, get_pg_password
# no undo/redo
_server_databases = ['template0', 'template1', 'postgres', 'project']
_server_databases = ["template0", "template1", "postgres", "project"]
def list_project() -> list[str]:
ps = []
with pg.connect(conninfo="dbname=postgres host=127.0.0.1", autocommit=True) as conn:
with pg.connect(conninfo=get_pgconn_string(), autocommit=True) as conn:
with conn.cursor(row_factory=dict_row) as cur:
for p in cur.execute(f"select datname from pg_database where datname <> 'postgres' and datname <> 'template0' and datname <> 'template1' and datname <> 'project'"):
ps.append(p['datname'])
for p in cur.execute(
f"select datname from pg_database where datname <> 'postgres' and datname <> 'template0' and datname <> 'template1' and datname <> 'project'"
):
ps.append(p["datname"])
return ps
def have_project(name: str) -> bool:
with pg.connect(conninfo="dbname=postgres host=127.0.0.1", autocommit=True) as conn:
with pg.connect(conninfo=get_pgconn_string(), autocommit=True) as conn:
with conn.cursor() as cur:
cur.execute(f"select * from pg_database where datname = '{name}'")
return cur.rowcount > 0
def copy_project(source: str, new: str) -> None:
with pg.connect(conninfo="dbname=postgres host=127.0.0.1", autocommit=True) as conn:
with pg.connect(conninfo=get_pgconn_string(), autocommit=True) as conn:
with conn.cursor() as cur:
cur.execute(f'create database "{new}" with template = {source}')
# 2025-02-07, WMH
# copyproject会把pg中operation这个表的全部内容也加进去我们实际项目运行一周后operation这个表会变得特别大导致CopyProject花费的时间很长CopyProjectEx把operation的在复制时没有一块复制过去节省时间
class CopyProjectEx:
@ staticmethod
@staticmethod
def create_database(connection, new_db):
with connection.cursor() as cursor:
cursor.execute(f'create database "{new_db}"')
connection.commit()
@staticmethod
def execute_pg_dump(hostname, source_db, exclude_table_list):
dump_command_structure = (
f'pg_dump -h {hostname} -F c -s -f source_db_structure.dump {source_db}'
)
def execute_pg_dump(source_db, exclude_table_list):
os.environ["PGPASSWORD"] = get_pg_password() # 设置密码环境变量
pg_config = get_pg_config()
host = pg_config["host"]
port = pg_config["port"]
user = pg_config["user"]
dump_command_structure = f"pg_dump -h {host} -p {port} -U {user} -F c -s -f source_db_structure.dump {source_db}"
os.system(dump_command_structure)
if exclude_table_list is not None:
exclude_table = ' '.join(['-T {}'.format(i) for i in exclude_table_list])
dump_command_db = (
f'pg_dump -h {hostname} -F c -a {exclude_table} -f source_db.dump {source_db}'
)
exclude_table = " ".join(["-T {}".format(i) for i in exclude_table_list])
dump_command_db = f"pg_dump -h {host} -p {port} -U {user} -F c -a {exclude_table} -f source_db.dump {source_db}"
else:
dump_command_db = (
f'pg_dump -h {hostname} -F c -a -f source_db.dump {source_db}'
)
dump_command_db = f"pg_dump -h {host} -p {port} -U {user} -F c -a -f source_db.dump {source_db}"
os.system(dump_command_db)
@staticmethod
def execute_pg_restore(hostname, new_db):
restore_command_structure = (
f'pg_restore -h {hostname} -d {new_db} source_db_structure.dump'
)
def execute_pg_restore(new_db):
os.environ["PGPASSWORD"] = get_pg_password() # 设置密码环境变量
pg_config = get_pg_config()
host = pg_config["host"]
port = pg_config["port"]
user = pg_config["user"]
restore_command_structure = f"pg_restore -h {host} -p {port} -U {user} -d {new_db} source_db_structure.dump"
os.system(restore_command_structure)
restore_command_db = (
f'pg_restore -h {hostname} -d {new_db} source_db.dump'
f"pg_restore -h {host} -p {port} -U {user} -d {new_db} source_db.dump"
)
os.system(restore_command_db)
@staticmethod
def init_operation_table(connection, excluded_table):
with connection.cursor() as cursor:
if 'operation' in excluded_table:
insert_query \
= "insert into operation (id, redo, undo, redo_cs, undo_cs) values (0, '', '', '', '')"
if "operation" in excluded_table:
insert_query = "insert into operation (id, redo, undo, redo_cs, undo_cs) values (0, '', '', '', '')"
cursor.execute(insert_query)
if 'current_operation' in excluded_table:
insert_query \
= "insert into current_operation (id) values (0)"
if "current_operation" in excluded_table:
insert_query = "insert into current_operation (id) values (0)"
cursor.execute(insert_query)
if 'restore_operation' in excluded_table:
insert_query \
= "insert into restore_operation (id) values (0)"
if "restore_operation" in excluded_table:
insert_query = "insert into restore_operation (id) values (0)"
cursor.execute(insert_query)
if 'batch_operation' in excluded_table:
insert_query \
= "insert into batch_operation (id, redo, undo, redo_cs, undo_cs) values (0, '', '', '', '')"
if "batch_operation" in excluded_table:
insert_query = "insert into batch_operation (id, redo, undo, redo_cs, undo_cs) values (0, '', '', '', '')"
cursor.execute(insert_query)
if 'operation_table' in excluded_table:
insert_query \
= "insert into operation_table (option) values ('operation')"
if "operation_table" in excluded_table:
insert_query = (
"insert into operation_table (option) values ('operation')"
)
cursor.execute(insert_query)
connection.commit()
def __call__(self, source: str, new: str, excluded_table: [str] = None) -> None:
connection = pg.connect(conninfo="dbname=postgres host=127.0.0.1", autocommit=True)
def __call__(self, source: str, new_db: str, excluded_tables: [str] = None) -> None:
source_connection = pg.connect(conninfo=get_pgconn_string(), autocommit=True)
self.create_database(connection, new)
self.execute_pg_dump('127.0.0.1', source, excluded_table)
self.execute_pg_restore('127.0.0.1', new)
self.create_database(source_connection, new_db)
connection = pg.connect(conninfo=f"dbname='{new}' host=127.0.0.1", autocommit=True)
self.init_operation_table(connection, excluded_table)
self.execute_pg_dump(source, excluded_tables)
self.execute_pg_restore(new_db)
source_connection.close()
new_db_connection = pg.connect(
conninfo=get_pgconn_string(db_name=new_db), autocommit=True
)
self.init_operation_table(new_db_connection, excluded_tables)
new_db_connection.close()
def create_project(name: str) -> None:
return copy_project('project', name)
return copy_project("project", name)
def delete_project(name: str) -> None:
with pg.connect(conninfo="dbname=postgres host=127.0.0.1", autocommit=True) as conn:
with pg.connect(conninfo=get_pgconn_string(), autocommit=True) as conn:
with conn.cursor() as cur:
cur.execute(f"select pg_terminate_backend(pid) from pg_stat_activity where datname = '{name}'")
cur.execute(
f"select pg_terminate_backend(pid) from pg_stat_activity where datname = '{name}'"
)
cur.execute(f'drop database "{name}"')
def clean_project(excluded: list[str] = []) -> None:
projects = list_project()
with pg.connect(conninfo="dbname=postgres host=127.0.0.1", autocommit=True) as conn:
with pg.connect(conninfo=get_pgconn_string(), autocommit=True) as conn:
with conn.cursor(row_factory=dict_row) as cur:
row = cur.execute(f"select current_database()").fetchone()
if row != None:
current_db = row['current_database']
current_db = row["current_database"]
if current_db in projects:
projects.remove(current_db)
for project in projects:
if project in _server_databases or project in excluded:
continue
cur.execute(f"select pg_terminate_backend(pid) from pg_stat_activity where datname = '{project}'")
cur.execute(
f"select pg_terminate_backend(pid) from pg_stat_activity where datname = '{project}'"
)
cur.execute(f'drop database "{project}"')
def open_project(name: str) -> None:
if name not in conn:
conn[name] = pg.connect(conninfo=f"dbname={name} host=127.0.0.1", autocommit=True)
conn[name] = pg.connect(
conninfo=get_pgconn_string(db_name=name), autocommit=True
)
def is_project_open(name: str) -> bool:
@@ -148,4 +163,3 @@ def close_project(name: str) -> None:
if name in conn:
conn[name].close()
del conn[name]

152
api/project_backup.py Normal file
View File

@@ -0,0 +1,152 @@
import os
import psycopg as pg
from psycopg.rows import dict_row
from .connection import g_conn_dict as conn
from .postgresql_info import get_pgconn_string
# no undo/redo
# Databases owned by the server (plus the 'project' template database);
# clean_project() must never drop these.
_server_databases = ['template0', 'template1', 'postgres', 'project']
def list_project() -> list[str]:
    """Return the names of all project databases on the server.

    Server-internal databases and the 'project' template database are
    excluded by the query itself.
    """
    query = (
        "select datname from pg_database"
        " where datname <> 'postgres' and datname <> 'template0'"
        " and datname <> 'template1' and datname <> 'project'"
    )
    # Use a distinct local name — the original `with ... as conn` shadowed
    # the module-level `conn` dict that caches open project connections.
    with pg.connect(conninfo=get_pgconn_string(), autocommit=True) as db:
        with db.cursor(row_factory=dict_row) as cur:
            return [row['datname'] for row in cur.execute(query)]
def have_project(name: str) -> bool:
    """Return True if a database named *name* exists on the server.

    Bug fix: the original connected with ``db_name=name``, so probing a
    database that does not exist raised a connection error instead of
    returning False.  The catalog check must run against the default
    database from get_pgconn_string().
    """
    with pg.connect(conninfo=get_pgconn_string(), autocommit=True) as db:
        with db.cursor() as cur:
            # Parameterized to avoid SQL injection through *name*.
            cur.execute("select 1 from pg_database where datname = %s", (name,))
            return cur.rowcount > 0
def copy_project(source: str, new: str) -> None:
    """Create database *new* as a full copy of *source* (CREATE DATABASE ... TEMPLATE)."""
    with pg.connect(conninfo=get_pgconn_string(), autocommit=True) as db:
        with db.cursor() as cur:
            # Quote both identifiers — the original left *source* unquoted,
            # which fails for names that require quoting (mixed case, dashes).
            # Identifiers cannot be bound as query parameters, so callers
            # must not pass untrusted names here.
            cur.execute(f'create database "{new}" with template = "{source}"')
# 2025-02-07, WMH
# copy_project also copies the entire contents of the "operation" table. After about
# a week of real-world use that table grows very large, making CopyProject slow.
# CopyProjectEx skips the operation tables during the copy and re-seeds them
# afterwards, saving time.
class CopyProjectEx:
    """Copy a project database while excluding the (potentially huge)
    operation tables from the data copy, then re-seed those tables with
    their initial rows.
    """

    @staticmethod
    def create_database(connection, new_db):
        """Create an empty database *new_db* over *connection*."""
        with connection.cursor() as cursor:
            cursor.execute(f'create database "{new_db}"')
        connection.commit()

    @staticmethod
    def execute_pg_dump(hostname, source_db, exclude_table_list):
        """Dump *source_db* via pg_dump into the working directory.

        Writes source_db_structure.dump (schema only, -s) and
        source_db.dump (data only, -a, minus the excluded tables).
        NOTE(review): os.system interpolates names into a shell command —
        callers must not pass untrusted database/table names.
        """
        os.system(
            f'pg_dump -h {hostname} -F c -s -f source_db_structure.dump {source_db}'
        )
        if exclude_table_list is not None:
            excludes = ' '.join(f'-T {table}' for table in exclude_table_list)
            os.system(
                f'pg_dump -h {hostname} -F c -a {excludes} -f source_db.dump {source_db}'
            )
        else:
            os.system(
                f'pg_dump -h {hostname} -F c -a -f source_db.dump {source_db}'
            )

    @staticmethod
    def execute_pg_restore(hostname, new_db):
        """Restore schema then data dumps produced by execute_pg_dump into *new_db*."""
        os.system(f'pg_restore -h {hostname} -d {new_db} source_db_structure.dump')
        os.system(f'pg_restore -h {hostname} -d {new_db} source_db.dump')

    @staticmethod
    def init_operation_table(connection, excluded_table):
        """Insert the initial seed row into each table that was excluded from the copy."""
        # Seed row per operation table; dict preserves the original insert order.
        seeds = {
            'operation':
                "insert into operation (id, redo, undo, redo_cs, undo_cs) values (0, '', '', '', '')",
            'current_operation':
                "insert into current_operation (id) values (0)",
            'restore_operation':
                "insert into restore_operation (id) values (0)",
            'batch_operation':
                "insert into batch_operation (id, redo, undo, redo_cs, undo_cs) values (0, '', '', '', '')",
            'operation_table':
                "insert into operation_table (option) values ('operation')",
        }
        with connection.cursor() as cursor:
            for table, insert_query in seeds.items():
                if table in excluded_table:
                    cursor.execute(insert_query)
        connection.commit()

    def __call__(self, source: str, new: str, excluded_table: list[str] = None) -> None:
        """Copy *source* into new database *new*, skipping *excluded_table* tables
        in the data copy and re-seeding them afterwards.

        Bug fixes vs. the original: both connections are now closed, and a
        None *excluded_table* no longer crashes init_operation_table with
        ``TypeError: argument of type 'NoneType' is not iterable``.
        """
        admin_conn = pg.connect(conninfo=get_pgconn_string(), autocommit=True)
        try:
            self.create_database(admin_conn, new)
        finally:
            admin_conn.close()
        self.execute_pg_dump('127.0.0.1', source, excluded_table)
        self.execute_pg_restore('127.0.0.1', new)
        target_conn = pg.connect(conninfo=get_pgconn_string(db_name=new), autocommit=True)
        try:
            if excluded_table is not None:
                self.init_operation_table(target_conn, excluded_table)
        finally:
            target_conn.close()
def create_project(name: str) -> None:
    """Create a new project database by cloning the 'project' template database."""
    return copy_project('project', name)
def delete_project(name: str) -> None:
    """Drop project database *name*, terminating its active sessions first."""
    with pg.connect(conninfo=get_pgconn_string(), autocommit=True) as db:
        with db.cursor() as cur:
            # Parameterized to avoid SQL injection through *name*; also stops
            # shadowing the module-level `conn` dict with the local connection.
            cur.execute(
                "select pg_terminate_backend(pid) from pg_stat_activity where datname = %s",
                (name,),
            )
            cur.execute(f'drop database "{name}"')
def clean_project(excluded: list[str] = None) -> None:
    """Drop every project database except server databases, the *excluded*
    names, and the database this session is currently connected to.
    """
    # Bug fix: the original used a mutable default argument (= []).
    excluded = excluded if excluded is not None else []
    projects = list_project()
    with pg.connect(conninfo=get_pgconn_string(), autocommit=True) as db:
        with db.cursor(row_factory=dict_row) as cur:
            row = cur.execute("select current_database()").fetchone()
            if row is not None:
                current_db = row['current_database']
                # Never drop the database we are connected to.
                if current_db in projects:
                    projects.remove(current_db)
            for project in projects:
                if project in _server_databases or project in excluded:
                    continue
                # Parameterized to avoid SQL injection through the name.
                cur.execute(
                    "select pg_terminate_backend(pid) from pg_stat_activity where datname = %s",
                    (project,),
                )
                cur.execute(f'drop database "{project}"')
def open_project(name: str) -> None:
    """Open and cache a connection to project database *name*; no-op if already open."""
    if name in conn:
        return
    conn[name] = pg.connect(conninfo=get_pgconn_string(db_name=name), autocommit=True)
def is_project_open(name: str) -> bool:
    """Return True if a connection to project *name* is currently cached."""
    is_open = name in conn
    return is_open
def close_project(name: str) -> None:
    """Close and forget the cached connection to project *name*, if any."""
    if name not in conn:
        return
    conn[name].close()
    del conn[name]

View File

@@ -1,6 +1,6 @@
import os
import ctypes
from .project import have_project
from .project_backup import have_project
from .inp_out import dump_inp
def calculate_service_area(name: str) -> list[dict[str, list[str]]]: