暂存文件的引用修复

This commit is contained in:
2026-01-22 17:00:10 +08:00
parent 2668faf8ad
commit 0d139f96f8
13 changed files with 65 additions and 38 deletions

View File

@@ -8,7 +8,7 @@ import json
import pytz
import requests
import time
import project_info
import app.services.project_info as project_info
url_path = 'http://10.101.15.16:9000/loong' # 内网
# url_path = 'http://183.64.62.100:9057/loong' # 外网

View File

@@ -19,7 +19,7 @@ from spopt.region import Skater
from shapely.geometry import Point
import geopandas as gpd
from sklearn.metrics import pairwise_distances
import project_info
import app.services.project_info as project_info
# 2025/03/12
# Step1: 获取节点坐标

View File

@@ -12,7 +12,7 @@ from wntr.epanet.toolkit import EpanetException
from numpy.linalg import slogdet
import random
from app.services.tjnetwork import *
import project_info
import app.services.project_info as project_info
# 2025/03/12
# Step1: 获取节点坐标

View File

@@ -1,9 +1,9 @@
import os
from app.services.tjnetwork import *
from app.native.api.project import copy_project
from run_simulation import run_simulation_ex, from_clock_to_seconds_2
from app.algorithms.api_ex.run_simulation import run_simulation_ex, from_clock_to_seconds_2
from math import sqrt, pi
from epanet.epanet import Output
from app.services.epanet.epanet import Output
import json
from datetime import datetime
import time
@@ -17,11 +17,11 @@ import app.services.simulation as simulation
import geopandas as gpd
from sqlalchemy import create_engine
import ast
import sensitivity
import app.services.project_info as project_info
import app.algorithms.api_ex.kmeans_sensor
import app.algorithms.api_ex.Fdataclean
import app.algorithms.api_ex.Pdataclean
import app.algorithms.api_ex.kmeans_sensor as kmeans_sensor
import app.algorithms.api_ex.Fdataclean as Fdataclean
import app.algorithms.api_ex.Pdataclean as Pdataclean
import app.algorithms.api_ex.sensitivity as sensitivity
from app.native.api.postgresql_info import get_pgconn_string
@@ -1425,7 +1425,7 @@ def pressure_sensor_placement_kmeans(
# dump_inp
inp_name = f"./db_inp/{name}.db.inp"
dump_inp(name, inp_name, "2")
sensor_location = api_ex.kmeans_sensor.kmeans_sensor_placement(
sensor_location = kmeans_sensor.kmeans_sensor_placement(
name=name, sensor_num=sensor_number, min_diameter=min_diameter
)
try:
@@ -1475,7 +1475,7 @@ def flow_data_clean(input_csv_file: str) -> str:
if not os.path.exists(input_csv_path):
raise FileNotFoundError(f"指定的文件不存在: {input_csv_path}")
# 调用 Fdataclean.clean_flow_data_kf 函数进行数据清洗
out_xlsx_path = api_ex.Fdataclean.clean_flow_data_kf(input_csv_path)
out_xlsx_path = Fdataclean.clean_flow_data_kf(input_csv_path)
print("清洗后的数据已保存到:", out_xlsx_path)
@@ -1502,7 +1502,7 @@ def pressure_data_clean(input_csv_file: str) -> str:
if not os.path.exists(input_csv_path):
raise FileNotFoundError(f"指定的文件不存在: {input_csv_path}")
# 调用 Pdataclean.clean_pressure_data_km 函数进行数据清洗
out_xlsx_path = api_ex.Pdataclean.clean_pressure_data_km(input_csv_path)
out_xlsx_path = Pdataclean.clean_pressure_data_km(input_csv_path)
print("清洗后的数据已保存到:", out_xlsx_path)

View File

@@ -5,7 +5,7 @@ import msgpack
import datetime
from app.services.tjnetwork import *
from app.infra.db.influxdb import api as influxdb_api
from app.infra.db import redis_client # Assuming redis_client is exposed or needs initialization
from app.infra.cache.redis_client import redis_client # Assuming redis_client is exposed or needs initialization
# If redis_client isn't in app.infra.db, we might need to initialize it here or import from main if it was global.
# Given the instructions, we'll assume it needs to be imported or initialized.
# For now, let's copy the redis initialization logic if it's not centralized, or better,

View File

@@ -14,18 +14,19 @@ from app.algorithms.online_Analysis import (
flushing_analysis,
contaminant_simulation,
age_analysis,
scheduling_analysis,
# scheduling_analysis,
pressure_regulation,
project_management,
daily_scheduling_analysis,
# daily_scheduling_analysis,
network_update,
pump_failure,
# pump_failure,
pressure_sensor_placement_sensitivity,
pressure_sensor_placement_kmeans,
)
router = APIRouter()
# 必须用这个PlainTextResponse不然每个key都有引号
@router.get("/runproject/", response_class=PlainTextResponse)
async def run_project_endpoint(network: str) -> str:
@@ -78,51 +79,73 @@ async def run_inp_endpoint(network: str) -> str:
async def dump_output_endpoint(output: str) -> str:
return dump_output(output)
# Analysis Endpoints
@router.get("/burstanalysis/")
async def burst_analysis_endpoint(network: str, pipe_id: str, start_time: str, end_time: str, burst_flow: float):
async def burst_analysis_endpoint(
network: str, pipe_id: str, start_time: str, end_time: str, burst_flow: float
):
return burst_analysis(network, pipe_id, start_time, end_time, burst_flow)
@router.get("/valvecloseanalysis/")
async def valve_close_analysis_endpoint(network: str, valve_id: str, start_time: str, end_time: str):
async def valve_close_analysis_endpoint(
network: str, valve_id: str, start_time: str, end_time: str
):
return valve_close_analysis(network, valve_id, start_time, end_time)
@router.get("/flushinganalysis/")
async def flushing_analysis_endpoint(network: str, pipe_id: str, start_time: str, duration: float, flow: float):
async def flushing_analysis_endpoint(
network: str, pipe_id: str, start_time: str, duration: float, flow: float
):
return flushing_analysis(network, pipe_id, start_time, duration, flow)
@router.get("/contaminantsimulation/")
async def contaminant_simulation_endpoint(network: str, node_id: str, start_time: str, duration: float, concentration: float):
async def contaminant_simulation_endpoint(
network: str, node_id: str, start_time: str, duration: float, concentration: float
):
return contaminant_simulation(network, node_id, start_time, duration, concentration)
@router.get("/ageanalysis/")
async def age_analysis_endpoint(network: str):
return age_analysis(network)
@router.get("/schedulinganalysis/")
async def scheduling_analysis_endpoint(network: str):
return scheduling_analysis(network)
@router.get("/pressureregulation/")
async def pressure_regulation_endpoint(network: str, target_node: str, target_pressure: float):
async def pressure_regulation_endpoint(
network: str, target_node: str, target_pressure: float
):
return pressure_regulation(network, target_node, target_pressure)
@router.get("/projectmanagement/")
async def project_management_endpoint(network: str):
return project_management(network)
@router.get("/dailyschedulinganalysis/")
async def daily_scheduling_analysis_endpoint(network: str):
return daily_scheduling_analysis(network)
@router.get("/networkupdate/")
async def network_update_endpoint(network: str):
return network_update(network)
@router.get("/pumpfailure/")
async def pump_failure_endpoint(network: str, pump_id: str, time: str):
return pump_failure(network, pump_id, time)
@router.get("/pressuresensorplacementsensitivity/")
async def pressure_sensor_placement_sensitivity_endpoint(
name: str, scheme_name: str, sensor_number: int, min_diameter: int, username: str
@@ -131,6 +154,7 @@ async def pressure_sensor_placement_sensitivity_endpoint(
name, scheme_name, sensor_number, min_diameter, username
)
@router.get("/pressuresensorplacementkmeans/")
async def pressure_sensor_placement_kmeans_endpoint(
name: str, scheme_name: str, sensor_number: int, min_diameter: int, username: str

View File

@@ -2,7 +2,7 @@ import time
from typing import List, Optional
from fastapi.logger import logger
import postgresql_info
import app.native.api.postgresql_info as postgresql_info
import psycopg

View File

@@ -4,15 +4,15 @@ from datetime import datetime, timedelta
from psycopg import AsyncConnection
import pandas as pd
import numpy as np
from api_ex.Fdataclean import clean_flow_data_df_kf
from api_ex.Pdataclean import clean_pressure_data_df_km
from api_ex.pipeline_health_analyzer import PipelineHealthAnalyzer
from app.algorithms.api_ex.Fdataclean import clean_flow_data_df_kf
from app.algorithms.api_ex.Pdataclean import clean_pressure_data_df_km
from app.algorithms.api_ex.pipeline_health_analyzer import PipelineHealthAnalyzer
from postgresql.internal_queries import InternalQueries
from postgresql.scada_info import ScadaRepository as PostgreScadaRepository
from timescaledb.schemas.realtime import RealtimeRepository
from timescaledb.schemas.scheme import SchemeRepository
from timescaledb.schemas.scada import ScadaRepository
from app.infra.db.postgresql.internal_queries import InternalQueries
from app.infra.db.postgresql.scada_info import ScadaRepository as PostgreScadaRepository
from app.infra.db.timescaledb.schemas.realtime import RealtimeRepository
from app.infra.db.timescaledb.schemas.scheme import SchemeRepository
from app.infra.db.timescaledb.schemas.scada import ScadaRepository
class CompositeQueries:

View File

@@ -1,13 +1,13 @@
from typing import List
from fastapi.logger import logger
from timescaledb.schemas.scheme import SchemeRepository
from timescaledb.schemas.realtime import RealtimeRepository
import timescaledb.timescaledb_info as timescaledb_info
from datetime import datetime, timedelta
from timescaledb.schemas.scada import ScadaRepository
import psycopg
import time
from app.infra.db.timescaledb.schemas.scheme import SchemeRepository
from app.infra.db.timescaledb.schemas.realtime import RealtimeRepository
import app.infra.db.timescaledb.timescaledb_info as timescaledb_info
from app.infra.db.timescaledb.schemas.scada import ScadaRepository
class InternalStorage:

View File

@@ -8,7 +8,7 @@ from .schemas.realtime import RealtimeRepository
from .schemas.scheme import SchemeRepository
from .schemas.scada import ScadaRepository
from .composite_queries import CompositeQueries
from postgresql.database import get_database_instance as get_postgres_database_instance
from app.infra.db.postgresql.database import get_database_instance as get_postgres_database_instance
router = APIRouter(prefix="/timescaledb", tags=["TimescaleDB"])

View File

@@ -2,7 +2,7 @@ from typing import List, Any, Dict
from datetime import datetime, timedelta, timezone
from collections import defaultdict
from psycopg import AsyncConnection, Connection, sql
import globals
import app.services.globals as globals
# 定义UTC+8时区
UTC_8 = timezone(timedelta(hours=8))

View File

@@ -11,7 +11,7 @@ import pytz
import requests
import time
import shutil
from epanet.epanet import Output
from app.services.epanet.epanet import Output
from typing import Optional, Tuple
import app.infra.db.influxdb.api as influxdb_api
import typing

View File

@@ -1,7 +1,10 @@
import asyncio
import sys
import os
import uvicorn
# 将项目根目录添加到 python 路径
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
if __name__ == "__main__":
# Windows 设置事件循环策略
@@ -13,6 +16,6 @@ if __name__ == "__main__":
"app.main:app",
host="0.0.0.0",
port=8000,
workers=2, # 这里可以设置多进程
# workers=2, # 这里可以设置多进程
loop="asyncio",
)