# Router module: pipe risk-probability endpoints and InfluxDB time-series
# query endpoints, extracted from the original monolithic main.py.
from fastapi import APIRouter, Request
|
|
from typing import Any, List, Dict, Union
|
|
import redis
|
|
import msgpack
|
|
import datetime
|
|
from app.services.tjnetwork import *
|
|
from app.infra.db.influxdb import api as influxdb_api
|
|
from app.infra.cache.redis_client import redis_client # Assuming redis_client is exposed or needs initialization
|
|
# NOTE: the Redis client used for caching below is initialized ad-hoc in this
# module (carried over from main.py, where it was a global). It should
# eventually live in a single shared module (e.g. app.infra.cache) and be
# provided to routers as a dependency instead of being re-created here.
|
|
|
|
# Need to handle date encoding for msgpack
|
|
def decode_datetime(obj):
    """msgpack ``object_hook``: revive datetimes packed by ``encode_datetime``.

    Dicts carrying the ``__datetime__`` marker are parsed back into
    ``datetime.datetime``; every other dict is returned untouched.
    """
    if "__datetime__" not in obj:
        return obj
    return datetime.datetime.strptime(obj["as_str"], "%Y%m%dT%H:%M:%S.%f")
|
def encode_datetime(obj):
    """msgpack ``default`` hook: serialize ``datetime.datetime`` values.

    Datetimes become a tagged dict (``__datetime__`` + formatted string) that
    ``decode_datetime`` can reverse; anything else passes through unchanged.
    """
    if not isinstance(obj, datetime.datetime):
        return obj
    return {"__datetime__": True, "as_str": obj.strftime("%Y%m%dT%H:%M:%S.%f")}
|
# NOTE(review): ad-hoc Redis initialization carried over from main.py. This
# rebinds the `redis_client` imported at the top of the file; the client
# should move to one shared cache module and be injected as a dependency.
try:
    redis_client = redis.Redis(host='localhost', port=6379, db=0)
    # Fix: redis.Redis() connects lazily, so constructing it never raises even
    # when the server is unreachable — the except branch could never fire.
    # ping() forces a round-trip now, so an unreachable server cleanly
    # disables caching (redis_client = None) instead of failing later
    # inside the endpoints.
    redis_client.ping()
except Exception:
    redis_client = None
|
# APIRouter instance that every endpoint defined below registers against.
router = APIRouter()
|
############################################################
|
|
# pipe_risk_probability 41
|
|
############################################################
|
|
|
|
@router.get("/getpiperiskprobabilitynow/")
async def fastapi_get_pipe_risk_probability_now(network: str, pipe_id: str) -> dict[str, Any]:
    """Current risk probability for one pipe in *network*."""
    # Thin HTTP wrapper around the service-layer implementation.
    result = get_pipe_risk_probability_now(network, pipe_id)
    return result
|
@router.get("/getpiperiskprobability/")
async def fastapi_get_pipe_risk_probability(network: str, pipe_id: str) -> dict[str, Any]:
    """Risk-probability history for one pipe in *network*."""
    # Thin HTTP wrapper around the service-layer implementation.
    result = get_pipe_risk_probability(network, pipe_id)
    return result
|
@router.get("/getpipesriskprobability/")
async def fastapi_get_pipes_risk_probability(network: str, pipe_ids: str) -> list[dict[str, Any]]:
    """Risk probabilities for several pipes; *pipe_ids* is comma-separated."""
    id_list = pipe_ids.split(",")
    return get_pipes_risk_probability(network, id_list)
|
@router.get("/getnetworkpiperiskprobabilitynow/")
async def fastapi_get_network_pipe_risk_probability_now(network: str) -> list[dict[str, Any]]:
    """Current risk probability for every pipe in *network*."""
    # Thin HTTP wrapper around the service-layer implementation.
    result = get_network_pipe_risk_probability_now(network)
    return result
|
@router.get("/getpiperiskprobabilitygeometries/")
async def fastapi_get_pipe_risk_probability_geometries(network: str) -> dict[str, Any]:
    """Geometries associated with pipe risk probabilities for *network*."""
    geometries = get_pipe_risk_probability_geometries(network)
    return geometries
|
############################################################
|
|
# InfluxDB / Real-time Data
|
|
############################################################
|
|
|
|
@router.get("/getrealtimedata/")
async def fastapi_get_realtimedata():
    """Placeholder endpoint: 100 random integers in [0, 100] (from original main.py)."""
    import random  # local import kept from the original placeholder

    return [random.randint(0, 100) for _ in range(100)]
|
@router.get("/getsimulationresult/")
async def fastapi_get_simulationresult():
    """Placeholder endpoint: 100 random integers in [0, 100] (from original main.py)."""
    import random  # local import kept from the original placeholder

    return [random.randint(0, 100) for _ in range(100)]
|
@router.get("/querynodelatestrecordbyid/")
async def fastapi_query_node_latest_record_by_id(id: str):
    """Latest InfluxDB record for the node with the given *id*."""
    # `id` shadows the builtin but is part of the public query-string API.
    record = influxdb_api.query_latest_record_by_ID(id, type="node")
    return record
|
@router.get("/querylinklatestrecordbyid/")
async def fastapi_query_link_latest_record_by_id(id: str):
    """Latest InfluxDB record for the link with the given *id*."""
    # `id` shadows the builtin but is part of the public query-string API.
    record = influxdb_api.query_latest_record_by_ID(id, type="link")
    return record
|
@router.get("/queryscadalatestrecordbyid/")
async def fastapi_query_scada_latest_record_by_id(id: str):
    """Latest InfluxDB record for the SCADA point with the given *id*."""
    # `id` shadows the builtin but is part of the public query-string API.
    record = influxdb_api.query_latest_record_by_ID(id, type="scada")
    return record
|
@router.get("/queryallrecordsbytime/")
async def fastapi_query_all_records_by_time(querytime: str) -> dict[str, list]:
    """All node and link records at a single timestamp *querytime*."""
    rows = influxdb_api.query_all_records_by_time(query_time=querytime)
    # The backend returns a (nodes, links) tuple.
    return {"nodes": rows[0], "links": rows[1]}
|
@router.get("/queryallrecordsbytimeproperty/")
async def fastapi_query_all_record_by_time_property(
    querytime: str, type: str, property: str, bucket: str = "realtime_simulation_result"
) -> dict[str, list]:
    """Records at *querytime* filtered by element *type* and *property*.

    ``type`` and ``property`` shadow builtins but are part of the public
    query-string API and must keep their names.
    """
    rows = influxdb_api.query_all_record_by_time_property(
        query_time=querytime,
        type=type,
        property=property,
        bucket=bucket,
    )
    return {"results": rows}
|
@router.get("/queryallschemerecordsbytimeproperty/")
async def fastapi_query_all_scheme_record_by_time_property(
    querytime: str,
    type: str,
    property: str,
    schemename: str,
    bucket: str = "scheme_simulation_result",
) -> dict[str, list]:
    """Scheme-simulation records at *querytime* for one scheme, filtered by
    element *type* and *property*.

    ``type`` and ``property`` shadow builtins but are part of the public
    query-string API and must keep their names.
    """
    rows = influxdb_api.query_all_scheme_record_by_time_property(
        query_time=querytime,
        scheme_name=schemename,
        type=type,
        property=property,
        bucket=bucket,
    )
    return {"results": rows}
|
@router.get("/querysimulationrecordsbyidtime/")
async def fastapi_query_simulation_record_by_ids_time(
    id: str, querytime: str, type: str, bucket: str = "realtime_simulation_result"
) -> dict[str, list]:
    """Simulation records for a single element *id* at *querytime*.

    ``id`` and ``type`` shadow builtins but are part of the public
    query-string API and must keep their names.
    """
    rows = influxdb_api.query_simulation_result_by_ID_time(
        ID=id,
        query_time=querytime,
        type=type,
        bucket=bucket,
    )
    return {"results": rows}
|
@router.get("/queryschemesimulationrecordsbyidtime/")
async def fastapi_query_scheme_simulation_record_by_ids_time(
    scheme_name: str,
    id: str,
    querytime: str,
    type: str,
    bucket: str = "scheme_simulation_result",
) -> dict[str, list]:
    """Scheme-simulation records for a single element *id* at *querytime*.

    ``id`` and ``type`` shadow builtins but are part of the public
    query-string API and must keep their names.
    """
    rows = influxdb_api.query_scheme_simulation_result_by_ID_time(
        scheme_name=scheme_name,
        ID=id,
        query_time=querytime,
        type=type,
        bucket=bucket,
    )
    return {"results": rows}
|
@router.get("/queryallrecordsbydate/")
async def fastapi_query_all_records_by_date(querydate: str) -> dict:
    """All node/link records for *querydate*, with a Redis read-through cache.

    Past dates are treated as immutable, so their results are cached in Redis
    (msgpack-encoded via encode_datetime/decode_datetime). Today/future dates
    always hit InfluxDB.
    """
    # NOTE(review): local import mirrors the original main.py wiring; confirm
    # time_api's canonical location and hoist to module level.
    from app.services.tjnetwork import time_api

    # Fix: the original computed the cacheability condition twice and defined
    # cache_key only inside the first conditional (fragile scoping); hoist both.
    cacheable = not time_api.is_today_or_future(querydate) and redis_client
    cache_key = f"queryallrecordsbydate_{querydate}"

    if cacheable:
        cached = redis_client.get(cache_key)
        if cached:
            return msgpack.unpackb(cached, object_hook=decode_datetime)

    # The backend returns a (nodes, links) tuple.
    rows = influxdb_api.query_all_records_by_date(query_date=querydate)
    results = {"nodes": rows[0], "links": rows[1]}

    if cacheable:
        redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime))

    return results
|
@router.get("/queryallrecordsbytimerange/")
async def fastapi_query_all_records_by_time_range(
    starttime: str, endtime: str
) -> dict[str, list]:
    """All node/link records in [starttime, endtime], with a Redis cache.

    Results are cached only when the whole range lies in the past (the data
    is then immutable); ranges touching today or the future always hit
    InfluxDB.
    """
    # NOTE(review): local import mirrors the original main.py wiring; confirm
    # time_api's canonical location and hoist to module level.
    from app.services.tjnetwork import time_api

    # Fix: the original gated the cache on `starttime` only, which would have
    # allowed caching ranges that end today or later (mutable data). Gating on
    # `endtime` guarantees the entire range is immutable.
    cacheable = not time_api.is_today_or_future(endtime) and redis_client
    cache_key = f"queryallrecordsbytimerange_{starttime}_{endtime}"

    if cacheable:
        cached = redis_client.get(cache_key)
        if cached:
            return msgpack.unpackb(cached, object_hook=decode_datetime)

    # The backend returns a (nodes, links) tuple.
    rows = influxdb_api.query_all_records_by_time_range(
        starttime=starttime, endtime=endtime
    )
    results = {"nodes": rows[0], "links": rows[1]}

    # Fix: the original read from the cache but never wrote to it, so the
    # cache could never hit (its own comment admitted the omission). Populate
    # it for fully-past ranges, mirroring /queryallrecordsbydate/.
    if cacheable:
        redis_client.set(cache_key, msgpack.packb(results, default=encode_datetime))

    return results