Refine code with Cursor

DingZQ
2025-02-28 21:16:42 +08:00
parent ae60649726
commit 0d4128c8c8
4 changed files with 36 additions and 35 deletions


@@ -1,4 +1,5 @@
from .database import *
from typing import Any
from .database import ChangeSet, execute_command, try_read, read_all, DbChangeSet, g_update_prefix
TAG_TYPE_NODE = 'NODE'
TAG_TYPE_LINK = 'LINK'
@@ -11,11 +12,11 @@ def get_tag_schema(name: str) -> dict[str, dict[str, Any]]:
def get_tags(name: str) -> list[dict[str, Any]]:
results: list[dict[str, Any]] = []
rows = read_all(name, f"select * from tags_node")
rows = read_all(name, "select * from tags_node")
for row in rows:
tag = str(row['tag']) if row['tag'] != None else None
results.append({ 't_type': TAG_TYPE_NODE, 'id': str(row['id']), 'tag': tag })
rows = read_all(name, f"select * from tags_link")
rows = read_all(name, "select * from tags_link")
for row in rows:
tag = str(row['tag']) if row['tag'] != None else None
results.append({ 't_type': TAG_TYPE_LINK, 'id': str(row['id']), 'tag': tag })
@@ -28,12 +29,12 @@ def get_tag(name: str, t_type: str, id: str) -> dict[str, Any]:
t = try_read(name, f"select * from tags_node where id = '{id}'")
elif t_type == TAG_TYPE_LINK:
t = try_read(name, f"select * from tags_link where id = '{id}'")
if t == None:
if t is None:
return { 't_type': t_type, 'id': id, 'tag': None }
d = {}
d['t_type'] = t_type
d['id'] = str(t['id'])
d['tag'] = str(t['tag']) if t['tag'] != None else None
d['tag'] = str(t['tag']) if t['tag'] is not None else None
return d
@@ -73,11 +74,11 @@ def _set_tag(name: str, cs: ChangeSet) -> DbChangeSet:
raise Exception('Only support NODE and Link')
redo_sql = f"delete from {table} where id = {new.f_id};"
if new.tag != None:
if new.tag is not None:
redo_sql += f"\ninsert into {table} (id, tag) values ({new.f_id}, {new.f_tag});"
undo_sql = f"delete from {table} where id = {old.f_id};"
if old.tag != None:
if old.tag is not None:
undo_sql += f"\ninsert into {table} (id, tag) values ({old.f_id}, {old.f_tag});"
redo_cs = g_update_prefix | new.as_dict()
@@ -129,13 +130,13 @@ def inp_out_tag(name: str) -> list[str]:
def delete_tag_by_node(name: str, node: str) -> ChangeSet:
row = try_read(name, f"select * from tags_node where id = '{node}'")
if row == None:
if row is None:
return ChangeSet()
return ChangeSet(g_update_prefix | {'type': 'tag', 't_type': TAG_TYPE_NODE, 'id': node, 'tag': None })
def delete_tag_by_link(name: str, link: str) -> ChangeSet:
row = try_read(name, f"select * from tags_link where id = '{link}'")
if row == None:
if row is None:
return ChangeSet()
return ChangeSet(g_update_prefix | {'type': 'tag', 't_type': TAG_TYPE_LINK, 'id': link, 'tag': None })
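Most hunks in this file replace '== None' / '!= None' comparisons with the identity checks 'is None' / 'is not None'. Since None is a singleton, PEP 8 recommends the identity check; an equality check can also be fooled by a class that overrides __eq__. A minimal standalone sketch of the difference (illustrative only, not code from this repository):

class AlwaysEqual:
    # A class whose __eq__ claims equality with everything, including None.
    def __eq__(self, other):
        return True

obj = AlwaysEqual()
print(obj == None)   # True  -- the overridden __eq__ gives a misleading answer
print(obj is None)   # False -- identity check: obj is not the None singleton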


@@ -1,4 +1,5 @@
from .database import *
from .database import read_all, ChangeSet, DbChangeSet, g_update_prefix, execute_command, try_read
from typing import Any
def get_demand_schema(name: str) -> dict[str, dict[str, Any]]:
return { 'junction' : {'type': 'str' , 'optional': False , 'readonly': True },
@@ -34,8 +35,8 @@ def _set_demand(name: str, cs: ChangeSet) -> DbChangeSet:
pattern = str(r['pattern']) if 'pattern' in r and r['pattern'] != None else None
category = str(r['category']) if 'category' in r and r['category'] != None else None
f_demand = demand
f_pattern = f"'{pattern}'" if pattern != None else 'null'
f_category = f"'{category}'" if category != None else 'null'
f_pattern = f"'{pattern}'" if pattern is not None else 'null'
f_category = f"'{category}'" if category is not None else 'null'
redo_sql += f"\ninsert into demands (junction, demand, pattern, category) values ({f_junction}, {f_demand}, {f_pattern}, {f_category});"
new['demands'].append({ 'demand': demand, 'pattern': pattern, 'category': category })
@@ -45,8 +46,8 @@ def _set_demand(name: str, cs: ChangeSet) -> DbChangeSet:
pattern = str(r['pattern']) if 'pattern' in r and r['pattern'] != None else None
category = str(r['category']) if 'category' in r and r['category'] != None else None
f_demand = demand
f_pattern = f"'{pattern}'" if pattern != None else 'null'
f_category = f"'{category}'" if category != None else 'null'
f_pattern = f"'{pattern}'" if pattern is not None else 'null'
f_category = f"'{category}'" if category is not None else 'null'
undo_sql += f"\ninsert into demands (junction, demand, pattern, category) values ({f_junction}, {f_demand}, {f_pattern}, {f_category});"
redo_cs = g_update_prefix | { 'type': 'demand' } | new
@@ -75,28 +76,28 @@ def inp_in_demand(line: str) -> str:
junction = str(tokens[0])
demand = float(tokens[1])
pattern = str(tokens[2]) if num_without_desc >= 3 else None
pattern = f"'{pattern}'" if pattern != None else 'null'
pattern = f"'{pattern}'" if pattern is not None else 'null'
category = str(tokens[3]) if num_without_desc >= 4 else None
category = f"'{category}'" if category != None else 'null'
category = f"'{category}'" if category is not None else 'null'
return str(f"insert into demands (junction, demand, pattern, category) values ('{junction}', {demand}, {pattern}, {category});")
def inp_out_demand(name: str) -> list[str]:
lines = []
objs = read_all(name, f"select * from demands order by _order")
objs = read_all(name, "select * from demands order by _order")
for obj in objs:
junction = obj['junction']
demand = obj['demand']
pattern = obj['pattern'] if obj['pattern'] != None else ''
category = f";{obj['category']}" if obj['category'] != None else ';'
pattern = obj['pattern'] if obj['pattern'] is not None else ''
category = f";{obj['category']}" if obj['category'] is not None else ';'
lines.append(f'{junction} {demand} {pattern} {category}')
return lines
def delete_demand_by_junction(name: str, junction: str) -> ChangeSet:
row = try_read(name, f"select * from demands where junction = '{junction}'")
if row == None:
if row is None:
return ChangeSet()
return ChangeSet(g_update_prefix | {'type': 'demand', 'junction': junction, 'demands': []})
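The hunks above repeat the expression f"'{x}'" if x is not None else 'null' to render an optional value as an inline SQL literal. A hypothetical helper capturing that idiom might look like the sketch below; the name sql_literal and the sample values are assumptions, not part of this commit:

def sql_literal(value):
    # Quote a value for inline SQL, or emit null when the value is absent.
    return f"'{value}'" if value is not None else 'null'

# Usage mirroring the demand insert statements above (sample values):
pattern = None
category = 'peak'
print(f"insert into demands (pattern, category) values ({sql_literal(pattern)}, {sql_literal(category)});")
# -> insert into demands (pattern, category) values (null, 'peak');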

main.py

@@ -2032,7 +2032,7 @@ async def fastapi_get_all_scada_info(network: str) -> list[dict[str, float]]:
# inp file
@app.post("/uploadinp/", status_code=status.HTTP_200_OK)
async def upload_inp(afile: bytes, name: str ):
async def fastapi_upload_inp(afile: bytes, name: str ):
filePath = inpDir + str(name)
f = open(filePath, 'wb')
f.write(afile)
@@ -2041,7 +2041,7 @@ async def upload_inp(afile: bytes, name: str ):
return True
@app.get("/downloadinp/", status_code=status.HTTP_200_OK)
async def download_inp(name: str, response: Response):
async def fastapi_download_inp(name: str, response: Response):
filePath = inpDir + name
if os.path.exists(filePath):
return FileResponse(filePath, media_type='application/octet-stream', filename="inp.inp")
@@ -2079,7 +2079,7 @@ async def fastapi_convert_v3_to_v2(req: Request) -> ChangeSet:
return cs
@app.get("/getjson/")
async def get_json():
async def fastapi_get_json():
return JSONResponse(
status_code = status.HTTP_400_BAD_REQUEST,
content={
@@ -2095,12 +2095,12 @@ async def get_json():
# influx db operation
############################################################
@app.get("/getrealtimedata/")
async def get_realtimedata():
async def fastapi_get_realtimedata():
data = [random.randint(0, 100) for _ in range(100)]
return data
@app.get("/getsimulationresult/")
async def get_simulationresult():
async def fastapi_get_simulationresult():
data = [random.randint(0, 100) for _ in range(100)]
return data
@@ -2109,16 +2109,16 @@ async def get_simulationresult():
# DingZQ 2025-01-31
# def query_latest_record_by_ID(ID: str, type: str, bucket: str="realtime_data", client: InfluxDBClient=client) -> dict:
@app.get("/querynodelatestrecordbyid/")
async def query_node_latest_record_by_id(id: str):
async def fastapi_query_node_latest_record_by_id(id: str):
return influxdb_api.query_latest_record_by_ID(id, type='node', client=influx_client)
@app.get("/querylinklatestrecordbyid/")
async def query_link_latest_record_by_id(id: str):
async def fastapi_query_link_latest_record_by_id(id: str):
return influxdb_api.query_latest_record_by_ID(id, type='link', client=influx_client)
# query scada
@app.get("/queryscadalatestrecordbyid/")
async def query_scada_latest_record_by_id(id: str):
async def fastapi_query_scada_latest_record_by_id(id: str):
return influxdb_api.query_latest_record_by_ID(id, type='scada', client=influx_client)
@@ -2140,24 +2140,24 @@ async def fastapi_query_all_records_by_date(querydate: str) -> dict[str, list]:
# def query_curve_by_ID_property_daterange(ID: str, type: str, property: str, start_date: str, end_date: str, bucket: str="realtime_data", client: InfluxDBClient=client) -> list:
@app.get("/querynodecurvebyidpropertydaterange/")
async def query_node_curve_by_id_property_daterange(id: str, prop: str, startdate: str, enddate: str):
async def fastapi_query_node_curve_by_id_property_daterange(id: str, prop: str, startdate: str, enddate: str):
return influxdb_api.query_curve_by_ID_property_daterange(id, type='node', property=prop, start_date=startdate, end_date=enddate, client=influx_client)
@app.get("/querylinkcurvebyidpropertydaterange/")
async def query_link_curve_by_id_property_daterange(id: str, prop: str, startdate: str, enddate: str):
async def fastapi_query_link_curve_by_id_property_daterange(id: str, prop: str, startdate: str, enddate: str):
return influxdb_api.query_curve_by_ID_property_daterange(id, type='link', property=prop, start_date=startdate, end_date=enddate, client=influx_client)
# ids are separated by commas
# returns { 'id': value1, 'id2': value2 }
# def query_SCADA_data_by_device_ID_and_time(query_ids_list: List[str], query_time: str, bucket: str="SCADA_data", client: InfluxDBClient=client) -> Dict[str, float]:
@app.get("/queryscadadatabydeviceidandtime/")
async def query_scada_data_by_device_id_and_time(ids: str, querytime: str):
async def fastapi_query_scada_data_by_device_id_and_time(ids: str, querytime: str):
query_ids = ids.split(',')
logger.info(querytime)
return influxdb_api.query_SCADA_data_by_device_ID_and_time(query_ids_list=query_ids, query_time=querytime, client=influx_client)
@app.get("/queryscadadatabydeviceidandtimerange/")
async def query_scada_data_by_device_id_and_time_range(ids: str, starttime: str, endtime: str):
async def fastapi_query_scada_data_by_device_id_and_time_range(ids: str, starttime: str, endtime: str):
query_ids = ids.split(',')
return influxdb_api.query_SCADA_data_by_device_ID_and_time_range(query_ids_list=query_ids, start_time=starttime, end_time=endtime, client=influx_client)
@@ -2219,8 +2219,7 @@ async def fastapi_run_project(network: str,start_time:str,end_time=None) -> str:
else:
print('file doesnt exists')
#os.rename(filename, filename2)
result = run_simulation_ex(name=network, simulation_type='realtime',
start_datetime=start_time, end_datetime=end_time)
result = run_simulation_ex(name=network, simulation_type='realtime', start_datetime=start_time, end_datetime=end_time)
#os.rename(filename2, filename)
return result
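The main.py hunks rename the route handlers with a fastapi_ prefix while leaving the decorators, and therefore the public URLs, unchanged. A minimal sketch of the pattern, reusing the /getrealtimedata/ handler shown above (standalone, not the project's actual app setup):

import random
from fastapi import FastAPI

app = FastAPI()

# The URL stays /getrealtimedata/; only the Python function name changes,
# which keeps handlers visually distinct from ordinary helper functions.
@app.get("/getrealtimedata/")
async def fastapi_get_realtimedata():
    return [random.randint(0, 100) for _ in range(100)]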


@@ -47,4 +47,4 @@ def get_utc_after(utc_time: datetime, hours: float=0.0, seconds: float=0.0):
'''
Get a point in time after the given UTC time
'''
return utc_time + timedelta(hours=hours, seconds=seconds)
return utc_time + timedelta(hours=hours, seconds=seconds)
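For reference, a minimal standalone sketch of get_utc_after as defined above, with a small usage example (the timezone-aware datetime here is an assumption for illustration; the function itself only adds a timedelta):

from datetime import datetime, timedelta, timezone

def get_utc_after(utc_time: datetime, hours: float = 0.0, seconds: float = 0.0):
    '''Get a point in time after the given UTC time.'''
    return utc_time + timedelta(hours=hours, seconds=seconds)

now = datetime.now(timezone.utc)
print(get_utc_after(now, hours=1.5))    # 1 hour 30 minutes later
print(get_utc_after(now, seconds=90))   # 90 seconds later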