import os
|
|
import ctypes
|
|
from .project_backup import have_project
|
|
from .inp_out import dump_inp
|
|
|
|
def calculate_service_area(name: str) -> list[dict[str, list[str]]]:
    """Compute per-timestep service areas for the named project.

    Dumps the project to an EPANET-style ``.inp`` file, then drives the
    native ``toolkit.dll`` service-area API over it.  For every time step,
    each node is assigned to the source whose tracer concentration at that
    node is highest.

    Args:
        name: Project name; must exist according to ``have_project``.

    Returns:
        One dict per time step, mapping a source node id to the list of
        node ids it predominantly serves.

    Raises:
        Exception: If the project does not exist.
    """
    if not have_project(name):
        raise Exception(f'Not found project [{name}]')

    # `base_dir` (not `dir`) — avoid shadowing the builtin.
    base_dir = os.path.abspath(os.getcwd())

    inp_str = os.path.join(base_dir, 'db_inp', name + '.db.inp')
    dump_inp(name, inp_str, '2')

    toolkit = ctypes.CDLL(os.path.join(base_dir, 'api', 'toolkit.dll'))

    inp = ctypes.c_char_p(inp_str.encode())

    handle = ctypes.c_ulonglong()
    toolkit.TK_ServiceArea_Start(inp, ctypes.byref(handle))
    try:
        c_node_count = ctypes.c_size_t()
        toolkit.TK_ServiceArea_GetNodeCount(handle, ctypes.byref(c_node_count))
        node_count = c_node_count.value

        # Resolve every node index to its string id once, up front.
        node_ids: list[str] = []
        for n in range(node_count):
            node_id = ctypes.c_char_p()  # `node_id`, not builtin `id`
            toolkit.TK_ServiceArea_GetNodeId(
                handle, ctypes.c_size_t(n), ctypes.byref(node_id))
            node_ids.append(node_id.value.decode())

        c_time_count = ctypes.c_size_t()
        toolkit.TK_ServiceArea_GetTimeCount(handle, ctypes.byref(c_time_count))
        time_count = c_time_count.value

        results: list[dict[str, list[str]]] = []

        for t in range(time_count):
            c_source_count = ctypes.c_size_t()
            toolkit.TK_ServiceArea_GetSourceCount(
                handle, ctypes.c_size_t(t), ctypes.byref(c_source_count))
            source_count = c_source_count.value

            # `sources` is a toolkit-owned array of node indices.
            sources = ctypes.POINTER(ctypes.c_size_t)()
            toolkit.TK_ServiceArea_GetSources(
                handle, ctypes.c_size_t(t), ctypes.byref(sources))

            # One bucket per source node for this time step.
            result: dict[str, list[str]] = {
                node_ids[sources[s]]: [] for s in range(source_count)
            }

            if source_count == 0:
                # No sources this time step: nothing to assign.  (The
                # previous code dereferenced sources[0] here and crashed.)
                results.append(result)
                continue

            for n in range(node_count):
                concentration = ctypes.POINTER(ctypes.c_double)()
                toolkit.TK_ServiceArea_GetConcentration(
                    handle, ctypes.c_size_t(t), ctypes.c_size_t(n),
                    ctypes.byref(concentration))

                # Assign node n to the source with the highest concentration.
                max_source = sources[0]
                max_conc = concentration[0]
                for s in range(1, source_count):
                    if concentration[s] > max_conc:
                        max_source = sources[s]
                        max_conc = concentration[s]

                result[node_ids[max_source]].append(node_ids[n])

            results.append(result)
    finally:
        # Always release the native handle, even if a toolkit call raised.
        toolkit.TK_ServiceArea_End(handle)

    return results
|
|
|
|
'''
|
|
import sys
|
|
import json
|
|
from queue import Queue
|
|
from .database import *
|
|
from .s0_base import get_node_links, get_link_nodes
|
|
|
|
sys.path.append('..')
|
|
from epanet.epanet import run_project
|
|
|
|
def _calculate_service_area(name: str, inp, time_index: int = 0) -> dict[str, list[str]]:
|
|
sources : dict[str, list[str]] = {}
|
|
for node_result in inp['node_results']:
|
|
result = node_result['result'][time_index]
|
|
if result['demand'] < 0:
|
|
sources[node_result['node']] = []
|
|
|
|
link_flows: dict[str, float] = {}
|
|
for link_result in inp['link_results']:
|
|
result = link_result['result'][time_index]
|
|
link_flows[link_result['link']] = float(result['flow'])
|
|
|
|
# build source to nodes map
|
|
for source in sources:
|
|
queue = Queue()
|
|
queue.put(source)
|
|
|
|
while not queue.empty():
|
|
cursor = queue.get()
|
|
if cursor not in sources[source]:
|
|
sources[source].append(cursor)
|
|
|
|
links = get_node_links(name, cursor)
|
|
for link in links:
|
|
node1, node2 = get_link_nodes(name, link)
|
|
if node1 == cursor and link_flows[link] > 0:
|
|
queue.put(node2)
|
|
elif node2 == cursor and link_flows[link] < 0:
|
|
queue.put(node1)
|
|
|
|
#return sources
|
|
|
|
# calculation concentration
|
|
concentration_map: dict[str, dict[str, float]] = {}
|
|
node_wip: list[str] = []
|
|
for source, nodes in sources.items():
|
|
for node in nodes:
|
|
if node not in concentration_map:
|
|
concentration_map[node] = {}
|
|
concentration_map[node][source] = 0.0
|
|
if node not in node_wip:
|
|
node_wip.append(node)
|
|
|
|
# if only one source, done
|
|
for node, concentrations in concentration_map.items():
|
|
if len(concentrations) == 1:
|
|
node_wip.remove(node)
|
|
for key in concentrations.keys():
|
|
concentration_map[node][key] = 1.0
|
|
|
|
node_upstream : dict[str, list[tuple[str, str]]] = {}
|
|
for node in node_wip:
|
|
if node not in node_upstream:
|
|
node_upstream[node] = []
|
|
|
|
links = get_node_links(name, node)
|
|
for link in links:
|
|
node1, node2 = get_link_nodes(name, link)
|
|
if node2 == node and link_flows[link] > 0:
|
|
node_upstream[node].append((link, node1))
|
|
elif node1 == node and link_flows[link] < 0:
|
|
node_upstream[node].append((link, node2))
|
|
|
|
while len(node_wip) != 0:
|
|
done = []
|
|
for node in node_wip:
|
|
up_link_nodes = node_upstream[node]
|
|
ready = True
|
|
for link_node in up_link_nodes:
|
|
if link_node[1] in node_wip:
|
|
ready = False
|
|
break
|
|
if ready:
|
|
for link_node in up_link_nodes:
|
|
if link_node[1] not in concentration_map.keys():
|
|
continue
|
|
for source, concentration in concentration_map[link_node[1]].items():
|
|
concentration_map[node][source] += concentration * abs(link_flows[link_node[0]])
|
|
|
|
# normalize
|
|
sum = 0.0
|
|
for source, concentration in concentration_map[node].items():
|
|
sum += concentration
|
|
for source in concentration_map[node].keys():
|
|
concentration_map[node][source] /= sum
|
|
|
|
done.append(node)
|
|
|
|
for node in done:
|
|
node_wip.remove(node)
|
|
|
|
source_to_main_node: dict[str, list[str]] = {}
|
|
for node, value in concentration_map.items():
|
|
max_source = ''
|
|
max_concentration = 0.0
|
|
for s, c in value.items():
|
|
if c > max_concentration:
|
|
max_concentration = c
|
|
max_source = s
|
|
if max_source not in source_to_main_node:
|
|
source_to_main_node[max_source] = []
|
|
source_to_main_node[max_source].append(node)
|
|
|
|
return source_to_main_node
|
|
|
|
|
|
def calculate_service_area(name: str) -> list[dict[str, list[str]]]:
|
|
inp = json.loads(run_project(name, True))
|
|
|
|
result: list[dict[str, list[str]]] = []
|
|
|
|
time_count = len(inp['node_results'][0]['result'])
|
|
|
|
for i in range(time_count):
|
|
sas = _calculate_service_area(name, inp, i)
|
|
result.append(sas)
|
|
|
|
return result
|
|
'''
|