inp file encoding improvement
This commit is contained in:
xinzish
2024-04-07 09:45:02 +08:00
9 changed files with 97 additions and 4 deletions

BIN
api/epanet-output.dll Normal file

Binary file not shown.

BIN
api/epanet2.dll Normal file

Binary file not shown.

View File

@@ -255,15 +255,16 @@ def parse_file(project: str, inp: str, version: str = '3') -> None:
sql_batch.add(handler(line, demand_outside))
elif s == PATTERNS:
sql_batch.add(handler(line, current_pattern not in variable_patterns))
elif s == OPTIONS:
sql_batch.add(handler(line, version))
else:
sql_batch.add(handler(line))
f.seek(0)
if is_s:
sql_batch.add(handler(sections[s]))
if s == OPTIONS:
sql_batch.add(handler(sections[s], version))
else:
sql_batch.add(handler(sections[s]))
sql_batch.flush()

View File

@@ -49,11 +49,32 @@ def inp_in_option(section: list[str]) -> str:
def inp_out_option(name: str) -> list[str]:
lines = []
objs = read_all(name, f"select * from options")
is_dda = False
for obj in objs:
if obj['key'] == 'DEMAND MODEL':
is_dda = obj['value'] == 'DDA'
dda_ignore = [
'HEADERROR', # TODO: default is 0 which is conflict with PDA
'FLOWCHANGE', # TODO: default is 0 which is conflict with PDA
'MINIMUM PRESSURE',
'REQUIRED PRESSURE',
'PRESSURE EXPONENT'
]
for obj in objs:
key = obj['key']
# why write this ?
if key == 'PRESSURE':
continue
# release version does not support new keys and has error message
if key == 'HTOL' or key == 'QTOL' or key == 'RQTOL':
continue
# ignore some weird settings for DDA
if is_dda and key in dda_ignore:
continue
value = obj['value']
if str(value).strip() != '':
lines.append(f'{key} {value}')

View File

@@ -1,3 +1,73 @@
import os
import ctypes
from .project import have_project
from .inp_out import dump_inp
def calculate_service_area(name: str) -> list[dict[str, list[str]]]:
    """Compute, for each output time step, which nodes are served by which source.

    Dumps the project database to an EPANET-style ``.inp`` file, then drives
    the native ``api/toolkit.dll`` service-area routines through ctypes.

    Args:
        name: Project name; must already exist (checked via ``have_project``).

    Returns:
        One dict per time step, mapping a source node id to the list of node
        ids whose highest tracer concentration originated from that source.

    Raises:
        Exception: If the project does not exist.
    """
    if not have_project(name):
        raise Exception(f'Not found project [{name}]')
    base_dir = os.path.abspath(os.getcwd())  # renamed from `dir` (shadowed builtin)
    inp_path = os.path.join(os.path.join(base_dir, 'db_inp'), name + '.db.inp')
    # Format version '2' is what the native toolkit expects for its input file.
    dump_inp(name, inp_path, '2')
    toolkit = ctypes.CDLL(os.path.join(os.path.join(base_dir, 'api'), 'toolkit.dll'))
    inp = ctypes.c_char_p(inp_path.encode())
    handle = ctypes.c_ulonglong()
    # NOTE(review): toolkit return codes are never checked — confirm the DLL's
    # error convention and add checks if these calls can fail.
    toolkit.TK_ServiceArea_Start(inp, ctypes.byref(handle))
    try:
        c_node_count = ctypes.c_size_t()
        toolkit.TK_ServiceArea_GetNodeCount(handle, ctypes.byref(c_node_count))
        node_count = c_node_count.value
        node_ids: list[str] = []
        for n in range(node_count):
            c_id = ctypes.c_char_p()  # renamed from `id` (shadowed builtin)
            toolkit.TK_ServiceArea_GetNodeId(handle, ctypes.c_size_t(n), ctypes.byref(c_id))
            node_ids.append(c_id.value.decode())
        c_time_count = ctypes.c_size_t()
        toolkit.TK_ServiceArea_GetTimeCount(handle, ctypes.byref(c_time_count))
        time_count = c_time_count.value
        results: list[dict[str, list[str]]] = []
        for t in range(time_count):
            c_source_count = ctypes.c_size_t()
            toolkit.TK_ServiceArea_GetSourceCount(handle, ctypes.c_size_t(t), ctypes.byref(c_source_count))
            source_count = c_source_count.value
            result: dict[str, list[str]] = {}
            if source_count == 0:
                # No sources at this time step: nothing to attribute. The original
                # code would dereference sources[0]/concentration[0] here and crash.
                results.append(result)
                continue
            sources = ctypes.POINTER(ctypes.c_size_t)()
            toolkit.TK_ServiceArea_GetSources(handle, ctypes.c_size_t(t), ctypes.byref(sources))
            for s in range(source_count):
                result[node_ids[sources[s]]] = []
            for n in range(node_count):
                concentration = ctypes.POINTER(ctypes.c_double)()
                toolkit.TK_ServiceArea_GetConcentration(handle, ctypes.c_size_t(t), ctypes.c_size_t(n), ctypes.byref(concentration))
                # Attribute node n to the source contributing the highest concentration.
                max_source = sources[0]
                max_conc = concentration[0]
                for s in range(1, source_count):
                    if concentration[s] > max_conc:
                        max_source = sources[s]
                        max_conc = concentration[s]
                result[node_ids[max_source]].append(node_ids[n])
            results.append(result)
    finally:
        # Always release the native handle, even if a ctypes call above raised;
        # the original leaked the handle on any exception.
        toolkit.TK_ServiceArea_End(handle)
    return results
'''
import sys
import json
from queue import Queue
@@ -7,7 +77,6 @@ from .s0_base import get_node_links, get_link_nodes
sys.path.append('..')
from epanet.epanet import run_project
def _calculate_service_area(name: str, inp, time_index: int = 0) -> dict[str, list[str]]:
sources : dict[str, list[str]] = {}
for node_result in inp['node_results']:
@@ -126,3 +195,4 @@ def calculate_service_area(name: str) -> list[dict[str, list[str]]]:
result.append(sas)
return result
'''

View File

@@ -2,6 +2,7 @@ from .s32_region_util import calculate_boundary, inflate_boundary
from .s34_sa_cal import *
from .s34_sa import get_all_service_area_ids
from .batch_exe import execute_batch_command
from .database import ChangeSet
def generate_service_area(name: str, inflate_delta: float = 0.5) -> ChangeSet:
cs = ChangeSet()

BIN
api/toolkit.dll Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.