Accept Merge Request #178: (api -> master)

Merge Request: Big change...

Created By: @王琼钰
Accepted By: @王琼钰
URL: https://tjwater.coding.net/p/tjwatercloud/d/TJWaterServer/git/merge/178?initial=true
This commit is contained in:
王琼钰
2023-03-09 00:36:07 +08:00
38 changed files with 1563 additions and 493 deletions

View File

@@ -90,12 +90,26 @@ from .s20_mixing import get_mixing_schema, get_mixing, set_mixing, add_mixing, d
from .s21_times import TIME_STATISTIC_NONE, TIME_STATISTIC_AVERAGED, TIME_STATISTIC_MINIMUM, TIME_STATISTIC_MAXIMUM, TIME_STATISTIC_RANGE
from .s21_times import get_time_schema, get_time, set_time
from .s23_options import OPTION_UNITS_CFS, OPTION_UNITS_GPM, OPTION_UNITS_MGD, OPTION_UNITS_IMGD, OPTION_UNITS_AFD, OPTION_UNITS_LPS, OPTION_UNITS_LPM, OPTION_UNITS_MLD, OPTION_UNITS_CMH, OPTION_UNITS_CMD
from .s23_options import OPTION_HEADLOSS_HW, OPTION_HEADLOSS_DW, OPTION_HEADLOSS_CM
from .s23_options import OPTION_UNBALANCED_STOP, OPTION_UNBALANCED_CONTINUE
from .s23_options import OPTION_DEMAND_MODEL_DDA, OPTION_DEMAND_MODEL_PDA
from .s23_options import OPTION_QUALITY_NONE, OPTION_QUALITY_CHEMICAL, OPTION_QUALITY_AGE, OPTION_QUALITY_TRACE
from .s23_options import get_option_schema, get_option, set_option
from .s23_options_util import OPTION_UNITS_CFS, OPTION_UNITS_GPM, OPTION_UNITS_MGD, OPTION_UNITS_IMGD, OPTION_UNITS_AFD, OPTION_UNITS_LPS, OPTION_UNITS_LPM, OPTION_UNITS_MLD, OPTION_UNITS_CMH, OPTION_UNITS_CMD
from .s23_options_util import OPTION_PRESSURE_PSI, OPTION_PRESSURE_KPA, OPTION_PRESSURE_M
from .s23_options_util import OPTION_HEADLOSS_HW, OPTION_HEADLOSS_DW, OPTION_HEADLOSS_CM
from .s23_options_util import OPTION_UNBALANCED_STOP, OPTION_UNBALANCED_CONTINUE
from .s23_options_util import OPTION_DEMAND_MODEL_DDA, OPTION_DEMAND_MODEL_PDA
from .s23_options_util import OPTION_QUALITY_NONE, OPTION_QUALITY_CHEMICAL, OPTION_QUALITY_AGE, OPTION_QUALITY_TRACE
from .s23_options_util import get_option_schema, get_option
from .s23_options import set_option
from .s23_options_util import OPTION_V3_FLOW_UNITS_CFS, OPTION_V3_FLOW_UNITS_GPM, OPTION_V3_FLOW_UNITS_MGD, OPTION_V3_FLOW_UNITS_IMGD, OPTION_V3_FLOW_UNITS_AFD, OPTION_V3_FLOW_UNITS_LPS, OPTION_V3_FLOW_UNITS_LPM, OPTION_V3_FLOW_UNITS_MLD, OPTION_V3_FLOW_UNITS_CMH, OPTION_V3_FLOW_UNITS_CMD
from .s23_options_util import OPTION_V3_PRESSURE_UNITS_PSI, OPTION_V3_PRESSURE_UNITS_KPA, OPTION_V3_PRESSURE_UNITS_M
from .s23_options_util import OPTION_V3_HEADLOSS_MODEL_HW, OPTION_V3_HEADLOSS_MODEL_DW, OPTION_V3_HEADLOSS_MODEL_CM
from .s23_options_util import OPTION_V3_STEP_SIZING_FULL, OPTION_V3_STEP_SIZING_RELAXATION, OPTION_V3_STEP_SIZING_LINESEARCH
from .s23_options_util import OPTION_V3_IF_UNBALANCED_STOP, OPTION_V3_IF_UNBALANCED_CONTINUE
from .s23_options_util import OPTION_V3_DEMAND_MODEL_FIXED, OPTION_V3_DEMAND_MODEL_CONSTRAINED, OPTION_V3_DEMAND_MODEL_POWER, OPTION_V3_DEMAND_MODEL_LOGISTIC
from .s23_options_util import OPTION_V3_LEAKAGE_MODEL_NONE, OPTION_V3_LEAKAGE_MODEL_POWER, OPTION_V3_LEAKAGE_MODEL_FAVAD
from .s23_options_util import OPTION_V3_QUALITY_MODEL_NONE, OPTION_V3_QUALITY_MODEL_CHEMICAL, OPTION_V3_QUALITY_MODEL_AGE, OPTION_V3_QUALITY_MODEL_TRACE
from .s23_options_util import OPTION_V3_QUALITY_UNITS_HRS, OPTION_V3_QUALITY_UNITS_PCNT, OPTION_V3_QUALITY_UNITS_MGL, OPTION_V3_QUALITY_UNITS_UGL
from .s23_options_util import get_option_v3_schema, get_option_v3
from .s23_options_v3 import set_option_v3
from .s24_coordinates import get_node_coord

View File

@@ -22,7 +22,7 @@ from .s19_reactions import set_reaction_cmd, set_pipe_reaction_cmd, set_tank_rea
from .s20_mixing import set_mixing_cmd, add_mixing_cmd, delete_mixing_cmd
from .s21_times import set_time_cmd
#from .s22_report import *
from .s23_options import set_option_cmd
from .s23_options_util import set_option_cmd, set_option_v3_cmd
#from .s24_coordinates import *
from .s25_vertices import set_vertex_cmd, add_vertex_cmd, delete_vertex_cmd
from .s26_labels import set_label_cmd, add_label_cmd, delete_label_cmd
@@ -89,6 +89,8 @@ def add_cmd(name: str, cs: ChangeSet) -> DbChangeSet | None:
return None
elif type == s23_option:
return None
elif type == s23_option_v3:
return None
elif type == s24_coordinate:
return None
elif type == s25_vertex:
@@ -164,6 +166,8 @@ def set_cmd(name: str, cs: ChangeSet) -> DbChangeSet | None:
return None
elif type == s23_option:
return set_option_cmd(name, cs)
elif type == s23_option_v3:
return set_option_v3_cmd(name, cs)
elif type == s24_coordinate: # do not support update here
return None
elif type == s25_vertex:
@@ -239,6 +243,8 @@ def del_cmd(name: str, cs: ChangeSet) -> DbChangeSet | None:
return None
elif type == s23_option:
return None
elif type == s23_option_v3:
return None
elif type == s24_coordinate:
return None
elif type == s25_vertex:

View File

@@ -23,6 +23,7 @@ from .s20_mixing import set_mixing, add_mixing, delete_mixing
from .s21_times import set_time
#from .s22_report import *
from .s23_options import set_option
from .s23_options_v3 import set_option_v3
#from .s24_coordinates import *
from .s25_vertices import set_vertex, add_vertex, delete_vertex
from .s26_labels import set_label, add_label, delete_label
@@ -89,6 +90,8 @@ def execute_add_command(name: str, cs: ChangeSet) -> ChangeSet:
return ChangeSet()
elif type == s23_option:
return ChangeSet()
elif type == s23_option_v3:
return ChangeSet()
elif type == s24_coordinate:
return ChangeSet()
elif type == s25_vertex:
@@ -164,6 +167,8 @@ def execute_update_command(name: str, cs: ChangeSet) -> ChangeSet:
return ChangeSet()
elif type == s23_option:
return set_option(name, cs)
elif type == s23_option_v3:
return set_option_v3(name, cs)
elif type == s24_coordinate: # do not support update here
return ChangeSet()
elif type == s25_vertex:
@@ -239,6 +244,8 @@ def execute_delete_command(name: str, cs: ChangeSet) -> ChangeSet:
return ChangeSet()
elif type == s23_option:
return ChangeSet()
elif type == s23_option_v3:
return ChangeSet()
elif type == s24_coordinate:
return ChangeSet()
elif type == s25_vertex:

View File

@@ -3,7 +3,6 @@ from .sections import *
from .s0_base import *
from .s2_junctions import unset_junction_by_pattern
from .s3_reservoirs import unset_reservoir_by_pattern
from .s4_tanks import unset_tank_by_curve
from .s6_pumps import unset_pump_by_curve, unset_pump_by_pattern
@@ -164,7 +163,6 @@ def delete_pattern_cascade_batch_cmd(name: str, cs: ChangeSet) -> ChangeSet:
if row == None:
return result
result.merge(unset_junction_by_pattern(name, id))
result.merge(unset_reservoir_by_pattern(name, id))
result.merge(unset_pump_by_pattern(name, id))
result.merge(unset_demand_by_pattern(name, id))

View File

@@ -185,6 +185,8 @@ def _read_inp(file: dict[str, list[str]]) -> ChangeSet:
coords = inp_in_coord(section)
for s in ['JUNCTIONS', 'RESERVOIRS', 'TANKS']:
for node in file_cs[s].operations:
if node['type'] == 'demand':
continue
if node['id'] in coords:
coord = coords[node['id']]
node |= { 'x' : coord['x'], 'y' : coord['y'] }
@@ -204,10 +206,6 @@ def _read_inp(file: dict[str, list[str]]) -> ChangeSet:
elif name == 'END':
pass # :)
# if demand section is empty, fill it with junction data
demands_cs = fill_demand(file_cs['JUNCTIONS'], file_cs['DEMANDS'])
file_cs['DEMANDS'].merge(demands_cs)
# release file
file = {}

View File

@@ -69,7 +69,7 @@ def set_status(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2][EPA3][IN][OUT]
# link value
#--------------------------------------------------------------
class InpStatus:

View File

@@ -94,7 +94,7 @@ def delete_pattern(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2][IN][OUT]
# ;desc
# id mult1 mult2 .....
#--------------------------------------------------------------
@@ -134,3 +134,61 @@ def inp_out_pattern(name: str) -> list[str]:
factor = obj['factor']
lines.append(f'{id} {factor}')
return lines
#--------------------------------------------------------------
# [EPA3][IN][OUT]
# id FIXED (interval)
# id factor1 factor2 ...
# id VARIABLE
# id time1 factor1 time2 factor2 ...
#--------------------------------------------------------------
def inp_in_pattern_v3(section: list[str]) -> ChangeSet:
    """Parse an EPA3 [PATTERNS] section into a ChangeSet of pattern adds.

    Recognised line shapes:
        id FIXED (interval)            -- declaration, interval discarded
        id factor1 factor2 ...         -- data line of a fixed pattern
        id VARIABLE                    -- declaration, id remembered
        id time1 factor1 time2 ...     -- data line of a variable pattern
    Times of variable patterns are intentionally discarded; only the
    factors are stored.
    """
    patterns: dict[str, list[float]] = {}
    variable_patterns: list[str] = []
    for line in section:
        if line.startswith(';'):
            continue  # comment line
        tokens = line.split()
        if len(tokens) < 2:
            continue  # robustness: skip blank/one-token lines instead of raising
        if tokens[1] == 'VARIABLE':
            variable_patterns.append(tokens[0])
            continue
        elif tokens[1] == 'FIXED':
            continue  # declaration only; the optional interval is not stored
        factors = patterns.setdefault(tokens[0], [])
        if tokens[0] not in variable_patterns:
            factors.extend(float(token) for token in tokens[1:])
        else:
            # BUG FIX: a variable data line is "id time1 factor1 time2 factor2 ...",
            # so factors sit at even offsets (tokens[2::2]); the previous
            # tokens[1::2] collected the *times* instead of the factors.
            factors.extend(float(token) for token in tokens[2::2])
    cs = ChangeSet()
    for pattern_id, factor_list in patterns.items():
        cs.append(g_add_prefix | {'type': 'pattern', 'id': pattern_id, 'factors': factor_list})
    return cs
def inp_out_pattern_v3(name: str) -> list[str]:
    """Serialize every stored pattern as EPA3 [PATTERNS] lines.

    The database keeps no time information, so each pattern is declared
    FIXED once, followed by one factor per row.
    """
    lines: list[str] = []
    declared: list[str] = []
    for row in read_all(name, f"select * from patterns order by _order"):
        pattern_id = row['id']
        if pattern_id not in declared:
            # first row of this pattern: emit the declaration line
            lines.append(f'{pattern_id} FIXED')
            declared.append(pattern_id)
        lines.append(f"{pattern_id} {row['factor']}")
    return lines

View File

@@ -122,7 +122,7 @@ def delete_curve(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2][IN][OUT]
# ;type: desc
# id x y
#--------------------------------------------------------------
@@ -134,7 +134,7 @@ def inp_in_curve(section: list[str]) -> ChangeSet:
count = len(section)
for i in range(0, count):
if section[i].startswith(';'):
# this is description
# ;type: desc
type_plus_desc = section[i].removeprefix(';')
type_plus_desc_tokens = type_plus_desc.split(':')
next = i + 1
@@ -173,3 +173,53 @@ def inp_out_curve(name: str) -> list[str]:
y = obj['y']
lines.append(f'{id} {x} {y}')
return lines
#--------------------------------------------------------------
# [EPA3][IN][OUT]
# id type
# id x y
#--------------------------------------------------------------
def inp_in_curve_v3(section: list[str]) -> ChangeSet:
    """Parse an EPA3 [CURVES] section into a ChangeSet of curve adds.

    Two line shapes: "id TYPE" declares the curve type, "id x y" adds a
    point. Curves with no declaration default to CURVE_TYPE_PUMP.
    """
    curve_types: dict[str, str] = {}
    points: dict[str, list[dict[str, float]]] = {}
    for line in section:
        if line.startswith(';'):
            continue  # comment line
        tokens = line.split()
        if tokens[1] in ('PUMP', 'EFFICIENCY', 'VOLUME', 'HEADLOSS'):
            # type declaration line
            curve_types[tokens[0]] = tokens[1]
            continue
        points.setdefault(tokens[0], []).append({'x': float(tokens[1]), 'y': float(tokens[2])})
    cs = ChangeSet()
    for curve_id, coords in points.items():
        c_type = curve_types.get(curve_id, CURVE_TYPE_PUMP)
        cs.append(g_add_prefix | {'type': 'curve', 'id': curve_id, 'c_type': c_type, 'coords': coords})
    return cs
def inp_out_curve_v3(name: str) -> list[str]:
    """Serialize curves as EPA3 [CURVES] lines: one type line per curve,
    then its x/y point rows in stored order."""
    lines: list[str] = []
    for header in read_all(name, f"select * from _curve"):
        curve_id = header['id']
        # "id type" declaration line
        lines.append(f"{curve_id} {header['type']}")
        # NOTE(review): curve_id is interpolated into the SQL text; safe only
        # if ids are validated upstream — confirm whether read_all supports
        # bound parameters.
        for point in read_all(name, f"select * from curves where id = '{curve_id}' order by _order"):
            lines.append(f"{point['id']} {point['x']} {point['y']}")
    return lines

View File

@@ -35,11 +35,12 @@ def set_control(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2]
# [EPA2][EPA3]
# LINK linkID setting IF NODE nodeID {BELOW/ABOVE} level
# LINK linkID setting AT TIME value (units)
# LINK linkID setting AT CLOCKTIME value (units)
# (0) (1) (2) (3) (4) (5) (6) (7)
# todo...
#--------------------------------------------------------------
def inp_in_control(section: list[str]) -> ChangeSet:
if len(section) > 0:

View File

@@ -35,7 +35,7 @@ def set_rule(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2]
# [EPA2][EPA3]
# TODO...
#--------------------------------------------------------------
def inp_in_rule(section: list[str]) -> ChangeSet:

View File

@@ -132,7 +132,7 @@ def set_pump_energy(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2][EPA3][IN][OUT]
# GLOBAL {PRICE/PATTERN/EFFIC} value
# PUMP id {PRICE/PATTERN/EFFIC} value
# DEMAND CHARGE value

View File

@@ -60,9 +60,12 @@ def set_emitter(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2][IN][OUT]
# node Ke
#--------------------------------------------------------------
# [EPA3][IN][OUT]
# node Ke (exponent pattern)
#--------------------------------------------------------------
class InpEmitter:
def __init__(self, line: str) -> None:
tokens = line.split()

View File

@@ -60,7 +60,7 @@ def set_quality(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2][EPA3][IN][OUT]
# node initqual
#--------------------------------------------------------------
class InpQuality:

View File

@@ -103,7 +103,7 @@ def delete_source(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2][EPA3][IN][OUT]
# node sourcetype quality (pattern)
#--------------------------------------------------------------
class InpSource:

View File

@@ -182,7 +182,7 @@ def set_tank_reaction(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2][EPA3][IN][OUT]
# ORDER {BULK/WALL/TANK} value
# GLOBAL BULK coeff
# GLOBAL WALL coeff

View File

@@ -97,8 +97,9 @@ def delete_mixing(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2][EPA3][IN][OUT]
# TankID MixModel FractVolume
# FractVolume if type == MIX2
#--------------------------------------------------------------
class InpMixing:
def __init__(self, line: str) -> None:

View File

@@ -68,7 +68,7 @@ def set_time(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2]
# [EPA2][EPA3]
# STATISTIC {NONE/AVERAGE/MIN/MAX/RANGE}
# DURATION value (units)
# HYDRAULIC TIMESTEP value (units)
@@ -79,6 +79,7 @@ def set_time(name: str, cs: ChangeSet) -> ChangeSet:
# REPORT TIMESTEP value (units)
# REPORT START value (units)
# START CLOCKTIME value (AM PM)
# [EPA3] supports [EPA2] keyword
#--------------------------------------------------------------
def inp_in_time(section: list[str]) -> ChangeSet:
if len(section) > 0:

View File

@@ -2,7 +2,7 @@ from .database import *
#--------------------------------------------------------------
# [EPANET2]
# [EPA2]
# PAGE linesperpage
# STATUS {NONE/YES/FULL}
# SUMMARY {YES/NO}
@@ -15,6 +15,8 @@ from .database import *
# FILE filename
# variable {YES/NO}
# variable {BELOW/ABOVE/PRECISION} value
# [EPA3][NOT SUPPORT]
# TRIALS {YES/NO}
#--------------------------------------------------------------
def inp_in_report(section: list[str]) -> ChangeSet:
return ChangeSet()

View File

@@ -1,166 +1,43 @@
from .database import *
OPTION_UNITS_CFS = 'CFS'
OPTION_UNITS_GPM = 'GPM'
OPTION_UNITS_MGD = 'MGD'
OPTION_UNITS_IMGD = 'IMGD'
OPTION_UNITS_AFD = 'AFD'
OPTION_UNITS_LPS = 'LPS'
OPTION_UNITS_LPM = 'LPM'
OPTION_UNITS_MLD = 'MLD'
OPTION_UNITS_CMH = 'CMH'
OPTION_UNITS_CMD = 'CMD'
OPTION_HEADLOSS_HW = 'H-W'
OPTION_HEADLOSS_DW = 'D-W'
OPTION_HEADLOSS_CM = 'C-M'
#OPTION_HYDRAULICS_USE = 'USE'
#OPTION_HYDRAULICS_SAVE = 'SAVE'
OPTION_UNBALANCED_STOP = 'STOP'
OPTION_UNBALANCED_CONTINUE = 'CONTINUE'
OPTION_DEMAND_MODEL_DDA = 'DDA'
OPTION_DEMAND_MODEL_PDA = 'PDA'
OPTION_QUALITY_NONE = 'NONE'
OPTION_QUALITY_CHEMICAL = 'CHEMICAL'
OPTION_QUALITY_AGE = 'AGE'
OPTION_QUALITY_TRACE = 'TRACE'
element_schema = {'type': 'str' , 'optional': True , 'readonly': False}
def get_option_schema(name: str) -> dict[str, dict[str, Any]]:
return { 'UNITS' : element_schema,
'HEADLOSS' : element_schema,
#'HYDRAULICS' : element_schema,
'VISCOSITY' : element_schema,
'SPECIFIC GRAVITY' : element_schema,
'TRIALS' : element_schema,
'ACCURACY' : element_schema,
'FLOWCHANGE' : element_schema,
'HEADERROR' : element_schema,
'CHECKFREQ' : element_schema,
'MAXCHECK' : element_schema,
'DAMPLIMIT' : element_schema,
'UNBALANCED' : element_schema,
'DEMAND MODEL' : element_schema,
'MINIMUM PRESSURE' : element_schema,
'REQUIRED PRESSURE' : element_schema,
'PRESSURE EXPONENT' : element_schema,
'PATTERN' : element_schema,
'DEMAND MULTIPLIER' : element_schema,
'EMITTER EXPONENT' : element_schema,
'QUALITY' : element_schema,
'DIFFUSIVITY' : element_schema,
'TOLERANCE' : element_schema,
#'MAP' : element_schema,
}
def get_option(name: str) -> dict[str, Any]:
ts = read_all(name, f"select * from options")
d = {}
for e in ts:
d[e['key']] = str(e['value'])
return d
def set_option_cmd(name: str, cs: ChangeSet) -> DbChangeSet:
raw_old = get_option(name)
old = {}
new = {}
new_dict = cs.operations[0]
schema = get_option_schema(name)
for key in schema.keys():
if key in new_dict:
old[key] = str(raw_old[key])
new[key] = str(new_dict[key])
redo_cs = g_update_prefix | { 'type' : 'option' }
redo_sql = ''
for key, value in new.items():
if redo_sql != '':
redo_sql += '\n'
redo_sql += f"update options set value = '{value}' where key = '{key}';"
redo_cs |= { key: value }
undo_cs = g_update_prefix | { 'type' : 'option' }
undo_sql = ''
for key, value in old.items():
if undo_sql != '':
undo_sql += '\n'
undo_sql += f"update options set value = '{value}' where key = '{key}';"
undo_cs |= { key: value }
return DbChangeSet(redo_sql, undo_sql, [redo_cs], [undo_cs])
from .s23_options_util import get_option_schema, generate_v3
from .batch_cmd import execute_batch_command
def set_option(name: str, cs: ChangeSet) -> ChangeSet:
return execute_command(name, set_option_cmd(name, cs))
cs.operations[0]['operation'] = API_UPDATE
cs.operations[0]['type'] = 'option'
new_cs = cs
new_cs.merge(generate_v3(cs))
return execute_batch_command(name, new_cs)
#--------------------------------------------------------------
# [EPANET2]
# UNITS CFS/GPM/MGD/IMGD/AFD/LPS/LPM/MLD/CMH/CMD/SI
# PRESSURE PSI/KPA/M
# HEADLOSS H-W/D-W/C-M
# HYDRAULICS USE/SAVE filename
# QUALITY NONE/AGE/TRACE/CHEMICAL (TraceNode)
# MAP filename
# VERIFY filename
# UNBALANCED STOP/CONTINUE {Niter}
# PATTERN id
# DEMAND MODEL DDA/PDA
# DEMAND MULTIPLIER value
# EMITTER EXPONENT value
# VISCOSITY value
# DIFFUSIVITY value
# SPECIFIC GRAVITY value
# TRIALS value
# ACCURACY value#
# HEADERROR value
# FLOWCHANGE value
# MINIMUM PRESSURE value
# REQUIRED PRESSURE value
# PRESSURE EXPONENT value#
# TOLERANCE value
# ---- Undocumented Options -----
# HTOL value
# QTOL value
# RQTOL value
# CHECKFREQ value
# MAXCHECK value
# DAMPLIMIT value
#--------------------------------------------------------------
def inp_in_option(section: list[str]) -> ChangeSet:
if len(section) > 0:
cs = g_update_prefix | { 'type' : 'option' }
for s in section:
if s.startswith(';'):
continue
if len(section) <= 0:
return ChangeSet()
tokens = s.strip().split()
if tokens[0].upper() == 'PATTERN':
cs |= { 'PATTERN' : tokens[1] }
elif tokens[0].upper() == 'QUALITY':
value = tokens[1]
if len(tokens) > 2:
value += f' {tokens[2]}'
cs |= { 'QUALITY' : value }
else:
line = s.upper().strip()
for key in get_option_schema('').keys():
if line.startswith(key):
value = line.removeprefix(key).strip()
cs |= { key : value }
return ChangeSet(cs)
return ChangeSet()
cs = g_update_prefix | { 'type' : 'option' }
for s in section:
if s.startswith(';'):
continue
tokens = s.strip().split()
if tokens[0].upper() == 'PATTERN': # can not upper id
cs |= { 'PATTERN' : tokens[1] }
elif tokens[0].upper() == 'QUALITY': # can not upper trace node
value = tokens[1]
if len(tokens) > 2:
value += f' {tokens[2]}'
cs |= { 'QUALITY' : value }
else:
line = s.upper().strip()
for key in get_option_schema('').keys():
if line.startswith(key):
value = line.removeprefix(key).strip()
cs |= { key : value }
result = ChangeSet(cs)
result.merge(generate_v3(result))
return result
def inp_out_option(name: str) -> list[str]:
@@ -169,5 +46,6 @@ def inp_out_option(name: str) -> list[str]:
for obj in objs:
key = obj['key']
value = obj['value']
lines.append(f'{key} {value}')
if str(value).strip() != '':
lines.append(f'{key} {value}')
return lines

403
api/s23_options_util.py Normal file
View File

@@ -0,0 +1,403 @@
from .database import *
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# UNITS CFS/GPM/MGD/IMGD/AFD/LPS/LPM/MLD/CMH/CMD/SI
# PRESSURE PSI/KPA/M
# HEADLOSS H-W/D-W/C-M
# QUALITY NONE/AGE/TRACE/CHEMICAL (TraceNode)
# UNBALANCED STOP/CONTINUE {Niter}
# PATTERN id
# DEMAND MODEL DDA/PDA
# DEMAND MULTIPLIER value
# EMITTER EXPONENT value
# VISCOSITY value
# DIFFUSIVITY value
# SPECIFIC GRAVITY value
# TRIALS value
# ACCURACY value#
# HEADERROR value
# FLOWCHANGE value
# MINIMUM PRESSURE value
# REQUIRED PRESSURE value
# PRESSURE EXPONENT value#
# TOLERANCE value
# HTOL value
# QTOL value
# RQTOL value
# CHECKFREQ value
# MAXCHECK value
# DAMPLIMIT value
# ---- Unsupported Options -----
# HYDRAULICS USE/SAVE filename
# MAP filename
#--------------------------------------------------------------
# Generic schema entry shared by every option key: a free-form string
# value that is optional and writable.
element_schema = {'type': 'str' , 'optional': True , 'readonly': False}
# EPANET2 flow-unit keywords (UNITS option).
OPTION_UNITS_CFS = 'CFS'
OPTION_UNITS_GPM = 'GPM'
OPTION_UNITS_MGD = 'MGD'
OPTION_UNITS_IMGD = 'IMGD'
OPTION_UNITS_AFD = 'AFD'
OPTION_UNITS_LPS = 'LPS'
OPTION_UNITS_LPM = 'LPM'
OPTION_UNITS_MLD = 'MLD'
OPTION_UNITS_CMH = 'CMH'
OPTION_UNITS_CMD = 'CMD'
# Pressure-unit keywords (PRESSURE option).
OPTION_PRESSURE_PSI = 'PSI'
OPTION_PRESSURE_KPA = 'KPA'
OPTION_PRESSURE_M = 'M'
# Headloss-formula keywords (HEADLOSS option): Hazen-Williams,
# Darcy-Weisbach, Chezy-Manning.
OPTION_HEADLOSS_HW = 'H-W'
OPTION_HEADLOSS_DW = 'D-W'
OPTION_HEADLOSS_CM = 'C-M'
# Behaviour when the hydraulic solver fails to balance (UNBALANCED option).
OPTION_UNBALANCED_STOP = 'STOP'
OPTION_UNBALANCED_CONTINUE = 'CONTINUE'
# Demand-model keywords: demand-driven / pressure-driven analysis.
OPTION_DEMAND_MODEL_DDA = 'DDA'
OPTION_DEMAND_MODEL_PDA = 'PDA'
# Water-quality analysis keywords (QUALITY option).
OPTION_QUALITY_NONE = 'NONE'
OPTION_QUALITY_CHEMICAL = 'CHEMICAL'
OPTION_QUALITY_AGE = 'AGE'
OPTION_QUALITY_TRACE = 'TRACE'
def get_option_schema(name: str) -> dict[str, dict[str, Any]]:
    """Return the schema of supported EPANET2 [OPTIONS] keys.

    Every key shares the generic string ``element_schema``; the ``name``
    parameter is unused but kept for interface symmetry with other
    ``get_*_schema`` helpers.
    """
    keys = ('UNITS', 'PRESSURE', 'HEADLOSS', 'QUALITY', 'UNBALANCED',
            'PATTERN', 'DEMAND MODEL', 'DEMAND MULTIPLIER',
            'EMITTER EXPONENT', 'VISCOSITY', 'DIFFUSIVITY',
            'SPECIFIC GRAVITY', 'TRIALS', 'ACCURACY', 'HEADERROR',
            'FLOWCHANGE', 'MINIMUM PRESSURE', 'REQUIRED PRESSURE',
            'PRESSURE EXPONENT', 'TOLERANCE', 'HTOL', 'QTOL', 'RQTOL',
            'CHECKFREQ', 'MAXCHECK', 'DAMPLIMIT')
    return {key: element_schema for key in keys}
def get_option(name: str) -> dict[str, Any]:
    """Read all EPANET2 options from the database as key -> str(value)."""
    rows = read_all(name, f"select * from options")
    return {row['key']: str(row['value']) for row in rows}
def set_option_cmd(name: str, cs: ChangeSet) -> DbChangeSet:
    """Build the redo/undo SQL and change sets for an EPANET2 option update.

    Only keys present both in the schema and in the incoming change set
    are touched; the undo side captures their current database values.
    Raises KeyError if a schema key in ``cs`` is missing from the db row set.
    """
    current = get_option(name)
    incoming = cs.operations[0]
    # new values, filtered and ordered by the schema
    changed = {key: str(incoming[key]) for key in get_option_schema(name) if key in incoming}
    # previous values for the same keys (undo side)
    previous = {key: str(current[key]) for key in changed}
    prefix = g_update_prefix | { 'type' : 'option' }
    redo_sql = '\n'.join(
        f"update options set value = '{value}' where key = '{key}';"
        for key, value in changed.items())
    undo_sql = '\n'.join(
        f"update options set value = '{value}' where key = '{key}';"
        for key, value in previous.items())
    return DbChangeSet(redo_sql, undo_sql, [prefix | changed], [prefix | previous])
def set_option_only(name: str, cs: ChangeSet) -> ChangeSet:
    """Apply a v2 option change set directly, without mirroring it to v3."""
    return execute_command(name, set_option_cmd(name, cs))
# EPA3 option keywords. Where EPA3 reuses an EPANET2 keyword the v3
# constant aliases the v2 one; genuinely new keywords get literal values.
# Flow units (FLOW_UNITS).
OPTION_V3_FLOW_UNITS_CFS = OPTION_UNITS_CFS
OPTION_V3_FLOW_UNITS_GPM = OPTION_UNITS_GPM
OPTION_V3_FLOW_UNITS_MGD = OPTION_UNITS_MGD
OPTION_V3_FLOW_UNITS_IMGD = OPTION_UNITS_IMGD
OPTION_V3_FLOW_UNITS_AFD = OPTION_UNITS_AFD
OPTION_V3_FLOW_UNITS_LPS = OPTION_UNITS_LPS
OPTION_V3_FLOW_UNITS_LPM = OPTION_UNITS_LPM
OPTION_V3_FLOW_UNITS_MLD = OPTION_UNITS_MLD
OPTION_V3_FLOW_UNITS_CMH = OPTION_UNITS_CMH
OPTION_V3_FLOW_UNITS_CMD = OPTION_UNITS_CMD
# Pressure units (PRESSURE_UNITS).
OPTION_V3_PRESSURE_UNITS_PSI = OPTION_PRESSURE_PSI
OPTION_V3_PRESSURE_UNITS_KPA = OPTION_PRESSURE_KPA
OPTION_V3_PRESSURE_UNITS_M = OPTION_PRESSURE_M
# Headloss formula (HEADLOSS_MODEL).
OPTION_V3_HEADLOSS_MODEL_HW = OPTION_HEADLOSS_HW
OPTION_V3_HEADLOSS_MODEL_DW = OPTION_HEADLOSS_DW
OPTION_V3_HEADLOSS_MODEL_CM = OPTION_HEADLOSS_CM
# Newton step sizing strategy (STEP_SIZING) — new in EPA3.
OPTION_V3_STEP_SIZING_FULL = 'FULL'
OPTION_V3_STEP_SIZING_RELAXATION = 'RELAXATION'
OPTION_V3_STEP_SIZING_LINESEARCH = 'LINESEARCH'
# Behaviour when the solver fails to balance (IF_UNBALANCED).
OPTION_V3_IF_UNBALANCED_STOP = OPTION_UNBALANCED_STOP
OPTION_V3_IF_UNBALANCED_CONTINUE = OPTION_UNBALANCED_CONTINUE
# Demand model (DEMAND_MODEL) — EPA3 replaces DDA/PDA with four models.
OPTION_V3_DEMAND_MODEL_FIXED = 'FIXED'
OPTION_V3_DEMAND_MODEL_CONSTRAINED = 'CONSTRAINED'
OPTION_V3_DEMAND_MODEL_POWER = 'POWER'
OPTION_V3_DEMAND_MODEL_LOGISTIC = 'LOGISTIC'
# Leakage model (LEAKAGE_MODEL) — new in EPA3.
OPTION_V3_LEAKAGE_MODEL_NONE = 'NONE'
OPTION_V3_LEAKAGE_MODEL_POWER = 'POWER'
OPTION_V3_LEAKAGE_MODEL_FAVAD = 'FAVAD'
# Water-quality model (QUALITY_MODEL).
OPTION_V3_QUALITY_MODEL_NONE = OPTION_QUALITY_NONE
OPTION_V3_QUALITY_MODEL_CHEMICAL = OPTION_QUALITY_CHEMICAL
OPTION_V3_QUALITY_MODEL_AGE = OPTION_QUALITY_AGE
OPTION_V3_QUALITY_MODEL_TRACE = OPTION_QUALITY_TRACE
# Quality measurement units (QUALITY_UNITS) — new in EPA3.
OPTION_V3_QUALITY_UNITS_HRS = 'HRS'
OPTION_V3_QUALITY_UNITS_PCNT = 'PCNT'
OPTION_V3_QUALITY_UNITS_MGL = 'MG/L'
OPTION_V3_QUALITY_UNITS_UGL = 'UG/L'
def get_option_v3_schema(name: str) -> dict[str, dict[str, Any]]:
    """Return the schema of supported EPA3 [OPTIONS] keys.

    Every key shares the generic string ``element_schema``; the ``name``
    parameter is unused but kept for interface symmetry.
    """
    keys = ('FLOW_UNITS', 'PRESSURE_UNITS', 'HEADLOSS_MODEL',
            'SPECIFIC_GRAVITY', 'SPECIFIC_VISCOSITY', 'MAXIMUM_TRIALS',
            'HEAD_TOLERANCE', 'FLOW_TOLERANCE', 'FLOW_CHANGE_LIMIT',
            'RELATIVE_ACCURACY', 'TIME_WEIGHT', 'STEP_SIZING',
            'IF_UNBALANCED', 'DEMAND_MODEL', 'DEMAND_PATTERN',
            'DEMAND_MULTIPLIER', 'MINIMUM_PRESSURE', 'SERVICE_PRESSURE',
            'PRESSURE_EXPONENT', 'LEAKAGE_MODEL', 'LEAKAGE_COEFF1',
            'LEAKAGE_COEFF2', 'EMITTER_EXPONENT', 'QUALITY_MODEL',
            'QUALITY_NAME', 'QUALITY_UNITS', 'TRACE_NODE',
            'SPECIFIC_DIFFUSIVITY', 'QUALITY_TOLERANCE')
    return {key: element_schema for key in keys}
def get_option_v3(name: str) -> dict[str, Any]:
    """Read all EPA3 options from the database as key -> str(value)."""
    rows = read_all(name, f"select * from options_v3")
    return {row['key']: str(row['value']) for row in rows}
def set_option_v3_cmd(name: str, cs: ChangeSet) -> DbChangeSet:
    """Build the redo/undo SQL and change sets for an EPA3 option update.

    Mirrors ``set_option_cmd`` but targets the ``options_v3`` table.
    Raises KeyError if a schema key in ``cs`` is missing from the db row set.
    """
    current = get_option_v3(name)
    incoming = cs.operations[0]
    # new values, filtered and ordered by the schema
    changed = {key: str(incoming[key]) for key in get_option_v3_schema(name) if key in incoming}
    # previous values for the same keys (undo side)
    previous = {key: str(current[key]) for key in changed}
    prefix = g_update_prefix | { 'type' : 'option_v3' }
    redo_sql = '\n'.join(
        f"update options_v3 set value = '{value}' where key = '{key}';"
        for key, value in changed.items())
    undo_sql = '\n'.join(
        f"update options_v3 set value = '{value}' where key = '{key}';"
        for key, value in previous.items())
    return DbChangeSet(redo_sql, undo_sql, [prefix | changed], [prefix | previous])
def set_option_v3_only(name: str, cs: ChangeSet) -> ChangeSet:
    """Apply a v3 option change set directly, without mirroring it to v2."""
    return execute_command(name, set_option_v3_cmd(name, cs))
# Maps EPANET2 option keys to their EPA3 equivalents. Entries left
# commented out have no v3 counterpart and are silently dropped when a
# v2 change set is mirrored to v3.
_key_map_23 = {
    'UNITS' : 'FLOW_UNITS',
    'PRESSURE' : 'PRESSURE_UNITS',
    'HEADLOSS' : 'HEADLOSS_MODEL',
    'QUALITY' : 'QUALITY_MODEL',
    'UNBALANCED' : 'IF_UNBALANCED',
    'PATTERN' : 'DEMAND_PATTERN',
    'DEMAND MODEL' : 'DEMAND_MODEL',
    'DEMAND MULTIPLIER' : 'DEMAND_MULTIPLIER',
    'EMITTER EXPONENT' : 'EMITTER_EXPONENT',
    'VISCOSITY' : 'SPECIFIC_VISCOSITY',
    'DIFFUSIVITY' : 'SPECIFIC_DIFFUSIVITY',
    'SPECIFIC GRAVITY' : 'SPECIFIC_GRAVITY',
    'TRIALS' : 'MAXIMUM_TRIALS',
    'ACCURACY' : 'RELATIVE_ACCURACY',
    #'HEADERROR' : '',
    'FLOWCHANGE' : 'FLOW_CHANGE_LIMIT',
    'MINIMUM PRESSURE' : 'MINIMUM_PRESSURE',
    'REQUIRED PRESSURE' : 'SERVICE_PRESSURE',
    'PRESSURE EXPONENT' : 'PRESSURE_EXPONENT',
    'TOLERANCE' : 'QUALITY_TOLERANCE',
    'HTOL' : 'HEAD_TOLERANCE',
    'QTOL' : 'FLOW_TOLERANCE',
    #'RQTOL' : '',
    #'CHECKFREQ' : '',
    #'MAXCHECK' : '',
    #'DAMPLIMIT' : '',
    }
# Inverse mapping: EPA3 option keys back to EPANET2. EPA3-only keys
# (step sizing, leakage, quality name/units, trace node, time weight)
# stay commented out and are dropped when mirroring v3 -> v2.
_key_map_32 = {
    'FLOW_UNITS' : 'UNITS',
    'PRESSURE_UNITS' : 'PRESSURE',
    'HEADLOSS_MODEL' : 'HEADLOSS',
    'SPECIFIC_GRAVITY' : 'SPECIFIC GRAVITY',
    'SPECIFIC_VISCOSITY' : 'VISCOSITY',
    'MAXIMUM_TRIALS' : 'TRIALS',
    'HEAD_TOLERANCE' : 'HTOL',
    'FLOW_TOLERANCE' : 'QTOL',
    'FLOW_CHANGE_LIMIT' : 'FLOWCHANGE',
    'RELATIVE_ACCURACY' : 'ACCURACY',
    #'TIME_WEIGHT' : '',
    #'STEP_SIZING' : '',
    'IF_UNBALANCED' : 'UNBALANCED',
    'DEMAND_MODEL' : 'DEMAND MODEL',
    'DEMAND_PATTERN' : 'PATTERN',
    'DEMAND_MULTIPLIER' : 'DEMAND MULTIPLIER',
    'MINIMUM_PRESSURE' : 'MINIMUM PRESSURE',
    'SERVICE_PRESSURE' : 'REQUIRED PRESSURE',
    'PRESSURE_EXPONENT' : 'PRESSURE EXPONENT',
    #'LEAKAGE_MODEL' : '',
    #'LEAKAGE_COEFF1' : '',
    #'LEAKAGE_COEFF2' : '',
    'EMITTER_EXPONENT' : 'EMITTER EXPONENT',
    'QUALITY_MODEL' : 'QUALITY',
    #'QUALITY_NAME' : '',
    #'QUALITY_UNITS' : '',
    #'TRACE_NODE' : '',
    'SPECIFIC_DIFFUSIVITY' : 'DIFFUSIVITY',
    'QUALITY_TOLERANCE' : 'TOLERANCE'
    }
def generate_v2(cs: ChangeSet) -> ChangeSet:
    """Derive the EPANET2 mirror of a v3 option update.

    Returns ``cs`` unchanged if it is already a v2 ('option') update.
    Keys without a v2 counterpart are dropped; QUALITY_MODEL and
    DEMAND_MODEL get value translation, everything else is copied.
    """
    source = cs.operations[0]
    if source['type'] == 'option':
        return cs
    derived = {}
    for key, raw in source.items():
        if key in ('operation', 'type') or key not in _key_map_32:
            continue
        target = _key_map_32[key]
        if key == 'QUALITY_MODEL':
            quality = str(raw).upper()
            if quality == OPTION_QUALITY_TRACE and 'TRACE_NODE' in source:
                # TRACE keeps its node id appended, as EPANET2 expects
                derived[target] = f"{OPTION_QUALITY_TRACE} {source['TRACE_NODE']}"
            else:
                derived[target] = quality
        elif key == 'DEMAND_MODEL':
            # FIXED maps to demand-driven; all other v3 models become PDA
            derived[target] = OPTION_DEMAND_MODEL_DDA if raw == OPTION_V3_DEMAND_MODEL_FIXED else OPTION_DEMAND_MODEL_PDA
        else:
            derived[target] = raw
    if derived:
        return ChangeSet(derived | g_update_prefix | { 'type' : 'option' })
    return ChangeSet()
def generate_v3(cs: ChangeSet) -> ChangeSet:
    """Derive the EPA3 mirror of a v2 option update.

    Returns ``cs`` unchanged if it is already a v3 ('option_v3') update.
    Keys without a v3 counterpart are dropped; QUALITY and DEMAND MODEL
    get value translation, everything else is copied.
    """
    source = cs.operations[0]
    if source['type'] == 'option_v3':
        return cs
    derived = {}
    for key, raw in source.items():
        if key in ('operation', 'type') or key not in _key_map_23:
            continue
        target = _key_map_23[key]
        if key == 'QUALITY':
            # v2 value may be "TRACE nodeid"; split the node out
            tokens = str(raw).split()
            if len(tokens) >= 1:
                derived[target] = tokens[0].upper()
                if tokens[0].upper() == OPTION_QUALITY_TRACE and len(tokens) >= 2:
                    derived['TRACE_NODE'] = tokens[1]
        elif key == 'DEMAND MODEL':
            # DDA maps to FIXED; PDA (and anything else) becomes POWER
            derived[target] = OPTION_V3_DEMAND_MODEL_FIXED if raw == OPTION_DEMAND_MODEL_DDA else OPTION_V3_DEMAND_MODEL_POWER
        else:
            derived[target] = raw
    if derived:
        return ChangeSet(derived | g_update_prefix | { 'type' : 'option_v3' })
    return ChangeSet()

69
api/s23_options_v3.py Normal file
View File

@@ -0,0 +1,69 @@
from .database import *
from .s23_options_util import get_option_schema, get_option_v3_schema, generate_v2, generate_v3
from .batch_cmd import execute_batch_command
def set_option_v3(name: str, cs: ChangeSet) -> ChangeSet:
    """Apply a v3 option update and mirror it into the v2 options table.

    NOTE: mutates the caller's ``cs`` in place (stamps operation/type and
    merges the generated v2 mirror into it) before executing the batch.
    """
    cs.operations[0]['operation'] = API_UPDATE
    cs.operations[0]['type'] = 'option_v3'
    new_cs = cs  # alias, not a copy — the merge below also mutates cs
    new_cs.merge(generate_v2(cs))
    return execute_batch_command(name, new_cs)
def _parse_v2(v2_lines: list[str]) -> dict[str, str]:
    """Parse EPANET v2 [OPTIONS] lines into an 'option' update change-set dict.

    ``PATTERN`` and ``QUALITY`` keep their value case (pattern ids and trace
    node ids are case-sensitive); every other line is upper-cased and matched
    against the v2 option schema keys.

    Fixes over the naive version:
    * blank / too-short lines are skipped instead of raising IndexError;
    * schema matching takes the LONGEST matching key, so e.g.
      ``PRESSURE EXPONENT 0.5`` binds only to ``PRESSURE EXPONENT`` and no
      longer clobbers ``PRESSURE`` with ``"EXPONENT 0.5"``.
    """
    cs_v2 = g_update_prefix | { 'type' : 'option' }
    for s in v2_lines:
        tokens = s.split()
        if not tokens:
            continue  # blank line
        if tokens[0].upper() == 'PATTERN':  # can not upper id
            if len(tokens) >= 2:
                cs_v2 |= { 'PATTERN' : tokens[1] }
        elif tokens[0].upper() == 'QUALITY':  # can not upper trace node
            if len(tokens) >= 2:
                value = tokens[1]
                if len(tokens) > 2:
                    value += f' {tokens[2]}'
                cs_v2 |= { 'QUALITY' : value }
        else:
            line = s.upper().strip()
            # Prefer the longest schema key that prefixes the line, so
            # multi-word keys win over their single-word prefixes.
            best = None
            for key in get_option_schema('').keys():
                if line.startswith(key) and (best is None or len(key) > len(best)):
                    best = key
            if best is not None:
                cs_v2 |= { best : line.removeprefix(best).strip() }
    return cs_v2
def inp_in_option_v3(section: list[str]) -> ChangeSet:
    """Import an INP [OPTIONS] section, accepting both v3 and legacy v2 keys.

    Lines whose first token is a v3 schema key update the v3 change-set
    directly; anything else is collected and run through the v2 parser, then
    converted to v3. Finally the v2 mirror is regenerated from the combined
    result so both option tables stay in sync.

    Comment lines (leading ``;``) and blank lines are ignored; blank lines
    previously raised IndexError on ``tokens[0]``.
    """
    if len(section) <= 0:
        return ChangeSet()
    cs_v3 = g_update_prefix | { 'type' : 'option_v3' }
    v2_lines = []
    for s in section:
        if s.startswith(';'):
            continue
        tokens = s.strip().split()
        if not tokens:
            continue  # blank line
        key = tokens[0]
        if key in get_option_v3_schema('').keys():
            value = tokens[1] if len(tokens) >= 2 else ''
            cs_v3 |= { key : value }
        else:
            v2_lines.append(s.strip())
    # unlikely: a v3 section carrying v2-style option lines
    cs_v2 = _parse_v2(v2_lines)
    result = ChangeSet(cs_v3)
    result.merge(generate_v3(ChangeSet(cs_v2)))
    result.merge(generate_v2(result))
    return result
def inp_out_option_v3(name: str) -> list[str]:
    """Render the options_v3 table of *name* as INP [OPTIONS] lines.

    Entries whose value is empty (after stripping) are omitted.
    """
    rows = read_all(name, "select * from options_v3")
    return [
        f"{row['key']} {row['value']}"
        for row in rows
        if str(row['value']).strip() != ''
    ]

View File

@@ -12,7 +12,7 @@ def get_node_coord(name: str, id: str) -> dict[str, float]:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2][EPA3][IN][OUT]
# id x y
#--------------------------------------------------------------
# exception ! need merge to node change set !

View File

@@ -76,8 +76,9 @@ def delete_vertex(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2][IN][OUT]
# id x y
# [EPA3][NOT SUPPORT]
#--------------------------------------------------------------
def inp_in_vertex(section: list[str]) -> ChangeSet:
vertices: dict[str, list[dict[str, float]]] = {}

View File

@@ -8,8 +8,6 @@ def get_junction_schema(name: str) -> dict[str, dict[str, Any]]:
'x' : {'type': 'float' , 'optional': False , 'readonly': False},
'y' : {'type': 'float' , 'optional': False , 'readonly': False},
'elevation' : {'type': 'float' , 'optional': False , 'readonly': False},
'demand' : {'type': 'float' , 'optional': True , 'readonly': False},
'pattern' : {'type': 'str' , 'optional': True , 'readonly': False},
'links' : {'type': 'str_list' , 'optional': False , 'readonly': True } }
@@ -21,8 +19,6 @@ def get_junction(name: str, id: str) -> dict[str, Any]:
d['x'] = float(xy['x'])
d['y'] = float(xy['y'])
d['elevation'] = float(j['elevation'])
d['demand'] = float(j['demand']) if j['demand'] != None else None
d['pattern'] = str(j['pattern']) if j['pattern'] != None else None
d['links'] = get_node_links(name, id)
return d
@@ -34,18 +30,14 @@ class Junction(object):
self.x = float(input['x'])
self.y = float(input['y'])
self.elevation = float(input['elevation'])
self.demand = float(input['demand']) if 'demand' in input and input['demand'] != None else None
self.pattern = str(input['pattern']) if 'pattern' in input and input['pattern'] != None else None
self.f_type = f"'{self.type}'"
self.f_id = f"'{self.id}'"
self.f_coord = f"'({self.x}, {self.y})'"
self.f_elevation = self.elevation
self.f_demand = self.demand if self.demand != None else 'null'
self.f_pattern = f"'{self.pattern}'" if self.pattern != None else 'null'
def as_dict(self) -> dict[str, Any]:
return { 'type': self.type, 'id': self.id, 'x': self.x, 'y': self.y, 'elevation': self.elevation, 'demand': self.demand, 'pattern': self.pattern }
return { 'type': self.type, 'id': self.id, 'x': self.x, 'y': self.y, 'elevation': self.elevation }
def as_id_dict(self) -> dict[str, Any]:
return { 'type': self.type, 'id': self.id }
@@ -62,11 +54,11 @@ def set_junction_cmd(name: str, cs: ChangeSet) -> DbChangeSet:
raw_new[key] = new_dict[key]
new = Junction(raw_new)
redo_sql = f"update junctions set elevation = {new.f_elevation}, demand = {new.f_demand}, pattern = {new.f_pattern} where id = {new.f_id};"
redo_sql = f"update junctions set elevation = {new.f_elevation} where id = {new.f_id};"
redo_sql += f"\nupdate coordinates set coord = {new.f_coord} where node = {new.f_id};"
undo_sql = f"update coordinates set coord = {old.f_coord} where node = {old.f_id};"
undo_sql += f"\nupdate junctions set elevation = {old.f_elevation}, demand = {old.f_demand}, pattern = {old.f_pattern} where id = {old.f_id};"
undo_sql += f"\nupdate junctions set elevation = {old.f_elevation} where id = {old.f_id};"
redo_cs = g_update_prefix | new.as_dict()
undo_cs = g_update_prefix | old.as_dict()
@@ -82,7 +74,7 @@ def add_junction_cmd(name: str, cs: ChangeSet) -> DbChangeSet:
new = Junction(cs.operations[0])
redo_sql = f"insert into _node (id, type) values ({new.f_id}, {new.f_type});"
redo_sql += f"\ninsert into junctions (id, elevation, demand, pattern) values ({new.f_id}, {new.f_elevation}, {new.f_demand}, {new.f_pattern});"
redo_sql += f"\ninsert into junctions (id, elevation) values ({new.f_id}, {new.f_elevation});"
redo_sql += f"\ninsert into coordinates (node, coord) values ({new.f_id}, {new.f_coord});"
undo_sql = f"delete from coordinates where node = {new.f_id};"
@@ -107,7 +99,7 @@ def delete_junction_cmd(name: str, cs: ChangeSet) -> DbChangeSet:
redo_sql += f"\ndelete from _node where id = {old.f_id};"
undo_sql = f"insert into _node (id, type) values ({old.f_id}, {old.f_type});"
undo_sql += f"\ninsert into junctions (id, elevation, demand, pattern) values ({old.f_id}, {old.f_elevation}, {old.f_demand}, {old.f_pattern});"
undo_sql += f"\ninsert into junctions (id, elevation) values ({old.f_id}, {old.f_elevation});"
undo_sql += f"\ninsert into coordinates (node, coord) values ({old.f_id}, {old.f_coord});"
redo_cs = g_delete_prefix | old.as_id_dict()
@@ -121,12 +113,18 @@ def delete_junction(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2]
# [EPA2]
# [IN]
# id elev. (demand) (demand pattern) ;desc
# [OUT]
# id elev. ;desc
#--------------------------------------------------------------
# [EPA3]
# [IN]
# id elev. (demand) (demand pattern)
# [OUT]
# id elev. * * minpressure fullpressure
#--------------------------------------------------------------
class InpJunction:
def __init__(self, line: str) -> None:
tokens = line.split()
@@ -150,6 +148,7 @@ def inp_in_junction(section: list[str]) -> ChangeSet:
continue
obj = InpJunction(s)
cs.append(g_add_prefix | {'type': 'junction', 'id': obj.id, 'elevation': obj.elevation, 'demand': obj.demand, 'pattern': obj.pattern})
cs.append(g_update_prefix | { 'type': 'demand', 'junction': obj.id, 'demands': [{'demand': obj.demand, 'pattern': obj.pattern, 'category': None}] })
return cs
@@ -159,18 +158,6 @@ def inp_out_junction(name: str) -> list[str]:
for obj in objs:
id = obj['id']
elev = obj['elevation']
demand = obj['demand'] if obj['demand'] != None else ''
pattern = obj['pattern'] if obj['pattern'] != None else ''
desc = ';'
lines.append(f'{id} {elev} {demand} {pattern} {desc}')
lines.append(f'{id} {elev} {desc}')
return lines
def unset_junction_by_pattern(name: str, pattern: str) -> ChangeSet:
cs = ChangeSet()
rows = read_all(name, f"select id from junctions where pattern = '{pattern}'")
for row in rows:
cs.append(g_update_prefix | {'type': 'junction', 'id': row['id'], 'pattern': None})
return cs

View File

@@ -117,7 +117,7 @@ def delete_reservoir(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2][EPA3][IN][OUT]
# id elev (pattern) ;desc
#--------------------------------------------------------------
class InpReservoir:

View File

@@ -145,10 +145,16 @@ def delete_tank(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2]
# [IN]
# id elev initlevel minlevel maxlevel diam (minvol vcurve overflow) ;desc
# xxx
# * YES
# [OUT]
# id elev initlevel minlevel maxlevel diam minvol (vcurve overflow) ;desc
#--------------------------------------------------------------
# [EPA3]
# id elev initlevel minlevel maxlevel diam minvol (vcurve)
#--------------------------------------------------------------
class InpTank:
def __init__(self, line: str) -> None:
@@ -164,7 +170,7 @@ class InpTank:
self.min_level = float(tokens[3])
self.max_level = float(tokens[4])
self.diameter = float(tokens[5])
self.min_vol = float(tokens[6])
self.min_vol = float(tokens[6]) if num_without_desc >= 7 else 0.0
self.vol_curve = str(tokens[7]) if num_without_desc >= 8 and tokens[7] != '*' else None
self.overflow = str(tokens[8].upper()) if num_without_desc >= 9 else None
self.desc = str(tokens[-1]) if has_desc else None

View File

@@ -124,7 +124,7 @@ def delete_pipe(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2]
# [EPA2][EPA3]
# [IN]
# id node1 node2 length diam rcoeff (lcoeff status) ;desc
# [OUT]

View File

@@ -115,7 +115,7 @@ def delete_pump(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2][EPA3][IN][OUT]
# id node1 node2 KEYWORD value {KEYWORD value ...} ;desc
# where KEYWORD = [POWER,HEAD,PATTERN,SPEED]
#--------------------------------------------------------------

View File

@@ -16,7 +16,7 @@ def get_valve_schema(name: str) -> dict[str, dict[str, Any]]:
'node2' : {'type': 'str' , 'optional': False , 'readonly': False},
'diameter' : {'type': 'float' , 'optional': False , 'readonly': False},
'v_type' : {'type': 'str' , 'optional': False , 'readonly': False},
'setting' : {'type': 'float' , 'optional': False , 'readonly': False},
'setting' : {'type': 'str' , 'optional': False , 'readonly': False},
'minor_loss' : {'type': 'float' , 'optional': False , 'readonly': False} }
@@ -28,7 +28,7 @@ def get_valve(name: str, id: str) -> dict[str, Any]:
d['node2'] = str(p['node2'])
d['diameter'] = float(p['diameter'])
d['v_type'] = str(p['type'])
d['setting'] = float(p['setting'])
d['setting'] = str(p['setting'])
d['minor_loss'] = float(p['minor_loss'])
return d
@@ -41,7 +41,7 @@ class Valve(object):
self.node2 = str(input['node2'])
self.diameter = float(input['diameter'])
self.v_type = str(input['v_type'])
self.setting = float(input['setting'])
self.setting = str(input['setting'])
self.minor_loss = float(input['minor_loss'])
self.f_type = f"'{self.type}'"
@@ -50,7 +50,7 @@ class Valve(object):
self.f_node2 = f"'{self.node2}'"
self.f_diameter = self.diameter
self.f_v_type = f"'{self.v_type}'"
self.f_setting = self.setting
self.f_setting = f"'{self.setting}'"
self.f_minor_loss = self.minor_loss
def as_dict(self) -> dict[str, Any]:
@@ -123,10 +123,10 @@ def delete_valve(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2][EPA3][IN][OUT]
# id node1 node2 diam type setting (lcoeff lcurve)
# for GPV, setting is string = head curve id
# for PCV, add loss curve if present
# [NOT SUPPORT] for PCV, add loss curve if present
#--------------------------------------------------------------
class InpValve:
def __init__(self, line: str) -> None:
@@ -141,7 +141,7 @@ class InpValve:
self.node2 = str(tokens[2])
self.diameter = float(tokens[3])
self.v_type = str(tokens[4].upper())
self.setting = float(tokens[5])
self.setting = str(tokens[5])
self.minor_loss = float(tokens[6])
self.desc = str(tokens[-1]) if has_desc else None

View File

@@ -1,5 +1,4 @@
from .database import *
from .s2_junctions import *
def get_demand_schema(name: str) -> dict[str, dict[str, Any]]:
return { 'junction' : {'type': 'str' , 'optional': False , 'readonly': True },
@@ -40,7 +39,7 @@ def set_demand_cmd(name: str, cs: ChangeSet) -> DbChangeSet:
redo_sql += f"\ninsert into demands (junction, demand, pattern, category) values ({f_junction}, {f_demand}, {f_pattern}, {f_category});"
new['demands'].append({ 'demand': demand, 'pattern': pattern, 'category': category })
_undo_sql = f"delete from demands where junction = {f_junction};"
undo_sql = f"delete from demands where junction = {f_junction};"
for r in old['demands']:
demand = float(r['demand'])
pattern = str(r['pattern']) if 'pattern' in r and r['pattern'] != None else None
@@ -48,37 +47,12 @@ def set_demand_cmd(name: str, cs: ChangeSet) -> DbChangeSet:
f_demand = demand
f_pattern = f"'{pattern}'" if pattern != None else 'null'
f_category = f"'{category}'" if category != None else 'null'
_undo_sql += f"\ninsert into demands (junction, demand, pattern, category) values ({f_junction}, {f_demand}, {f_pattern}, {f_category});"
undo_sql += f"\ninsert into demands (junction, demand, pattern, category) values ({f_junction}, {f_demand}, {f_pattern}, {f_category});"
redo_cs = []
redo_cs.append(g_update_prefix | { 'type': 'demand' } | new)
undo_cs = []
undo_cs.append(g_update_prefix | { 'type': 'demand' } | old)
redo_cs = g_update_prefix | { 'type': 'demand' } | new
undo_cs = g_update_prefix | { 'type': 'demand' } | old
cmd = None
if len(cs.operations[0]['demands']) > 0:
r = cs.operations[0]['demands'][0]
demand = float(r['demand'])
pattern = str(r['pattern']) if 'pattern' in r and r['pattern'] != None else None
cmd = set_junction_cmd(name, ChangeSet({'id': junction, 'demand': demand, 'pattern': pattern}))
else:
cmd = set_junction_cmd(name, ChangeSet({'id': junction, 'demand': None, 'pattern': None}))
undo_sql = ''
if cmd != None:
redo_sql += '\n'
redo_sql += cmd.redo_sql
undo_sql += cmd.undo_sql
undo_sql += '\n'
undo_sql += _undo_sql
redo_cs += cmd.redo_cs
undo_cs += cmd.undo_cs
undo_cs.reverse()
return DbChangeSet(redo_sql, undo_sql, redo_cs, undo_cs)
return DbChangeSet(redo_sql, undo_sql, [redo_cs], [undo_cs])
def set_demand(name: str, cs: ChangeSet) -> ChangeSet:
@@ -86,7 +60,7 @@ def set_demand(name: str, cs: ChangeSet) -> ChangeSet:
#--------------------------------------------------------------
# [EPANET2][IN][OUT]
# [EPA2][EPA3][IN][OUT]
# node base_demand (pattern) ;category
#--------------------------------------------------------------
class InpDemand:
@@ -123,29 +97,6 @@ def inp_in_demand(section: list[str]) -> ChangeSet:
return cs
def fill_demand(junction_cs : ChangeSet, demand_cs : ChangeSet) -> ChangeSet:
cs = ChangeSet()
for j_cs in junction_cs.operations:
if 'demand' not in j_cs:
continue
in_demand = False
for d_cs in demand_cs.operations:
if j_cs['id'] == d_cs['junction']:
in_demand = True
break
if not in_demand:
obj_cs : dict[str, Any] = g_update_prefix | {'type': 'demand', 'junction' : j_cs['id'], 'demands' : []}
j_demand = j_cs['demand']
j_pattern = j_cs['pattern'] if 'pattern' in j_cs else None
obj_cs['demands'].append({'demand': j_demand, 'pattern' : j_pattern, 'category': None})
cs.append(obj_cs)
return cs
def inp_out_demand(name: str) -> list[str]:
lines = []
objs = read_all(name, f"select * from demands order by _order")
@@ -168,11 +119,11 @@ def delete_demand_by_junction(name: str, junction: str) -> ChangeSet:
def unset_demand_by_pattern(name: str, pattern: str) -> ChangeSet:
cs = ChangeSet()
rows = read_all(name, f"select distinct id from junctions where pattern = '{pattern}'")
rows = read_all(name, f"select distinct junction from demands where pattern = '{pattern}'")
for row in rows:
ds = get_demand(name, row['id'])
ds = get_demand(name, row['junction'])
for d in ds['demands']:
d['pattern'] = None
cs.append(g_update_prefix | {'type': 'demand', 'junction': row['id'], 'demands': ds['demands']})
cs.append(g_update_prefix | {'type': 'demand', 'junction': row['junction'], 'demands': ds['demands']})
return cs

View File

@@ -24,6 +24,7 @@ s20_mixing = 'mixing'
s21_time = 'time'
s22_report = 'report'
s23_option = 'option'
s23_option_v3 = 'option_v3'
s24_coordinate = 'coordinate'
s25_vertex = 'vertex'
s26_label = 'label'

View File

@@ -4,8 +4,4 @@ create table junctions
(
id varchar(32) primary key references _node(id)
, elevation numeric not null
, demand numeric
, pattern varchar(32) references _pattern(id)
);
-- unset pattern when delete pattern

View File

@@ -12,7 +12,7 @@ insert into report (key, value) values
, ('STATUS', 'FULL')
, ('SUMMARY', 'YES')
, ('MESSAGES', 'YES')
, ('ENERY', 'YES')
, ('ENERGY', 'YES')
, ('NODES', 'ALL')
, ('LINKS', 'ALL')
;

View File

@@ -10,27 +10,68 @@ create table options
insert into options (key, value) values
('UNITS', 'GPM')
, ('PRESSURE', 'PSI')
, ('HEADLOSS', 'H-W')
--, ('HYDRAULICS', '')
, ('QUALITY', 'NONE')
, ('UNBALANCED', 'STOP')
, ('PATTERN', '1')
, ('DEMAND MODEL', 'DDA')
, ('DEMAND MULTIPLIER', '1.0')
, ('EMITTER EXPONENT', '0.5')
, ('VISCOSITY', '1.0')
, ('DIFFUSIVITY', '1.0')
, ('SPECIFIC GRAVITY', '1.0')
, ('TRIALS', '40')
, ('ACCURACY', '0.001')
, ('FLOWCHANGE', '0')
, ('HEADERROR', '0')
, ('HEADERROR', '0.0')
, ('FLOWCHANGE', '0.0')
, ('MINIMUM PRESSURE', '0.0')
, ('REQUIRED PRESSURE', '0.1')
, ('PRESSURE EXPONENT', '0.5')
, ('TOLERANCE', '0.01')
, ('HTOL', '0.0005')
, ('QTOL', '0.0001')
, ('RQTOL', '0.0000001')
, ('CHECKFREQ', '2')
, ('MAXCHECK', '10')
, ('DAMPLIMIT', '0')
, ('UNBALANCED', 'STOP')
, ('DEMAND MODEL', 'DDA')
, ('MINIMUM PRESSURE', '0')
, ('REQUIRED PRESSURE', '0.1')
, ('PRESSURE EXPONENT', '0.5')
, ('PATTERN', '1')
, ('DEMAND MULTIPLIER', '1.0')
, ('EMITTER EXPONENT', '0.5')
, ('QUALITY', 'NONE')
, ('DIFFUSIVITY', '1.0')
, ('TOLERANCE', '0.01')
--, ('MAP', '')
;
create table options_v3
(
key text primary key
, value text not null
);
insert into options_v3 (key, value) values
('FLOW_UNITS', 'GPM')
, ('PRESSURE_UNITS', 'PSI')
, ('HEADLOSS_MODEL', 'H-W')
, ('SPECIFIC_GRAVITY', '1.0')
, ('SPECIFIC_VISCOSITY', '1.0')
, ('MAXIMUM_TRIALS', '40')
, ('HEAD_TOLERANCE', '0.0005')
, ('FLOW_TOLERANCE', '0.0001')
, ('FLOW_CHANGE_LIMIT', '0.0')
, ('RELATIVE_ACCURACY', '0.001')
, ('TIME_WEIGHT', '0.0')
, ('STEP_SIZING', 'FULL')
, ('IF_UNBALANCED', 'STOP')
, ('DEMAND_MODEL', 'FIXED')
, ('DEMAND_PATTERN', '1')
, ('DEMAND_MULTIPLIER', '1.0')
, ('MINIMUM_PRESSURE', '0.0')
, ('SERVICE_PRESSURE', '0.1')
, ('PRESSURE_EXPONENT', '0.5')
, ('LEAKAGE_MODEL', 'NONE')
, ('LEAKAGE_COEFF1', '0.0')
, ('LEAKAGE_COEFF2', '0.0')
, ('EMITTER_EXPONENT', '0.5')
, ('QUALITY_MODEL', 'NONE')
, ('QUALITY_NAME', 'CHEMICAL')
, ('QUALITY_UNITS', 'MG/L')
, ('TRACE_NODE', '')
, ('SPECIFIC_DIFFUSIVITY', '1.0')
, ('QUALITY_TOLERANCE', '0.01')
;

View File

@@ -9,7 +9,7 @@ create table valves
, node2 varchar(32) references _node(id) not null
, diameter numeric not null
, type valves_type not null
, setting numeric not null
, setting text not null
, minor_loss numeric not null
);

View File

@@ -1,3 +1,5 @@
-- [OPTIONS]
drop table if exists options_v3;
drop table if exists options;

File diff suppressed because it is too large Load Diff

View File

@@ -7,7 +7,7 @@ import epanet
# ChangeSet
############################################################
API_ADD = api.API_ADD
API_ADD = api.API_ADD
API_UPDATE = api.API_UPDATE
API_DELETE = api.API_DELETE
@@ -18,21 +18,21 @@ ChangeSet = api.ChangeSet
# enum
############################################################
JUNCTION = api.JUNCTION
JUNCTION = api.JUNCTION
RESERVOIR = api.RESERVOIR
TANK = api.TANK
PIPE = api.PIPE
PUMP = api.PUMP
VALVE = api.VALVE
PATTERN = api.PATTERN
CURVE = api.CURVE
TANK = api.TANK
PIPE = api.PIPE
PUMP = api.PUMP
VALVE = api.VALVE
PATTERN = api.PATTERN
CURVE = api.CURVE
OVERFLOW_YES = api.OVERFLOW_YES
OVERFLOW_NO = api.OVERFLOW_NO
OVERFLOW_NO = api.OVERFLOW_NO
PIPE_STATUS_OPEN = api.PIPE_STATUS_OPEN
PIPE_STATUS_OPEN = api.PIPE_STATUS_OPEN
PIPE_STATUS_CLOSED = api.PIPE_STATUS_CLOSED
PIPE_STATUS_CV = api.PIPE_STATUS_CV
PIPE_STATUS_CV = api.PIPE_STATUS_CV
VALVES_TYPE_PRV = api.VALVES_TYPE_PRV
VALVES_TYPE_PSV = api.VALVES_TYPE_PSV
@@ -44,64 +44,113 @@ VALVES_TYPE_GPV = api.VALVES_TYPE_GPV
TAG_TYPE_NODE = api.TAG_TYPE_NODE
TAG_TYPE_LINK = api.TAG_TYPE_LINK
LINK_STATUS_OPEN = api.LINK_STATUS_OPEN
LINK_STATUS_OPEN = api.LINK_STATUS_OPEN
LINK_STATUS_CLOSED = api.LINK_STATUS_CLOSED
LINK_STATUS_ACTIVE = api.LINK_STATUS_ACTIVE
CURVE_TYPE_PUMP = api.CURVE_TYPE_PUMP
CURVE_TYPE_PUMP = api.CURVE_TYPE_PUMP
CURVE_TYPE_EFFICIENCY = api.CURVE_TYPE_EFFICIENCY
CURVE_TYPE_VOLUME = api.CURVE_TYPE_VOLUME
CURVE_TYPE_HEADLOSS = api.CURVE_TYPE_HEADLOSS
CURVE_TYPE_VOLUME = api.CURVE_TYPE_VOLUME
CURVE_TYPE_HEADLOSS = api.CURVE_TYPE_HEADLOSS
SOURCE_TYPE_CONCEN = api.SOURCE_TYPE_CONCEN
SOURCE_TYPE_MASS = api.SOURCE_TYPE_MASS
SOURCE_TYPE_CONCEN = api.SOURCE_TYPE_CONCEN
SOURCE_TYPE_MASS = api.SOURCE_TYPE_MASS
SOURCE_TYPE_FLOWPACED = api.SOURCE_TYPE_FLOWPACED
SOURCE_TYPE_SETPOINT = api.SOURCE_TYPE_SETPOINT
SOURCE_TYPE_SETPOINT = api.SOURCE_TYPE_SETPOINT
MIXING_MODEL_MIXED = api.MIXING_MODEL_MIXED
MIXING_MODEL_2COMP = api.MIXING_MODEL_2COMP
MIXING_MODEL_FIFO = api.MIXING_MODEL_FIFO
MIXING_MODEL_LIFO = api.MIXING_MODEL_LIFO
MIXING_MODEL_FIFO = api.MIXING_MODEL_FIFO
MIXING_MODEL_LIFO = api.MIXING_MODEL_LIFO
TIME_STATISTIC_NONE = api.TIME_STATISTIC_NONE
TIME_STATISTIC_NONE = api.TIME_STATISTIC_NONE
TIME_STATISTIC_AVERAGED = api.TIME_STATISTIC_AVERAGED
TIME_STATISTIC_MINIMUM = api.TIME_STATISTIC_MINIMUM
TIME_STATISTIC_MAXIMUM = api.TIME_STATISTIC_MAXIMUM
TIME_STATISTIC_RANGE = api.TIME_STATISTIC_RANGE
TIME_STATISTIC_MINIMUM = api.TIME_STATISTIC_MINIMUM
TIME_STATISTIC_MAXIMUM = api.TIME_STATISTIC_MAXIMUM
TIME_STATISTIC_RANGE = api.TIME_STATISTIC_RANGE
OPTION_UNITS_CFS = api.OPTION_UNITS_CFS
OPTION_UNITS_GPM = api.OPTION_UNITS_GPM
OPTION_UNITS_MGD = api.OPTION_UNITS_MGD
OPTION_UNITS_CFS = api.OPTION_UNITS_CFS
OPTION_UNITS_GPM = api.OPTION_UNITS_GPM
OPTION_UNITS_MGD = api.OPTION_UNITS_MGD
OPTION_UNITS_IMGD = api.OPTION_UNITS_IMGD
OPTION_UNITS_AFD = api.OPTION_UNITS_AFD
OPTION_UNITS_LPS = api.OPTION_UNITS_LPS
OPTION_UNITS_LPM = api.OPTION_UNITS_LPM
OPTION_UNITS_MLD = api.OPTION_UNITS_MLD
OPTION_UNITS_CMH = api.OPTION_UNITS_CMH
OPTION_UNITS_CMD = api.OPTION_UNITS_CMD
OPTION_UNITS_AFD = api.OPTION_UNITS_AFD
OPTION_UNITS_LPS = api.OPTION_UNITS_LPS
OPTION_UNITS_LPM = api.OPTION_UNITS_LPM
OPTION_UNITS_MLD = api.OPTION_UNITS_MLD
OPTION_UNITS_CMH = api.OPTION_UNITS_CMH
OPTION_UNITS_CMD = api.OPTION_UNITS_CMD
OPTION_PRESSURE_PSI = api.OPTION_PRESSURE_PSI
OPTION_PRESSURE_KPA = api.OPTION_PRESSURE_KPA
OPTION_PRESSURE_M = api.OPTION_PRESSURE_M
OPTION_HEADLOSS_HW = api.OPTION_HEADLOSS_HW
OPTION_HEADLOSS_DW = api.OPTION_HEADLOSS_DW
OPTION_HEADLOSS_CM = api.OPTION_HEADLOSS_CM
OPTION_UNBALANCED_STOP = api.OPTION_UNBALANCED_STOP
OPTION_UNBALANCED_STOP = api.OPTION_UNBALANCED_STOP
OPTION_UNBALANCED_CONTINUE = api.OPTION_UNBALANCED_CONTINUE
OPTION_DEMAND_MODEL_DDA = api.OPTION_DEMAND_MODEL_DDA
OPTION_DEMAND_MODEL_PDA = api.OPTION_DEMAND_MODEL_PDA
OPTION_QUALITY_NONE = api.OPTION_QUALITY_NONE
OPTION_QUALITY_NONE = api.OPTION_QUALITY_NONE
OPTION_QUALITY_CHEMICAL = api.OPTION_QUALITY_CHEMICAL
OPTION_QUALITY_AGE = api.OPTION_QUALITY_AGE
OPTION_QUALITY_TRACE = api.OPTION_QUALITY_TRACE
OPTION_QUALITY_AGE = api.OPTION_QUALITY_AGE
OPTION_QUALITY_TRACE = api.OPTION_QUALITY_TRACE
OPTION_V3_FLOW_UNITS_CFS = api.OPTION_V3_FLOW_UNITS_CFS
OPTION_V3_FLOW_UNITS_GPM = api.OPTION_V3_FLOW_UNITS_GPM
OPTION_V3_FLOW_UNITS_MGD = api.OPTION_V3_FLOW_UNITS_MGD
OPTION_V3_FLOW_UNITS_IMGD = api.OPTION_V3_FLOW_UNITS_IMGD
OPTION_V3_FLOW_UNITS_AFD = api.OPTION_V3_FLOW_UNITS_AFD
OPTION_V3_FLOW_UNITS_LPS = api.OPTION_V3_FLOW_UNITS_LPS
OPTION_V3_FLOW_UNITS_LPM = api.OPTION_V3_FLOW_UNITS_LPM
OPTION_V3_FLOW_UNITS_MLD = api.OPTION_V3_FLOW_UNITS_MLD
OPTION_V3_FLOW_UNITS_CMH = api.OPTION_V3_FLOW_UNITS_CMH
OPTION_V3_FLOW_UNITS_CMD = api.OPTION_V3_FLOW_UNITS_CMD
OPTION_V3_PRESSURE_UNITS_PSI = api.OPTION_V3_PRESSURE_UNITS_PSI
OPTION_V3_PRESSURE_UNITS_KPA = api.OPTION_V3_PRESSURE_UNITS_KPA
OPTION_V3_PRESSURE_UNITS_M = api.OPTION_V3_PRESSURE_UNITS_M
OPTION_V3_HEADLOSS_MODEL_HW = api.OPTION_V3_HEADLOSS_MODEL_HW
OPTION_V3_HEADLOSS_MODEL_DW = api.OPTION_V3_HEADLOSS_MODEL_DW
OPTION_V3_HEADLOSS_MODEL_CM = api.OPTION_V3_HEADLOSS_MODEL_CM
OPTION_V3_STEP_SIZING_FULL = api.OPTION_V3_STEP_SIZING_FULL
OPTION_V3_STEP_SIZING_RELAXATION = api.OPTION_V3_STEP_SIZING_RELAXATION
OPTION_V3_STEP_SIZING_LINESEARCH = api.OPTION_V3_STEP_SIZING_LINESEARCH
OPTION_V3_IF_UNBALANCED_STOP = api.OPTION_V3_IF_UNBALANCED_STOP
OPTION_V3_IF_UNBALANCED_CONTINUE = api.OPTION_V3_IF_UNBALANCED_CONTINUE
OPTION_V3_DEMAND_MODEL_FIXED = api.OPTION_V3_DEMAND_MODEL_FIXED
OPTION_V3_DEMAND_MODEL_CONSTRAINED = api.OPTION_V3_DEMAND_MODEL_CONSTRAINED
OPTION_V3_DEMAND_MODEL_POWER = api.OPTION_V3_DEMAND_MODEL_POWER
OPTION_V3_DEMAND_MODEL_LOGISTIC = api.OPTION_V3_DEMAND_MODEL_LOGISTIC
OPTION_V3_LEAKAGE_MODEL_NONE = api.OPTION_V3_LEAKAGE_MODEL_NONE
OPTION_V3_LEAKAGE_MODEL_POWER = api.OPTION_V3_LEAKAGE_MODEL_POWER
OPTION_V3_LEAKAGE_MODEL_FAVAD = api.OPTION_V3_LEAKAGE_MODEL_FAVAD
OPTION_V3_QUALITY_MODEL_NONE = api.OPTION_V3_QUALITY_MODEL_NONE
OPTION_V3_QUALITY_MODEL_CHEMICAL = api.OPTION_V3_QUALITY_MODEL_CHEMICAL
OPTION_V3_QUALITY_MODEL_AGE = api.OPTION_V3_QUALITY_MODEL_AGE
OPTION_V3_QUALITY_MODEL_TRACE = api.OPTION_V3_QUALITY_MODEL_TRACE
OPTION_V3_QUALITY_UNITS_HRS = api.OPTION_V3_QUALITY_UNITS_HRS
OPTION_V3_QUALITY_UNITS_PCNT = api.OPTION_V3_QUALITY_UNITS_PCNT
OPTION_V3_QUALITY_UNITS_MGL = api.OPTION_V3_QUALITY_UNITS_MGL
OPTION_V3_QUALITY_UNITS_UGL = api.OPTION_V3_QUALITY_UNITS_UGL
SCADA_DEVICE_TYPE_PRESSURE = api.SCADA_DEVICE_TYPE_PRESSURE
SCADA_DEVICE_TYPE_DEMAND = api.SCADA_DEVICE_TYPE_DEMAND
SCADA_DEVICE_TYPE_QUALITY = api.SCADA_DEVICE_TYPE_QUALITY
SCADA_DEVICE_TYPE_LEVEL = api.SCADA_DEVICE_TYPE_LEVEL
SCADA_DEVICE_TYPE_FLOW = api.SCADA_DEVICE_TYPE_FLOW
SCADA_DEVICE_TYPE_DEMAND = api.SCADA_DEVICE_TYPE_DEMAND
SCADA_DEVICE_TYPE_QUALITY = api.SCADA_DEVICE_TYPE_QUALITY
SCADA_DEVICE_TYPE_LEVEL = api.SCADA_DEVICE_TYPE_LEVEL
SCADA_DEVICE_TYPE_FLOW = api.SCADA_DEVICE_TYPE_FLOW
SCADA_ELEMENT_STATUS_ONLINE = api.SCADA_ELEMENT_STATUS_ONLINE
SCADA_ELEMENT_STATUS_ONLINE = api.SCADA_ELEMENT_STATUS_ONLINE
SCADA_ELEMENT_STATUS_OFFLINE = api.SCADA_ELEMENT_STATUS_OFFLINE
@@ -708,6 +757,20 @@ def set_option(name: str, cs: ChangeSet) -> ChangeSet:
return api.set_option(name, cs)
############################################################
# option_v3 23.[EPA3][OPTIONS]
############################################################
def get_option_v3_schema(name: str) -> dict[str, dict[str, Any]]:
return api.get_option_v3_schema(name)
def get_option_v3(name: str) -> dict[str, Any]:
return api.get_option_v3(name)
def set_option_v3(name: str, cs: ChangeSet) -> ChangeSet:
return api.set_option_v3(name, cs)
############################################################
# coord 24.[COORDINATES]
############################################################