Merge branch 'master' of https://e.coding.net/tjwater/tjwatercloud/TJWaterServer
This commit is contained in:
@@ -62,6 +62,16 @@ from .s16_emitters import get_emitter_schema, get_emitter, set_emitter
|
||||
|
||||
from .s17_quality import get_quality_schema, get_quality, set_quality
|
||||
|
||||
from .s18_sources import SOURCE_TYPE_CONCEN, SOURCE_TYPE_MASS, SOURCE_TYPE_FLOWPACED, SOURCE_TYPE_SETPOINT
|
||||
from .s18_sources import get_source_schema, get_source, set_source, add_source, delete_source
|
||||
|
||||
from .s19_reactions import get_global_reaction_schema, get_global_reaction, set_global_reaction
|
||||
from .s19_reactions import get_pipe_reaction_schema, get_pipe_reaction, set_pipe_reaction
|
||||
from .s19_reactions import get_tank_reaction_schema, get_tank_reaction, set_tank_reaction
|
||||
|
||||
from .s20_mixing import MIXING_MODEL_MIXED, MIXING_MODEL_2COMP, MIXING_MODEL_FIFO, MIXING_MODEL_LIFO
|
||||
from .s20_mixing import get_mixing_schema, get_mixing, set_mixing, add_mixing, delete_mixing
|
||||
|
||||
from .s21_times import TIME_STATISTIC_NONE, TIME_STATISTIC_AVERAGED, TIME_STATISTIC_MINIMUM, TIME_STATISTIC_MAXIMUM, TIME_STATISTIC_RANGE
|
||||
from .s21_times import get_time_schema, get_time, set_time
|
||||
|
||||
|
||||
@@ -87,8 +87,8 @@ class PumpEnergy(object):
|
||||
self.pattern = str(input['pattern']) if 'pattern' in input and input['pattern'] != None else None
|
||||
self.effic = str(input['effic']) if 'effic' in input and input['effic'] != None else None
|
||||
|
||||
self.f_pump = f"'{self.pump}'"
|
||||
self.f_type = f"'{self.type}'"
|
||||
self.f_pump = f"'{self.pump}'"
|
||||
self.f_price = self.price if self.price != None else 'null'
|
||||
self.f_pattern = f"'{self.pattern}'" if self.pattern != None else 'null'
|
||||
self.f_effic = f"'{self.effic}'" if self.effic != None else 'null'
|
||||
|
||||
100
api/s18_sources.py
Normal file
100
api/s18_sources.py
Normal file
@@ -0,0 +1,100 @@
|
||||
from .operation import *
|
||||
from .s0_base import *
|
||||
|
||||
SOURCE_TYPE_CONCEN = 'CONCEN'
|
||||
SOURCE_TYPE_MASS = 'MASS'
|
||||
SOURCE_TYPE_FLOWPACED = 'FLOWPACED'
|
||||
SOURCE_TYPE_SETPOINT = 'SETPOINT'
|
||||
|
||||
def get_source_schema(name: str) -> dict[str, dict[str, Any]]:
    """Return the field schema for a source record: type, optionality, writability."""
    schema = {}
    schema['node']     = {'type': 'str',   'optional': False, 'readonly': True}
    schema['s_type']   = {'type': 'str',   'optional': False, 'readonly': False}
    schema['strength'] = {'type': 'float', 'optional': False, 'readonly': False}
    schema['pattern']  = {'type': 'str',   'optional': True,  'readonly': False}
    return schema
|
||||
|
||||
|
||||
def get_source(name: str, node: str) -> dict[str, Any]:
    """Read the quality source attached to *node* and return it as a plain dict.

    Propagates whatever ``read`` raises when no source row exists for *node*.
    """
    # NOTE(review): node is interpolated directly into SQL — assumed to be a
    # validated identifier; confirm callers cannot pass untrusted input.
    s = read(name, f"select * from sources where node = '{node}'")
    return {
        'node':     str(s['node']),
        's_type':   str(s['type']),  # DB column is 'type'; API field is 's_type'
        'strength': float(s['strength']),
        # pattern column is nullable: keep None rather than the string 'None'.
        'pattern':  str(s['pattern']) if s['pattern'] is not None else None,
    }
|
||||
|
||||
|
||||
class Source(object):
    """Value object for a water-quality source attached to a node.

    Mirrors one row of the ``sources`` table; the ``f_*`` attributes hold the
    SQL-literal rendering of each field (quoted string, bare number, or ``null``).
    """

    def __init__(self, input: dict[str, Any]) -> None:
        self.type = 'source'
        self.node = str(input['node'])
        self.s_type = str(input['s_type'])
        self.strength = float(input['strength'])
        # pattern is optional: absent key and explicit None both mean "no pattern".
        raw_pattern = input.get('pattern')
        self.pattern = str(raw_pattern) if raw_pattern is not None else None

        self.f_type = f"'{self.type}'"
        self.f_node = f"'{self.node}'"
        self.f_s_type = f"'{self.s_type}'"
        self.f_strength = self.strength
        self.f_pattern = f"'{self.pattern}'" if self.pattern is not None else 'null'

    def as_dict(self) -> dict[str, Any]:
        """Full field dump, suitable for an add/update change-set payload."""
        return { 'type': self.type, 'node': self.node, 's_type': self.s_type, 'strength': self.strength, 'pattern': self.pattern }

    def as_id_dict(self) -> dict[str, Any]:
        """Identity-only dump (used for delete change-sets)."""
        return { 'type': self.type, 'node': self.node }
|
||||
|
||||
|
||||
def set_source_cache(name: str, cs: ChangeSet) -> SqlChangeSet:
    """Build the redo/undo SQL and change-sets for updating an existing source."""
    new_dict = cs.operations[0]
    # Single DB read (the original read the same row twice): snapshot the
    # current state, then overlay the writable fields from the request.
    raw_old = get_source(name, new_dict['node'])
    old = Source(raw_old)

    raw_new = dict(raw_old)
    for key, value in get_source_schema(name).items():
        if key in new_dict and not value['readonly']:
            raw_new[key] = new_dict[key]
    new = Source(raw_new)

    redo_sql = f"update sources set type = {new.f_s_type}, strength = {new.f_strength}, pattern = {new.f_pattern} where node = {new.f_node};"
    undo_sql = f"update sources set type = {old.f_s_type}, strength = {old.f_strength}, pattern = {old.f_pattern} where node = {old.f_node};"

    redo_cs = g_update_prefix | new.as_dict()
    undo_cs = g_update_prefix | old.as_dict()

    return SqlChangeSet(redo_sql, undo_sql, redo_cs, undo_cs)
|
||||
|
||||
|
||||
def set_source(name: str, cs: ChangeSet) -> ChangeSet:
    """Apply the source update described by *cs* and return the executed change set."""
    cached = set_source_cache(name, cs)
    return execute_command(name, cached)
|
||||
|
||||
|
||||
def add_source_cache(name: str, cs: ChangeSet) -> SqlChangeSet:
    """Build the redo/undo SQL and change-sets for inserting a new source."""
    new = Source(cs.operations[0])

    redo_sql = f"insert into sources (node, type, strength, pattern) values ({new.f_node}, {new.f_s_type}, {new.f_strength}, {new.f_pattern});"
    undo_sql = f"delete from sources where node = {new.f_node};"

    return SqlChangeSet(redo_sql,
                        undo_sql,
                        g_add_prefix | new.as_dict(),
                        g_delete_prefix | new.as_id_dict())
|
||||
|
||||
|
||||
def add_source(name: str, cs: ChangeSet) -> ChangeSet:
    """Insert the source described by *cs* and return the executed change set."""
    cached = add_source_cache(name, cs)
    return execute_command(name, cached)
|
||||
|
||||
|
||||
def delete_source_cache(name: str, cs: ChangeSet) -> SqlChangeSet:
    """Build the redo/undo SQL and change-sets for removing an existing source."""
    old = Source(get_source(name, cs.operations[0]['node']))

    redo_sql = f"delete from sources where node = {old.f_node};"
    undo_sql = f"insert into sources (node, type, strength, pattern) values ({old.f_node}, {old.f_s_type}, {old.f_strength}, {old.f_pattern});"

    return SqlChangeSet(redo_sql,
                        undo_sql,
                        g_delete_prefix | old.as_id_dict(),
                        g_add_prefix | old.as_dict())
|
||||
|
||||
|
||||
def delete_source(name: str, cs: ChangeSet) -> ChangeSet:
    """Delete the source identified by *cs* and return the executed change set."""
    cached = delete_source_cache(name, cs)
    return execute_command(name, cached)
|
||||
194
api/s19_reactions.py
Normal file
194
api/s19_reactions.py
Normal file
@@ -0,0 +1,194 @@
|
||||
from .operation import *
|
||||
|
||||
|
||||
def get_global_reaction_schema(name: str) -> dict[str, dict[str, Any]]:
    """Return the field schema for the singleton global-reaction record.

    Every field is an optional, writable float.
    """
    fields = ('order_bulk', 'order_wall', 'order_tank',
              'global_bulk', 'global_wall',
              'limiting_potential', 'roughness_correlation')
    return {f: {'type': 'float', 'optional': True, 'readonly': False} for f in fields}
|
||||
|
||||
|
||||
def get_global_reaction(name: str) -> dict[str, Any]:
    """Read the singleton global-reaction row and return it as a dict of optional floats."""
    # Plain string: the query has no interpolated values (the f-string was pointless).
    gr = read(name, "select * from reactions_global")
    keys = ('order_bulk', 'order_wall', 'order_tank',
            'global_bulk', 'global_wall',
            'limiting_potential', 'roughness_correlation')
    # Nullable numeric columns: preserve None, coerce everything else to float.
    return {k: float(gr[k]) if gr[k] is not None else None for k in keys}
|
||||
|
||||
|
||||
class GlobalReaction(object):
    """Value object for the global reaction settings.

    Every field is an optional float; each ``f_<field>`` attribute holds the
    SQL-literal rendering (the number itself, or the keyword ``null``).
    """

    _FIELDS = ('order_bulk', 'order_wall', 'order_tank',
               'global_bulk', 'global_wall',
               'limiting_potential', 'roughness_correlation')

    def __init__(self, input: dict[str, Any]) -> None:
        self.type = 'global_reaction'
        self.f_type = f"'{self.type}'"
        # All seven fields follow the same optional-float pattern, so build
        # them in a loop instead of seven copy-pasted lines.
        for field in self._FIELDS:
            raw = input.get(field)
            value = float(raw) if raw is not None else None
            setattr(self, field, value)
            setattr(self, 'f_' + field, value if value is not None else 'null')

    def as_dict(self) -> dict[str, Any]:
        """Full field dump, suitable for a change-set payload."""
        d = {'type': self.type}
        for field in self._FIELDS:
            d[field] = getattr(self, field)
        return d
|
||||
|
||||
|
||||
def set_global_reaction_cache(name: str, cs: ChangeSet) -> SqlChangeSet:
    """Build the redo/undo SQL and change-sets for updating the global reaction row."""
    new_dict = cs.operations[0]
    # Single DB read (the original read the same row twice): snapshot the
    # singleton row, then overlay the writable fields from the request.
    raw_old = get_global_reaction(name)
    old = GlobalReaction(raw_old)

    raw_new = dict(raw_old)
    for key, value in get_global_reaction_schema(name).items():
        if key in new_dict and not value['readonly']:
            raw_new[key] = new_dict[key]
    new = GlobalReaction(raw_new)

    redo_sql = f"update reactions_global set order_bulk = {new.f_order_bulk}, order_wall = {new.f_order_wall}, order_tank = {new.f_order_tank}, global_bulk = {new.f_global_bulk}, global_wall = {new.f_global_wall}, limiting_potential = {new.f_limiting_potential}, roughness_correlation = {new.f_roughness_correlation} where _no = 0;"
    undo_sql = f"update reactions_global set order_bulk = {old.f_order_bulk}, order_wall = {old.f_order_wall}, order_tank = {old.f_order_tank}, global_bulk = {old.f_global_bulk}, global_wall = {old.f_global_wall}, limiting_potential = {old.f_limiting_potential}, roughness_correlation = {old.f_roughness_correlation} where _no = 0;"

    redo_cs = g_update_prefix | new.as_dict()
    undo_cs = g_update_prefix | old.as_dict()

    return SqlChangeSet(redo_sql, undo_sql, redo_cs, undo_cs)
|
||||
|
||||
|
||||
def set_global_reaction(name: str, cs: ChangeSet) -> ChangeSet:
    """Apply the global-reaction update in *cs* and return the executed change set."""
    cached = set_global_reaction_cache(name, cs)
    return execute_command(name, cached)
|
||||
|
||||
|
||||
def get_pipe_reaction_schema(name: str) -> dict[str, dict[str, Any]]:
    """Return the field schema for a pipe reaction record."""
    schema = {}
    schema['pipe'] = {'type': 'str',   'optional': False, 'readonly': True}
    schema['bulk'] = {'type': 'float', 'optional': True,  'readonly': False}
    schema['wall'] = {'type': 'float', 'optional': True,  'readonly': False}
    return schema
|
||||
|
||||
|
||||
def get_pipe_reaction(name: str, pipe: str) -> dict[str, Any]:
    """Read the bulk/wall reaction coefficients for *pipe*.

    Coefficients live in separate one-row-per-pipe tables; a missing row
    yields None for that coefficient.
    """
    d = {'pipe': pipe}
    bulk = try_read(name, f"select * from reactions_pipe_bulk where pipe = '{pipe}'")
    d['bulk'] = float(bulk['value']) if bulk is not None else None
    wall = try_read(name, f"select * from reactions_pipe_wall where pipe = '{pipe}'")
    d['wall'] = float(wall['value']) if wall is not None else None
    return d
|
||||
|
||||
|
||||
class PipeReaction(object):
    """Value object combining a pipe's optional bulk and wall reaction coefficients.

    The ``f_*`` attributes hold the SQL-literal rendering of each field.
    """

    def __init__(self, input: dict[str, Any]) -> None:
        self.type = 'pipe_reaction'
        self.pipe = str(input['pipe'])
        # Both coefficients are optional: absent key or None means "unset".
        raw_bulk = input.get('bulk')
        self.bulk = float(raw_bulk) if raw_bulk is not None else None
        raw_wall = input.get('wall')
        self.wall = float(raw_wall) if raw_wall is not None else None

        self.f_type = f"'{self.type}'"
        self.f_pipe = f"'{self.pipe}'"
        self.f_bulk = self.bulk if self.bulk is not None else 'null'
        self.f_wall = self.wall if self.wall is not None else 'null'

    def as_dict(self) -> dict[str, Any]:
        """Full field dump, suitable for a change-set payload."""
        return { 'type': self.type, 'pipe': self.pipe, 'bulk': self.bulk, 'wall': self.wall }
|
||||
|
||||
|
||||
def set_pipe_reaction_cache(name: str, cs: ChangeSet) -> SqlChangeSet:
    """Build the redo/undo SQL and change-sets for updating a pipe's reaction coefficients.

    Bulk and wall coefficients live in separate one-row-per-pipe tables, so an
    update is expressed as delete-then-conditionally-insert for each table.
    """
    new_dict = cs.operations[0]
    # Single DB read (the original read the same rows twice): snapshot the
    # current state, then overlay the writable fields from the request.
    raw_old = get_pipe_reaction(name, new_dict['pipe'])
    old = PipeReaction(raw_old)

    raw_new = dict(raw_old)
    for key, value in get_pipe_reaction_schema(name).items():
        if key in new_dict and not value['readonly']:
            raw_new[key] = new_dict[key]
    new = PipeReaction(raw_new)

    def rebuild_sql(r: PipeReaction) -> str:
        # Reset both coefficient tables, then re-insert whichever values are set.
        sql = f"delete from reactions_pipe_bulk where pipe = {r.f_pipe};\ndelete from reactions_pipe_wall where pipe = {r.f_pipe};"
        if r.bulk is not None:
            sql += f"\ninsert into reactions_pipe_bulk (pipe, value) values ({r.f_pipe}, {r.f_bulk});"
        if r.wall is not None:
            sql += f"\ninsert into reactions_pipe_wall (pipe, value) values ({r.f_pipe}, {r.f_wall});"
        return sql

    redo_sql = rebuild_sql(new)
    undo_sql = rebuild_sql(old)

    redo_cs = g_update_prefix | new.as_dict()
    undo_cs = g_update_prefix | old.as_dict()

    return SqlChangeSet(redo_sql, undo_sql, redo_cs, undo_cs)
|
||||
|
||||
|
||||
def set_pipe_reaction(name: str, cs: ChangeSet) -> ChangeSet:
    """Apply the pipe-reaction update in *cs* and return the executed change set."""
    cached = set_pipe_reaction_cache(name, cs)
    return execute_command(name, cached)
|
||||
|
||||
|
||||
def get_tank_reaction_schema(name: str) -> dict[str, dict[str, Any]]:
    """Return the field schema for a tank reaction record."""
    schema = {}
    schema['tank']  = {'type': 'str',   'optional': False, 'readonly': True}
    schema['value'] = {'type': 'float', 'optional': True,  'readonly': False}
    return schema
|
||||
|
||||
|
||||
def get_tank_reaction(name: str, tank: str) -> dict[str, Any]:
    """Read the reaction coefficient for *tank*; a missing row yields value None."""
    d = {'tank': tank}
    row = try_read(name, f"select * from reactions_tank where tank = '{tank}'")
    d['value'] = float(row['value']) if row is not None else None
    return d
|
||||
|
||||
|
||||
class TankReaction(object):
    """Value object for one row of the ``reactions_tank`` table.

    ``value`` is an optional float; ``f_*`` attributes hold SQL-literal renderings.
    """

    def __init__(self, input: dict[str, Any]) -> None:
        self.type = 'tank_reaction'
        self.tank = str(input['tank'])
        # value is optional: absent key or None means "unset".
        raw = input.get('value')
        self.value = float(raw) if raw is not None else None

        self.f_type = f"'{self.type}'"
        self.f_tank = f"'{self.tank}'"
        self.f_value = self.value if self.value is not None else 'null'

    def as_dict(self) -> dict[str, Any]:
        """Full field dump, suitable for a change-set payload."""
        return { 'type': self.type, 'tank': self.tank, 'value': self.value }
|
||||
|
||||
|
||||
def set_tank_reaction_cache(name: str, cs: ChangeSet) -> SqlChangeSet:
    """Build the redo/undo SQL and change-sets for updating a tank's reaction coefficient."""
    new_dict = cs.operations[0]
    # Single DB read (the original read the same row twice): snapshot the
    # current state, then overlay the writable fields from the request.
    raw_old = get_tank_reaction(name, new_dict['tank'])
    old = TankReaction(raw_old)

    raw_new = dict(raw_old)
    for key, value in get_tank_reaction_schema(name).items():
        if key in new_dict and not value['readonly']:
            raw_new[key] = new_dict[key]
    new = TankReaction(raw_new)

    def rebuild_sql(r: TankReaction) -> str:
        # Delete-then-conditionally-insert: the table holds at most one row per tank.
        sql = f"delete from reactions_tank where tank = {r.f_tank};"
        if r.value is not None:
            sql += f"\ninsert into reactions_tank (tank, value) values ({r.f_tank}, {r.f_value});"
        return sql

    redo_sql = rebuild_sql(new)
    undo_sql = rebuild_sql(old)

    redo_cs = g_update_prefix | new.as_dict()
    undo_cs = g_update_prefix | old.as_dict()

    return SqlChangeSet(redo_sql, undo_sql, redo_cs, undo_cs)
|
||||
|
||||
|
||||
def set_tank_reaction(name: str, cs: ChangeSet) -> ChangeSet:
    """Apply the tank-reaction update in *cs* and return the executed change set."""
    cached = set_tank_reaction_cache(name, cs)
    return execute_command(name, cached)
|
||||
96
api/s20_mixing.py
Normal file
96
api/s20_mixing.py
Normal file
@@ -0,0 +1,96 @@
|
||||
from .operation import *
|
||||
from .s0_base import *
|
||||
|
||||
MIXING_MODEL_MIXED = 'MIXED'
|
||||
MIXING_MODEL_2COMP = '2COMP'
|
||||
MIXING_MODEL_FIFO = 'FIFO'
|
||||
MIXING_MODEL_LIFO = 'LIFO'
|
||||
|
||||
def get_mixing_schema(name: str) -> dict[str, dict[str, Any]]:
    """Return the field schema for a tank mixing record."""
    schema = {}
    schema['tank']  = {'type': 'str',   'optional': False, 'readonly': True}
    schema['model'] = {'type': 'str',   'optional': False, 'readonly': False}
    schema['value'] = {'type': 'float', 'optional': True,  'readonly': False}
    return schema
|
||||
|
||||
|
||||
def get_mixing(name: str, tank: str) -> dict[str, Any]:
    """Read the mixing model for *tank* and return it as a plain dict.

    Propagates whatever ``read`` raises when no mixing row exists for *tank*.
    """
    m = read(name, f"select * from mixing where tank = '{tank}'")
    return {
        'tank':  str(m['tank']),
        'model': str(m['model']),
        # value column is nullable: keep None rather than coercing.
        'value': float(m['value']) if m['value'] is not None else None,
    }
|
||||
|
||||
|
||||
class Mixing(object):
    """Value object for one row of the ``mixing`` table (tank mixing model).

    The ``f_*`` attributes hold the SQL-literal rendering of each field.
    """

    def __init__(self, input: dict[str, Any]) -> None:
        self.type = 'mixing'
        self.tank = str(input['tank'])
        self.model = str(input['model'])
        # value is optional: absent key or None means "no numeric parameter".
        raw = input.get('value')
        self.value = float(raw) if raw is not None else None

        self.f_type = f"'{self.type}'"
        self.f_tank = f"'{self.tank}'"
        self.f_model = f"'{self.model}'"
        self.f_value = self.value if self.value is not None else 'null'

    def as_dict(self) -> dict[str, Any]:
        """Full field dump, suitable for an add/update change-set payload."""
        return { 'type': self.type, 'tank': self.tank, 'model': self.model, 'value': self.value }

    def as_id_dict(self) -> dict[str, Any]:
        """Identity-only dump (used for delete change-sets)."""
        return { 'type': self.type, 'tank': self.tank }
|
||||
|
||||
|
||||
def set_mixing_cache(name: str, cs: ChangeSet) -> SqlChangeSet:
    """Build the redo/undo SQL and change-sets for updating a tank's mixing model."""
    new_dict = cs.operations[0]
    # Single DB read (the original read the same row twice): snapshot the
    # current state, then overlay the writable fields from the request.
    raw_old = get_mixing(name, new_dict['tank'])
    old = Mixing(raw_old)

    raw_new = dict(raw_old)
    for key, value in get_mixing_schema(name).items():
        if key in new_dict and not value['readonly']:
            raw_new[key] = new_dict[key]
    new = Mixing(raw_new)

    redo_sql = f"update mixing set model = {new.f_model}, value = {new.f_value} where tank = {new.f_tank};"
    undo_sql = f"update mixing set model = {old.f_model}, value = {old.f_value} where tank = {old.f_tank};"

    redo_cs = g_update_prefix | new.as_dict()
    undo_cs = g_update_prefix | old.as_dict()

    return SqlChangeSet(redo_sql, undo_sql, redo_cs, undo_cs)
|
||||
|
||||
|
||||
def set_mixing(name: str, cs: ChangeSet) -> ChangeSet:
    """Apply the mixing update described by *cs* and return the executed change set."""
    cached = set_mixing_cache(name, cs)
    return execute_command(name, cached)
|
||||
|
||||
|
||||
def add_mixing_cache(name: str, cs: ChangeSet) -> SqlChangeSet:
    """Build the redo/undo SQL and change-sets for inserting a new mixing record."""
    new = Mixing(cs.operations[0])

    redo_sql = f"insert into mixing (tank, model, value) values ({new.f_tank}, {new.f_model}, {new.f_value});"
    undo_sql = f"delete from mixing where tank = {new.f_tank};"

    return SqlChangeSet(redo_sql,
                        undo_sql,
                        g_add_prefix | new.as_dict(),
                        g_delete_prefix | new.as_id_dict())
|
||||
|
||||
|
||||
def add_mixing(name: str, cs: ChangeSet) -> ChangeSet:
    """Insert the mixing record described by *cs* and return the executed change set."""
    cached = add_mixing_cache(name, cs)
    return execute_command(name, cached)
|
||||
|
||||
|
||||
def delete_mixing_cache(name: str, cs: ChangeSet) -> SqlChangeSet:
    """Build the redo/undo SQL and change-sets for removing an existing mixing record."""
    old = Mixing(get_mixing(name, cs.operations[0]['tank']))

    redo_sql = f"delete from mixing where tank = {old.f_tank};"
    undo_sql = f"insert into mixing (tank, model, value) values ({old.f_tank}, {old.f_model}, {old.f_value});"

    return SqlChangeSet(redo_sql,
                        undo_sql,
                        g_delete_prefix | old.as_id_dict(),
                        g_add_prefix | old.as_dict())
|
||||
|
||||
|
||||
def delete_mixing(name: str, cs: ChangeSet) -> ChangeSet:
    """Delete the mixing record identified by *cs* and return the executed change set."""
    cached = delete_mixing_cache(name, cs)
    return execute_command(name, cached)
|
||||
0
api/s22_report.py
Normal file
0
api/s22_report.py
Normal file
@@ -7,5 +7,5 @@ create table sources
|
||||
node varchar(32) primary key references _node(id)
|
||||
, type sources_type not null
|
||||
, strength numeric not null
|
||||
, time_pattern varchar(32) references _pattern(id)
|
||||
, pattern varchar(32) references _pattern(id)
|
||||
);
|
||||
|
||||
@@ -1,42 +1,33 @@
|
||||
-- [REACTIONS]
|
||||
|
||||
create type reactions_order_param as enum ('BULK', 'WALL', 'TANK');
|
||||
|
||||
create table reactions_order
|
||||
(
|
||||
key reactions_order_param not null
|
||||
, value numeric not null
|
||||
);
|
||||
|
||||
create type reactions_global_param as enum ('BULK', 'WALL');
|
||||
|
||||
create table reactions_global
|
||||
(
|
||||
key reactions_global_param not null
|
||||
_no integer primary key
|
||||
, order_bulk numeric
|
||||
, order_wall numeric
|
||||
, order_tank numeric
|
||||
, global_bulk numeric
|
||||
, global_wall numeric
|
||||
, limiting_potential numeric
|
||||
, roughness_correlation numeric
|
||||
);
|
||||
|
||||
insert into reactions_global values (0, null, null, null, null, null, null, null);
|
||||
|
||||
create table reactions_pipe_bulk
|
||||
(
|
||||
pipe varchar(32) primary key references pipes(id) not null
|
||||
, value numeric not null
|
||||
);
|
||||
|
||||
create type reactions_pipe_param as enum ('BULK', 'WALL');
|
||||
|
||||
create table reactions_pipe
|
||||
create table reactions_pipe_wall
|
||||
(
|
||||
key reactions_pipe_param not null
|
||||
, pipe varchar(32) references pipes(id) not null
|
||||
pipe varchar(32) primary key references pipes(id) not null
|
||||
, value numeric not null
|
||||
);
|
||||
|
||||
create table reactions_tank
|
||||
(
|
||||
tank varchar(32) references tanks(id) not null
|
||||
tank varchar(32) primary key references tanks(id) not null
|
||||
, value numeric not null
|
||||
);
|
||||
|
||||
create table reactions_limiting_potential
|
||||
(
|
||||
value numeric not null
|
||||
);
|
||||
|
||||
create table reactions_roughness_correlation
|
||||
(
|
||||
value numeric not null
|
||||
);
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
-- [REPORT]
|
||||
|
||||
-- TODO: constraint
|
||||
|
||||
create table report
|
||||
(
|
||||
key text not null
|
||||
@@ -13,8 +11,8 @@ insert into report (key, value) values
|
||||
--, ('FILE', '')
|
||||
, ('STATUS', 'NO')
|
||||
, ('SUMMARY', 'YES')
|
||||
--, ('MESSAGES', 'NO')
|
||||
, ('ENERY', 'NO')
|
||||
, ('NODES', 'NONE')
|
||||
, ('LINKS', 'NONE')
|
||||
, ('MESSAGES', 'YES')
|
||||
, ('ENERY', 'YES')
|
||||
, ('NODES', 'ALL')
|
||||
, ('LINKS', 'ALL')
|
||||
;
|
||||
|
||||
@@ -1,19 +1,9 @@
|
||||
-- [REACTIONS]
|
||||
|
||||
drop table if exists reactions_roughness_correlation;
|
||||
|
||||
drop table if exists reactions_limiting_potential;
|
||||
|
||||
drop table if exists reactions_tank;
|
||||
|
||||
drop table if exists reactions_pipe;
|
||||
drop table if exists reactions_pipe_wall;
|
||||
|
||||
drop type if exists reactions_pipe_param;
|
||||
drop table if exists reactions_pipe_bulk;
|
||||
|
||||
drop table if exists reactions_global;
|
||||
|
||||
drop type if exists reactions_global_param;
|
||||
|
||||
drop table if exists reactions_order;
|
||||
|
||||
drop type if exists reactions_order_param;
|
||||
|
||||
@@ -2410,6 +2410,480 @@ class TestApi:
|
||||
self.leave(p)
|
||||
|
||||
|
||||
# 18 source
|
||||
|
||||
|
||||
def test_source(self):
    """Exercise the source CRUD round trip: add, partial updates, delete."""
    p = 'test_source'
    self.enter(p)

    add_junction(p, ChangeSet({'id': 'j0', 'x': 0.0, 'y': 10.0, 'elevation': 20.0}))
    add_pattern(p, ChangeSet({'id' : 'p0', 'factors': [1.0, 2.0, 3.0]}))
    add_pattern(p, ChangeSet({'id' : 'p1', 'factors': [1.0, 2.0, 3.0]}))

    def check(s_type, strength, pattern):
        # Read back j0's source and verify every field.
        s = get_source(p, 'j0')
        assert s['node'] == 'j0'
        assert s['s_type'] == s_type
        assert s['strength'] == strength
        assert s['pattern'] == pattern

    add_source(p, ChangeSet({'node': 'j0', 's_type': SOURCE_TYPE_CONCEN, 'strength': 10.0, 'pattern': 'p0'}))
    check(SOURCE_TYPE_CONCEN, 10.0, 'p0')

    # Each set_source updates one field; the others must be preserved.
    set_source(p, ChangeSet({'node': 'j0', 's_type': SOURCE_TYPE_FLOWPACED}))
    check(SOURCE_TYPE_FLOWPACED, 10.0, 'p0')

    set_source(p, ChangeSet({'node': 'j0', 'strength': 20.0}))
    check(SOURCE_TYPE_FLOWPACED, 20.0, 'p0')

    set_source(p, ChangeSet({'node': 'j0', 'pattern': 'p1'}))
    check(SOURCE_TYPE_FLOWPACED, 20.0, 'p1')

    delete_source(p, ChangeSet({'node': 'j0'}))

    self.leave(p)
|
||||
|
||||
|
||||
def test_source_op(self):
    """Verify the change-set operations (and their undo/redo) emitted by source CRUD."""
    p = 'test_source_op'
    self.enter(p)

    add_junction(p, ChangeSet({'id': 'j0', 'x': 0.0, 'y': 10.0, 'elevation': 20.0}))
    add_pattern(p, ChangeSet({'id' : 'p0', 'factors': [1.0, 2.0, 3.0]}))
    add_pattern(p, ChangeSet({'id' : 'p1', 'factors': [1.0, 2.0, 3.0]}))

    def check(cs, operation, **fields):
        # Every emitted operation targets the source on node j0.
        assert cs['operation'] == operation
        assert cs['type'] == 'source'
        assert cs['node'] == 'j0'
        for key, expected in fields.items():
            assert cs[key] == expected

    cs = add_source(p, ChangeSet({'node': 'j0', 's_type': SOURCE_TYPE_CONCEN, 'strength': 10.0, 'pattern': 'p0'})).operations[0]
    check(cs, API_ADD, s_type=SOURCE_TYPE_CONCEN, strength=10.0, pattern='p0')
    check(execute_undo(p).operations[0], API_DELETE)
    check(execute_redo(p).operations[0], API_ADD,
          s_type=SOURCE_TYPE_CONCEN, strength=10.0, pattern='p0')

    cs = set_source(p, ChangeSet({'node': 'j0', 's_type': SOURCE_TYPE_FLOWPACED})).operations[0]
    check(cs, API_UPDATE, s_type=SOURCE_TYPE_FLOWPACED, strength=10.0, pattern='p0')
    check(execute_undo(p).operations[0], API_UPDATE,
          s_type=SOURCE_TYPE_CONCEN, strength=10.0, pattern='p0')
    check(execute_redo(p).operations[0], API_UPDATE,
          s_type=SOURCE_TYPE_FLOWPACED, strength=10.0, pattern='p0')

    cs = set_source(p, ChangeSet({'node': 'j0', 'strength': 20.0})).operations[0]
    check(cs, API_UPDATE, s_type=SOURCE_TYPE_FLOWPACED, strength=20.0, pattern='p0')
    check(execute_undo(p).operations[0], API_UPDATE,
          s_type=SOURCE_TYPE_FLOWPACED, strength=10.0, pattern='p0')
    check(execute_redo(p).operations[0], API_UPDATE,
          s_type=SOURCE_TYPE_FLOWPACED, strength=20.0, pattern='p0')

    cs = set_source(p, ChangeSet({'node': 'j0', 'pattern': 'p1'})).operations[0]
    check(cs, API_UPDATE, s_type=SOURCE_TYPE_FLOWPACED, strength=20.0, pattern='p1')
    check(execute_undo(p).operations[0], API_UPDATE,
          s_type=SOURCE_TYPE_FLOWPACED, strength=20.0, pattern='p0')
    check(execute_redo(p).operations[0], API_UPDATE,
          s_type=SOURCE_TYPE_FLOWPACED, strength=20.0, pattern='p1')

    check(delete_source(p, ChangeSet({'node': 'j0'})).operations[0], API_DELETE)
    check(execute_undo(p).operations[0], API_ADD,
          s_type=SOURCE_TYPE_FLOWPACED, strength=20.0, pattern='p1')
    check(execute_redo(p).operations[0], API_DELETE)

    self.leave(p)
|
||||
|
||||
|
||||
# 19 reaction
|
||||
|
||||
|
||||
def test_global_reaction(self):
    """Exercise set/clear of a global reaction field and its read-back."""
    p = 'test_global_reaction'
    self.enter(p)

    fields = ('order_bulk', 'order_wall', 'order_tank',
              'global_bulk', 'global_wall',
              'limiting_potential', 'roughness_correlation')

    def check(**expected):
        # Unlisted fields must read back as None.
        gr = get_global_reaction(p)
        for f in fields:
            assert gr[f] == expected.get(f)

    check()                                                # fresh project: all unset

    set_global_reaction(p, ChangeSet({ 'order_bulk' : 10.0 }))
    check(order_bulk=10.0)

    set_global_reaction(p, ChangeSet({ 'order_bulk' : None }))
    check()                                                # explicitly cleared again

    self.leave(p)
|
||||
|
||||
|
||||
def test_global_reaction_op(self):
    """Undo/redo round-trip of a global-reaction update records symmetric operations.

    Fixes E711: compare against None with `is`, not `==`.
    """
    project = 'test_global_reaction_op'
    self.enter(project)

    other_keys = ('order_wall', 'order_tank', 'global_bulk', 'global_wall',
                  'limiting_potential', 'roughness_correlation')

    def check(op, order_bulk):
        # Every global-reaction change is recorded as an UPDATE carrying
        # the full field set; only order_bulk varies in this test.
        assert op['operation'] == API_UPDATE
        assert op['type'] == 'global_reaction'
        if order_bulk is None:
            assert op['order_bulk'] is None
        else:
            assert op['order_bulk'] == order_bulk
        for key in other_keys:
            assert op[key] is None

    # set -> recorded as the new value
    check(set_global_reaction(project, ChangeSet({'order_bulk': 10.0})).operations[0], 10.0)
    # undo -> restores the previously-unset value
    check(execute_undo(project).operations[0], None)
    # redo -> reapplies the update
    check(execute_redo(project).operations[0], 10.0)

    self.leave(project)
|
||||
|
||||
|
||||
def test_pipe_reaction(self):
    """Per-pipe reaction coefficients can be set and cleared back to None.

    Fixes E711: compare against None with `is`, not `==`.
    """
    project = 'test_pipe_reaction'
    self.enter(project)

    # Minimal network: two junctions joined by one pipe.
    add_junction(project, ChangeSet({'id': 'j1', 'x': 0.0, 'y': 10.0, 'elevation': 20.0}))
    add_junction(project, ChangeSet({'id': 'j2', 'x': 0.0, 'y': 10.0, 'elevation': 20.0}))
    add_pipe(project, ChangeSet({'id': 'p0', 'node1': 'j1', 'node2': 'j2',
                                 'length': 100.0, 'diameter': 10.0, 'roughness': 0.1,
                                 'minor_loss': 0.5, 'status': PIPE_STATUS_OPEN}))

    def check(bulk, wall):
        reaction = get_pipe_reaction(project, 'p0')
        assert reaction['pipe'] == 'p0'
        if bulk is None:
            assert reaction['bulk'] is None
        else:
            assert reaction['bulk'] == bulk
        if wall is None:
            assert reaction['wall'] is None
        else:
            assert reaction['wall'] == wall

    check(None, None)  # defaults are unset
    set_pipe_reaction(project, ChangeSet({'pipe': 'p0', 'bulk': 10.0, 'wall': 20.0}))
    check(10.0, 20.0)
    set_pipe_reaction(project, ChangeSet({'pipe': 'p0', 'bulk': None, 'wall': None}))
    check(None, None)  # cleared back to unset

    self.leave(project)
|
||||
|
||||
|
||||
def test_pipe_reaction_op(self):
    """Undo/redo of a pipe-reaction update replays the recorded operation.

    Fixes E711: compare against None with `is`, not `==`.
    """
    project = 'test_pipe_reaction_op'
    self.enter(project)

    # Minimal network: two junctions joined by one pipe.
    add_junction(project, ChangeSet({'id': 'j1', 'x': 0.0, 'y': 10.0, 'elevation': 20.0}))
    add_junction(project, ChangeSet({'id': 'j2', 'x': 0.0, 'y': 10.0, 'elevation': 20.0}))
    add_pipe(project, ChangeSet({'id': 'p0', 'node1': 'j1', 'node2': 'j2',
                                 'length': 100.0, 'diameter': 10.0, 'roughness': 0.1,
                                 'minor_loss': 0.5, 'status': PIPE_STATUS_OPEN}))

    def check(op, bulk, wall):
        assert op['operation'] == API_UPDATE
        assert op['type'] == 'pipe_reaction'
        assert op['pipe'] == 'p0'
        if bulk is None:
            assert op['bulk'] is None
        else:
            assert op['bulk'] == bulk
        if wall is None:
            assert op['wall'] is None
        else:
            assert op['wall'] == wall

    check(set_pipe_reaction(project, ChangeSet({'pipe': 'p0', 'bulk': 10.0, 'wall': 20.0})).operations[0],
          10.0, 20.0)
    check(execute_undo(project).operations[0], None, None)   # undo restores the unset values
    check(execute_redo(project).operations[0], 10.0, 20.0)   # redo reapplies the update

    self.leave(project)
|
||||
|
||||
|
||||
def test_tank_reaction(self):
    """Tank reaction coefficient can be set and cleared back to None.

    Fixes E711: compare against None with `is`, not `==`.
    """
    project = 'test_tank_reaction'
    self.enter(project)

    add_tank(project, ChangeSet({'id': 't0', 'x': 0.0, 'y': 10.0, 'elevation': 20.0,
                                 'init_level': 1.0, 'min_level': 0.0, 'max_level': 2.0,
                                 'diameter': 10.0, 'min_vol': 100.0, 'vol_curve': None,
                                 'overflow': OVERFLOW_NO}))

    def check(value):
        reaction = get_tank_reaction(project, 't0')
        assert reaction['tank'] == 't0'
        if value is None:
            assert reaction['value'] is None
        else:
            assert reaction['value'] == value

    check(None)  # default is unset
    set_tank_reaction(project, ChangeSet({'tank': 't0', 'value': 10.0}))
    check(10.0)
    set_tank_reaction(project, ChangeSet({'tank': 't0', 'value': None}))
    check(None)  # cleared back to unset

    self.leave(project)
|
||||
|
||||
|
||||
def test_tank_reaction_op(self):
    """Undo/redo of a tank-reaction update replays the recorded operation.

    Fixes E711: compare against None with `is`, not `==`.
    """
    project = 'test_tank_reaction_op'
    self.enter(project)

    add_tank(project, ChangeSet({'id': 't0', 'x': 0.0, 'y': 10.0, 'elevation': 20.0,
                                 'init_level': 1.0, 'min_level': 0.0, 'max_level': 2.0,
                                 'diameter': 10.0, 'min_vol': 100.0, 'vol_curve': None,
                                 'overflow': OVERFLOW_NO}))

    def check(op, value):
        assert op['operation'] == API_UPDATE
        assert op['type'] == 'tank_reaction'
        assert op['tank'] == 't0'
        if value is None:
            assert op['value'] is None
        else:
            assert op['value'] == value

    check(set_tank_reaction(project, ChangeSet({'tank': 't0', 'value': 10.0})).operations[0], 10.0)
    check(execute_undo(project).operations[0], None)   # undo restores the unset value
    check(execute_redo(project).operations[0], 10.0)   # redo reapplies it

    self.leave(project)
|
||||
|
||||
|
||||
# 20 mixing
|
||||
|
||||
|
||||
def test_mixing(self):
    """Mixing model on a tank: add, partial updates, then delete."""
    project = 'test_mixing'
    self.enter(project)

    add_tank(project, ChangeSet({'id': 't0', 'x': 0.0, 'y': 10.0, 'elevation': 20.0,
                                 'init_level': 1.0, 'min_level': 0.0, 'max_level': 2.0,
                                 'diameter': 10.0, 'min_vol': 100.0, 'vol_curve': None,
                                 'overflow': OVERFLOW_NO}))

    def check(model, value):
        mixing = get_mixing(project, 't0')
        assert mixing['tank'] == 't0'
        assert mixing['model'] == model
        assert mixing['value'] == value

    add_mixing(project, ChangeSet({'tank': 't0', 'model': MIXING_MODEL_MIXED, 'value': 10.0}))
    check(MIXING_MODEL_MIXED, 10.0)

    # Partial updates: fields not named in the change set keep their values.
    set_mixing(project, ChangeSet({'tank': 't0', 'model': MIXING_MODEL_2COMP}))
    check(MIXING_MODEL_2COMP, 10.0)

    set_mixing(project, ChangeSet({'tank': 't0', 'value': 20.0}))
    check(MIXING_MODEL_2COMP, 20.0)

    delete_mixing(project, ChangeSet({'tank': 't0'}))

    self.leave(project)
|
||||
|
||||
|
||||
def test_mixing_op(self):
    """Full undo/redo audit trail for mixing: add, two updates, delete."""
    project = 'test_mixing_op'
    self.enter(project)

    add_tank(project, ChangeSet({'id': 't0', 'x': 0.0, 'y': 10.0, 'elevation': 20.0,
                                 'init_level': 1.0, 'min_level': 0.0, 'max_level': 2.0,
                                 'diameter': 10.0, 'min_vol': 100.0, 'vol_curve': None,
                                 'overflow': OVERFLOW_NO}))

    def check(op, operation, model=None, value=None):
        assert op['operation'] == operation
        assert op['type'] == 'mixing'
        assert op['tank'] == 't0'
        if operation != API_DELETE:
            # ADD and UPDATE operations carry the full mixing record.
            assert op['model'] == model
            assert op['value'] == value

    # add -> undo (delete) -> redo (add)
    op = add_mixing(project, ChangeSet({'tank': 't0', 'model': MIXING_MODEL_MIXED,
                                        'value': 10.0})).operations[0]
    check(op, API_ADD, MIXING_MODEL_MIXED, 10.0)
    check(execute_undo(project).operations[0], API_DELETE)
    check(execute_redo(project).operations[0], API_ADD, MIXING_MODEL_MIXED, 10.0)

    # model update -> undo -> redo
    op = set_mixing(project, ChangeSet({'tank': 't0', 'model': MIXING_MODEL_2COMP})).operations[0]
    check(op, API_UPDATE, MIXING_MODEL_2COMP, 10.0)
    check(execute_undo(project).operations[0], API_UPDATE, MIXING_MODEL_MIXED, 10.0)
    check(execute_redo(project).operations[0], API_UPDATE, MIXING_MODEL_2COMP, 10.0)

    # value update -> undo -> redo
    op = set_mixing(project, ChangeSet({'tank': 't0', 'value': 20.0})).operations[0]
    check(op, API_UPDATE, MIXING_MODEL_2COMP, 20.0)
    check(execute_undo(project).operations[0], API_UPDATE, MIXING_MODEL_2COMP, 10.0)
    check(execute_redo(project).operations[0], API_UPDATE, MIXING_MODEL_2COMP, 20.0)

    # delete -> undo (re-add with last state) -> redo (delete)
    op = delete_mixing(project, ChangeSet({'tank': 't0'})).operations[0]
    check(op, API_DELETE)
    check(execute_undo(project).operations[0], API_ADD, MIXING_MODEL_2COMP, 20.0)
    check(execute_redo(project).operations[0], API_DELETE)

    self.leave(project)
|
||||
|
||||
|
||||
# 21 time
|
||||
|
||||
|
||||
|
||||
91
tjnetwork.py
91
tjnetwork.py
@@ -52,6 +52,16 @@ CURVE_TYPE_EFFICIENCY = api.CURVE_TYPE_EFFICIENCY
|
||||
CURVE_TYPE_VOLUME = api.CURVE_TYPE_VOLUME
|
||||
CURVE_TYPE_HEADLOSS = api.CURVE_TYPE_HEADLOSS
|
||||
|
||||
SOURCE_TYPE_CONCEN = api.SOURCE_TYPE_CONCEN
|
||||
SOURCE_TYPE_MASS = api.SOURCE_TYPE_MASS
|
||||
SOURCE_TYPE_FLOWPACED = api.SOURCE_TYPE_FLOWPACED
|
||||
SOURCE_TYPE_SETPOINT = api.SOURCE_TYPE_SETPOINT
|
||||
|
||||
MIXING_MODEL_MIXED = api.MIXING_MODEL_MIXED
|
||||
MIXING_MODEL_2COMP = api.MIXING_MODEL_2COMP
|
||||
MIXING_MODEL_FIFO = api.MIXING_MODEL_FIFO
|
||||
MIXING_MODEL_LIFO = api.MIXING_MODEL_LIFO
|
||||
|
||||
TIME_STATISTIC_NONE = api.TIME_STATISTIC_NONE
|
||||
TIME_STATISTIC_AVERAGED = api.TIME_STATISTIC_AVERAGED
|
||||
TIME_STATISTIC_MINIMUM = api.TIME_STATISTIC_MINIMUM
|
||||
@@ -517,6 +527,80 @@ def set_quality(name: str, cs: ChangeSet) -> ChangeSet:
|
||||
return api.set_quality(name, cs)
|
||||
|
||||
|
||||
############################################################
|
||||
# source 18.[SOURCES]
|
||||
############################################################
|
||||
|
||||
def get_source_schema(name: str) -> dict[str, dict[str, Any]]:
    """Schema describing the editable fields of a source (delegates to api)."""
    schema = api.get_source_schema(name)
    return schema
|
||||
|
||||
def get_source(name: str, node: str) -> dict[str, Any]:
    """Source record attached to *node* in project *name* (delegates to api)."""
    record = api.get_source(name, node)
    return record
|
||||
|
||||
def set_source(name: str, cs: ChangeSet) -> ChangeSet:
    """Apply the source update in *cs*; returns the recorded change set."""
    result = api.set_source(name, cs)
    return result
|
||||
|
||||
# example: add_source(p, ChangeSet({'node': 'j0', 's_type': SOURCE_TYPE_CONCEN, 'strength': 10.0, 'pattern': 'p0'}))
def add_source(name: str, cs: ChangeSet) -> ChangeSet:
    """Attach a new source; *cs* supplies node, s_type, strength and pattern."""
    result = api.add_source(name, cs)
    return result
|
||||
|
||||
def delete_source(name: str, cs: ChangeSet) -> ChangeSet:
    """Remove the source identified in *cs*; returns the recorded change set."""
    result = api.delete_source(name, cs)
    return result
|
||||
|
||||
|
||||
############################################################
|
||||
# reaction 19.[REACTIONS]
|
||||
############################################################
|
||||
|
||||
def get_global_reaction_schema(name: str) -> dict[str, dict[str, Any]]:
    """Schema describing the global-reaction fields (delegates to api)."""
    schema = api.get_global_reaction_schema(name)
    return schema
|
||||
|
||||
def get_global_reaction(name: str) -> dict[str, Any]:
    """Project-wide reaction settings for *name* (delegates to api)."""
    record = api.get_global_reaction(name)
    return record
|
||||
|
||||
def set_global_reaction(name: str, cs: ChangeSet) -> ChangeSet:
    """Apply the global-reaction update in *cs*; returns the recorded change set."""
    result = api.set_global_reaction(name, cs)
    return result
|
||||
|
||||
def get_pipe_reaction_schema(name: str) -> dict[str, dict[str, Any]]:
    """Schema describing per-pipe reaction fields (delegates to api)."""
    schema = api.get_pipe_reaction_schema(name)
    return schema
|
||||
|
||||
def get_pipe_reaction(name: str, pipe: str) -> dict[str, Any]:
    """Reaction coefficients for *pipe* in project *name* (delegates to api)."""
    record = api.get_pipe_reaction(name, pipe)
    return record
|
||||
|
||||
def set_pipe_reaction(name: str, cs: ChangeSet) -> ChangeSet:
    """Apply the pipe-reaction update in *cs*; returns the recorded change set."""
    result = api.set_pipe_reaction(name, cs)
    return result
|
||||
|
||||
def get_tank_reaction_schema(name: str) -> dict[str, dict[str, Any]]:
    """Schema describing per-tank reaction fields (delegates to api)."""
    schema = api.get_tank_reaction_schema(name)
    return schema
|
||||
|
||||
def get_tank_reaction(name: str, tank: str) -> dict[str, Any]:
    """Reaction coefficient for *tank* in project *name* (delegates to api)."""
    record = api.get_tank_reaction(name, tank)
    return record
|
||||
|
||||
def set_tank_reaction(name: str, cs: ChangeSet) -> ChangeSet:
    """Apply the tank-reaction update in *cs*; returns the recorded change set."""
    result = api.set_tank_reaction(name, cs)
    return result
|
||||
|
||||
|
||||
############################################################
|
||||
# mixing 20.[MIXING]
|
||||
############################################################
|
||||
|
||||
def get_mixing_schema(name: str) -> dict[str, dict[str, Any]]:
    """Schema describing the mixing-model fields (delegates to api)."""
    schema = api.get_mixing_schema(name)
    return schema
|
||||
|
||||
def get_mixing(name: str, tank: str) -> dict[str, Any]:
    """Mixing record for *tank* in project *name* (delegates to api)."""
    record = api.get_mixing(name, tank)
    return record
|
||||
|
||||
def set_mixing(name: str, cs: ChangeSet) -> ChangeSet:
    """Apply the mixing update in *cs*; returns the recorded change set."""
    result = api.set_mixing(name, cs)
    return result
|
||||
|
||||
# example: add_mixing(p, ChangeSet({'tank': 't0', 'model': MIXING_MODEL_MIXED, 'value': 10.0}))
def add_mixing(name: str, cs: ChangeSet) -> ChangeSet:
    """Attach a mixing model to a tank; *cs* supplies tank, model and value."""
    result = api.add_mixing(name, cs)
    return result
|
||||
|
||||
def delete_mixing(name: str, cs: ChangeSet) -> ChangeSet:
    """Remove the mixing model identified in *cs*; returns the recorded change set."""
    result = api.delete_mixing(name, cs)
    return result
|
||||
|
||||
|
||||
############################################################
|
||||
# time 21.[TIMES]
|
||||
############################################################
|
||||
@@ -531,6 +615,13 @@ def set_time(name: str, cs: ChangeSet) -> ChangeSet:
|
||||
return api.set_time(name, cs)
|
||||
|
||||
|
||||
############################################################
|
||||
# report 22.[REPORT]
|
||||
############################################################
|
||||
|
||||
# hardcode...
|
||||
|
||||
|
||||
############################################################
|
||||
# option 23.[OPTIONS]
|
||||
############################################################
|
||||
|
||||
Reference in New Issue
Block a user