Large refactor of the API and addition of a batch API

This commit is contained in:
WQY\qiong
2022-10-14 23:18:01 +08:00
parent 200aaaca99
commit c5480d55ca
20 changed files with 1811 additions and 1510 deletions

View File

@@ -2,11 +2,14 @@ from .project import have_project, create_project, delete_project
from .project import is_project_open, get_project_open_count, open_project, close_project
from .project import copy_project
from .change_set import ChangeSet
from .operation import API_ADD, API_UPDATE, API_DELETE
from .operation import ChangeSet
from .operation import get_current_operation
from .operation import execute_undo, execute_redo
from .operation import have_snapshot, take_snapshot, pick_snapshot, sync_with_server
from .operation import have_snapshot, take_snapshot, pick_snapshot
from .operation import sync_with_server
from .command import execute_batch_commands
from .s0_base import JUNCTION, RESERVOIR, TANK, PIPE, PUMP, VALVE
from .s0_base import is_node, is_junction, is_reservoir, is_tank
@@ -16,7 +19,7 @@ from .s0_base import is_pattern
from .s0_base import get_nodes, get_links, get_curves, get_patterns
from .s0_base import get_node_links
from .s1_title import set_title, get_title
from .s1_title import get_title_schema, get_title, set_title
from .s2_junctions import get_junction_schema, add_junction, get_junction, set_junction, delete_junction
@@ -33,4 +36,4 @@ from .s6_pumps import get_pump_schema, add_pump, get_pump, set_pump, delete_pump
from .s7_valves import VALVES_TYPE_PRV, VALVES_TYPE_PSV, VALVES_TYPE_PBV, VALVES_TYPE_FCV, VALVES_TYPE_TCV, VALVES_TYPE_GPV
from .s7_valves import get_valve_schema, add_valve, get_valve, set_valve, delete_valve
from .s24_coordinates import get_node_coord
from .s24_coordinates import get_node_coord

View File

@@ -1,22 +0,0 @@
class ChangeSet:
    """Accumulates add/delete/update operations as plain dicts, fluent-style.

    Each recorded entry is a dict with an 'operation' key plus the object's
    'type' and 'id' (updates also carry the changed 'properties').
    """

    def __init__(self):
        # Ordered log of operation dicts; consumed by the client as-is.
        self.operations : list[dict[str, str]] = []

    def add(self, type: str, id: str):
        """Record an 'add' of object *id* of *type*; returns self for chaining."""
        self.operations += [{ 'operation': 'add', 'type': type, 'id': id }]
        return self

    def delete(self, type: str, id: str):
        """Record a 'delete' of object *id* of *type*; returns self for chaining."""
        self.operations += [{ 'operation': 'delete', 'type': type, 'id': id }]
        return self

    def update(self, type: str, id: str, properties: list[str]):
        """Record an 'update' of the named *properties*; returns self for chaining."""
        self.operations += [{ 'operation': 'update', 'type': type, 'id': id, 'properties': properties }]
        return self

    def append(self, other):
        """Concatenate another ChangeSet's operations onto this one; returns self."""
        self.operations.extend(other.operations)
        return self

    def compress(self):
        """Hook for future operation de-duplication; currently a no-op."""
        return self

84
api/command.py Normal file
View File

@@ -0,0 +1,84 @@
from .s1_title import *
from .s2_junctions import *
from .s3_reservoirs import *
from .s4_tanks import *
from .s5_pipes import *
from .s6_pumps import *
from .s7_valves import *
def execute_add_command(name: str, cs: ChangeSet) -> ChangeSet:
    """Dispatch an 'add' batch entry to the add-function for its object type.

    The type is taken from the first (and only) operation of *cs*.
    Returns an empty ChangeSet when the type is not recognized.
    """
    dispatch = {
        JUNCTION: add_junction,
        RESERVOIR: add_reservoir,
        TANK: add_tank,
        PIPE: add_pipe,
        PUMP: add_pump,
        VALVE: add_valve,
    }
    handler = dispatch.get(cs.operations[0]['type'])
    return handler(name, cs) if handler is not None else ChangeSet()
def execute_update_command(name: str, cs: ChangeSet) -> ChangeSet:
    """Dispatch an 'update' batch entry to the setter for its object type.

    'title' is a singleton object handled specially; all other types map to
    their per-type set_* functions. Unknown types yield an empty ChangeSet.
    """
    dispatch = {
        'title': set_title,
        JUNCTION: set_junction,
        RESERVOIR: set_reservoir,
        TANK: set_tank,
        PIPE: set_pipe,
        PUMP: set_pump,
        VALVE: set_valve,
    }
    handler = dispatch.get(cs.operations[0]['type'])
    return handler(name, cs) if handler is not None else ChangeSet()
def execute_delete_command(name: str, cs: ChangeSet) -> ChangeSet:
    """Dispatch a 'delete' batch entry to the delete-function for its object type.

    Bug fix: the JUNCTION branch previously called add_junction, so a batch
    delete of a junction re-added it instead of deleting it. All other
    branches already called their delete_* counterpart.
    Returns an empty ChangeSet when the type is not recognized.
    """
    type = cs.operations[0]['type']
    if type == JUNCTION:
        return delete_junction(name, cs)
    elif type == RESERVOIR:
        return delete_reservoir(name, cs)
    elif type == TANK:
        return delete_tank(name, cs)
    elif type == PIPE:
        return delete_pipe(name, cs)
    elif type == PUMP:
        return delete_pump(name, cs)
    elif type == VALVE:
        return delete_valve(name, cs)
    return ChangeSet()
def execute_batch_commands(name: str, cs: ChangeSet) -> ChangeSet:
    """Execute a heterogeneous batch of add/update/delete operations in order.

    Each operation is wrapped in its own one-entry ChangeSet and dispatched;
    the resulting change sets are merged into one result for the client.
    Execution is best-effort: if an operation raises, the operations already
    executed are kept and their merged ChangeSet is returned.
    """
    result = ChangeSet()
    try:
        for op in cs.operations:
            operation = op['operation']
            if operation == API_ADD:
                result.merge(execute_add_command(name, ChangeSet(op)))
            elif operation == API_UPDATE:
                result.merge(execute_update_command(name, ChangeSet(op)))
            elif operation == API_DELETE:
                result.merge(execute_delete_command(name, ChangeSet(op)))
    except Exception:
        # Fix: was a bare `except: pass`, which also swallowed SystemExit/
        # KeyboardInterrupt and hid programming errors. Narrowed to Exception;
        # the best-effort semantics (return what succeeded so far) are kept.
        # TODO(review): log the failing op for diagnosis.
        pass
    return result

View File

@@ -1,22 +1,123 @@
from typing import Any
from psycopg.rows import dict_row, Row
from .connection import g_conn_dict as conn
from .utility import *
from .change_set import *
API_ADD = 'add'
API_DELETE = 'delete'
API_UPDATE = 'update'
API_DELETE = 'delete'
g_add_prefix = { 'operation': API_ADD }
g_update_prefix = { 'operation': API_UPDATE }
g_delete_prefix = { 'operation': API_DELETE }
def _remove_operation(name: str, id: int) -> None:
row = read(name, f'select * from operation where parent = {id}')
if row != None:
raise Exception('Disallow to remove parent operation !')
class ChangeSet:
def __init__(self, ps: dict[str, Any] | None = None):
self.operations : list[dict[str, Any]] = []
if ps != None:
self.append(ps)
sql = f'delete from snapshot_operation where id = {id};'
sql += f'delete from operation where id = {id}'
write(name, sql)
def add(self, ps: dict[str, Any]):
self.operations.append(g_add_prefix | ps)
return self
def update(self, ps: dict[str, Any]):
self.operations.append(g_update_prefix | ps)
return self
def delete(self, ps: dict[str, Any]):
self.operations.append(g_delete_prefix | ps)
return self
def append(self, ps: dict[str, Any]):
self.operations.append(ps)
return self
def merge(self, cs):
if len(cs.operations) > 0:
self.operations += cs.operations
return self
def compress(self):
return self
def read(name: str, sql: str) -> Row:
    """Run *sql* against project *name* and return the first row as a dict.

    Raises Exception (with the SQL text as the message) when no row matches.
    NOTE(review): some callers (e.g. have_snapshot) compare the result to
    None, which can never occur given this raise — confirm intended contract.
    """
    with conn[name].cursor(row_factory=dict_row) as cur:
        cur.execute(sql)
        row = cur.fetchone()
        if row == None:
            raise Exception(sql)
        return row

def write(name: str, sql: str) -> None:
    """Execute *sql* (possibly several ';'-separated statements); no result."""
    with conn[name].cursor() as cur:
        cur.execute(sql)

def get_current_operation(name: str) -> int:
    """Return the id stored in the single-row current_operation table."""
    return int(read(name, 'select id from current_operation')['id'])
def execute_command(name: str, redo_sql: str, undo_sql: str, redo_cs: dict[str, str], undo_cs: dict[str, str]) -> ChangeSet:
    """Apply *redo_sql* and record it, with its inverse, in the operation log.

    The redo/undo SQL and the client change-set dicts are stored with single
    quotes doubled so they embed safely in the insert statement; the
    current_operation pointer is then advanced to the new row.
    Returns a ChangeSet built from *redo_cs* for the client.
    NOTE(review): change sets are persisted via str(dict) and re-read with
    eval() elsewhere — fragile and unsafe if the table can hold untrusted data.
    """
    # Apply the command first; logging of redo/undo happens afterwards.
    write(name, redo_sql)
    parent = get_current_operation(name)
    # Double single quotes for embedding inside the SQL string literals below.
    redo_sql = redo_sql.replace("'", "''")
    undo_sql = undo_sql.replace("'", "''")
    redo_cs_str = str(redo_cs).replace("'", "''")
    undo_cs_str = str(undo_cs).replace("'", "''")
    write(name, f"insert into operation (id, redo, undo, parent, redo_cs, undo_cs) values (default, '{redo_sql}', '{undo_sql}', {parent}, '{redo_cs_str}', '{undo_cs_str}')")
    # The freshly inserted row has the max id; make it current.
    current = read(name, 'select max(id) as id from operation')['id']
    write(name, f"update current_operation set id = {current}")
    return ChangeSet(redo_cs)

def execute_undo(name: str, discard: bool = False) -> ChangeSet:
    """Undo the current operation and move current_operation to its parent.

    With discard=True the undone row is deleted (its redo history is dropped
    via cascade); otherwise it stays reachable through the parent's
    redo_child. Returns the stored undo change set.
    NOTE(review): undo_cs is deserialized with eval() — confirm the column
    can only contain trusted, self-written content.
    """
    row = read(name, f'select * from operation where id = {get_current_operation(name)}')
    write(name, row['undo'])
    # update foreign key
    write(name, f"update current_operation set id = {row['parent']} where id = {row['id']}")
    if discard:
        # update foreign key
        write(name, f"update operation set redo_child = null where id = {row['parent']}")
        # on delete cascade => child & snapshot
        write(name, f"delete from operation where id = {row['id']}")
    else:
        write(name, f"update operation set redo_child = {row['id']} where id = {row['parent']}")
    return ChangeSet(eval(row['undo_cs']))

def execute_redo(name: str) -> ChangeSet:
    """Redo the current operation's redo_child, if any.

    Returns an empty ChangeSet when there is nothing to redo; otherwise
    applies the child's redo SQL, advances current_operation, and returns
    the stored redo change set (deserialized with eval — see note above).
    """
    row = read(name, f'select * from operation where id = {get_current_operation(name)}')
    if row['redo_child'] == None:
        return ChangeSet()
    row = read(name, f"select * from operation where id = {row['redo_child']}")
    write(name, row['redo'])
    write(name, f"update current_operation set id = {row['id']} where id = {row['parent']}")
    return ChangeSet(eval(row['redo_cs']))
def have_snapshot(name: str, tag: str) -> bool:
    """Return True when a snapshot with *tag* exists.

    Bug fix: read() raises when no row matches, so the previous
    `read(...) != None` could never evaluate to False — a missing tag
    escaped as an exception instead of returning False. The lookup is now
    wrapped so a missing row yields False.
    NOTE(review): *tag* is interpolated into SQL unescaped — injection risk
    if tags can come from untrusted input.
    """
    try:
        read(name, f"select id from snapshot_operation where tag = '{tag}'")
        return True
    except Exception:
        return False
def take_snapshot(name: str, tag: str) -> int | None:
    """Tag the current operation as a snapshot.

    Returns the tagged operation id, or None when *tag* is empty or None.
    """
    if not tag:
        return None
    op_id = get_current_operation(name)
    write(name, f"insert into snapshot_operation (id, tag) values ({op_id}, '{tag}')")
    return op_id
def _get_parents(name: str, id: int) -> list[int]:
@@ -27,149 +128,11 @@ def _get_parents(name: str, id: int) -> list[int]:
return ids
def get_current_operation(name: str) -> int:
    """Return the id from the single-row current_operation table."""
    # Fix: dropped a stray f-prefix — the query has no placeholders.
    row = read(name, 'select id from current_operation')
    return int(row['id'])
def _update_current_operation(name: str, old_id: int, id: int) -> None:
    """Move the current-operation pointer from *old_id* to *id*."""
    return write(name, f'update current_operation set id = {id} where id = {old_id}')
def _add_redo_undo(name: str, redo: str, undo: str, api_id: str, api_op: str, api_object_type: str, api_object_id: str, api_object_properties: list[str]) -> int:
    """Insert an operation-log row (optionally with a property list) and return its id.

    The parent link is the current operation at the time of the call.
    """
    parent = get_current_operation(name)
    quoted = [f'"{p}"' for p in api_object_properties]
    if quoted:
        # Postgres text[] literal: {"p1","p2",...}
        array_literal = '{' + ','.join(quoted) + '}'
        sql = f"insert into operation (id, redo, undo, parent, api_id, api_op, api_object_type, api_object_id, api_object_properties) values (default, '{redo}', '{undo}', {parent}, '{api_id}', '{api_op}', '{api_object_type}', '{api_object_id}', '{array_literal}')"
    else:
        sql = f"insert into operation (id, redo, undo, parent, api_id, api_op, api_object_type, api_object_id) values (default, '{redo}', '{undo}', {parent}, '{api_id}', '{api_op}', '{api_object_type}', '{api_object_id}')"
    write(name, sql)
    # The freshly inserted row carries the max id.
    return int(read(name, 'select max(id) from operation')['max'])
def _query_operation(name: str, id: str) -> dict[str, str]:
    """Fetch the full operation row with the given id."""
    return read(name, f'select * from operation where id = {id}')

def _query_redo_child(name: str, id: str) -> str:
    """Return the redo_child column of operation *id* (may be a null value)."""
    row = read(name, f'select redo_child from operation where id = {id}')
    return row['redo_child']

def _set_redo_child(name: str, id: int, child: int | str) -> None:
    """Point operation *id* at *child* for redo; *child* may be the literal 'null'."""
    return write(name, f'update operation set redo_child = {child} where id = {id}')

def add_operation(name: str, redo: str, undo: str, api_id: str, api_op: str, api_object_type: str, api_object_id: str, api_object_properties: list[str] = []) -> None:
    """Append a redo/undo pair to the log and advance the current pointer.

    NOTE(review): mutable default argument ([]) — harmless here only while
    it is never mutated; confirm before extending.
    """
    curr = _add_redo_undo(name, redo, undo, api_id, api_op, api_object_type, api_object_id, api_object_properties)
    old = get_current_operation(name)
    _update_current_operation(name, old, curr)
def _reverser_op(op: str) -> str:
    """Invert an operation for undo: add <-> delete; update maps to itself."""
    inverse = {API_ADD: API_DELETE, API_DELETE: API_ADD}
    return inverse.get(op, op)
def _get_change_set(row: dict[str, str], undo: bool) -> ChangeSet:
    """Build a one-entry ChangeSet from an operation row.

    When *undo* is True the operation is inverted (add <-> delete) so the
    client applies the reverse effect; updates keep their property list.
    """
    op = row['api_op']
    if undo:
        op = _reverser_op(op)
    type = row['api_object_type']
    id = row['api_object_id']
    change = ChangeSet()
    if op == API_ADD:
        change.add(type, id)
    elif op == API_DELETE:
        change.delete(type, id)
    elif op == API_UPDATE:
        change.update(type, id, row['api_object_properties'])
    return change
def execute_undo(name: str, discard: bool) -> ChangeSet:
    """Undo the current operation; optionally discard it from history.

    Returns the inverted change set for the client.
    Bug fix: previously returned None (bare `return`) when there was
    nothing to undo, although the signature promises a ChangeSet; callers
    such as pick_snapshot chain `.append()` on the result and would crash
    on None. Now returns an empty ChangeSet in that case.
    """
    curr = get_current_operation(name)
    row = _query_operation(name, curr)
    undo = row['undo']
    if undo == '':
        print("nothing to undo!")
        return ChangeSet()
    change = _get_change_set(row, True)
    parent = int(row['parent'])
    # Keep the redo pointer unless the caller wants this operation discarded.
    _set_redo_child(name, parent, 'null' if discard else curr)
    write(name, undo)
    _update_current_operation(name, curr, parent)
    if discard:
        _remove_operation(name, curr)
    return change
def execute_redo(name: str) -> ChangeSet:
    """Redo the child of the current operation, if any.

    Returns the child's change set for the client.
    Bug fix: previously returned None (bare `return`) when there was no
    redo child, violating the declared return type and crashing callers
    that chain `.append()` on the result. Now returns an empty ChangeSet.
    """
    curr = get_current_operation(name)
    redoChild = _query_redo_child(name, curr)
    if redoChild == None:
        print("nothing to redo!")
        return ChangeSet()
    child = int(redoChild)
    row = _query_operation(name, child)
    redo = row['redo']
    change = _get_change_set(row, False)
    write(name, redo)
    _update_current_operation(name, curr, child)
    return change
def _get_operation_by_tag(name: str, tag: str) -> int | None:
    """Return the operation id snapshot-tagged *tag*, or None when missing.

    NOTE(review): relies on read() returning None for an empty result; the
    newer read() in this commit raises instead — confirm which read() this
    revision pairs with.
    """
    row = read(name, f"select id from snapshot_operation where tag = '{tag}'")
    return int(row['id']) if row != None else None

def have_snapshot(name: str, tag: str) -> bool:
    """True when a snapshot with *tag* exists."""
    return _get_operation_by_tag(name, tag) != None

def take_snapshot(name: str, tag: str) -> int | None:
    """Tag the current operation; returns its id, or None for an empty tag."""
    if tag == None or tag == '':
        print('Non empty tag is expected!')
        return None
    curr = get_current_operation(name)
    write(name, f"insert into snapshot_operation (id, tag) values ({curr}, '{tag}')")
    return curr
def pick_snapshot(name: str, tag: str, discard: bool) -> ChangeSet:
if tag == None or tag == '':
print('Non empty tag is expected!')
if not have_snapshot(name, tag):
return ChangeSet()
target = _get_operation_by_tag(name, tag)
if target == None:
print('No such snapshot!')
return ChangeSet()
target = int(read(name, f"select id from snapshot_operation where tag = '{tag}'")['id'])
curr = get_current_operation(name)
@@ -180,14 +143,14 @@ def pick_snapshot(name: str, tag: str, discard: bool) -> ChangeSet:
if target in curr_parents:
for _ in range(curr_parents.index(target)):
change.append(execute_undo(name, discard))
change.merge(execute_undo(name, discard))
elif curr in target_parents:
target_parents.reverse()
curr_index = target_parents.index(curr)
for i in range(curr_index, len(target_parents) - 1):
write(name, f"update operation set redo_child = '{target_parents[i + 1]}' where id = '{target_parents[i]}'")
change.append(execute_redo(name))
change.merge(execute_redo(name))
else:
ancestor_index = -1
@@ -196,24 +159,20 @@ def pick_snapshot(name: str, tag: str, discard: bool) -> ChangeSet:
ancestor = curr_parents[ancestor_index + 1]
for _ in range(curr_parents.index(ancestor)):
change.append(execute_undo(name, discard))
change.merge(execute_undo(name, discard))
target_parents.reverse()
curr_index = target_parents.index(ancestor)
for i in range(curr_index, len(target_parents) - 1):
write(name, f"update operation set redo_child = '{target_parents[i + 1]}' where id = '{target_parents[i]}'")
change.append(execute_redo(name))
change.merge(execute_redo(name))
return change.compress()
def get_change_set(name: str, operation: int, undo: bool) -> ChangeSet:
    """Load operation *operation*'s api_* columns and convert to a ChangeSet."""
    row = read(name, f'select api_id, api_op, api_object_type, api_object_id, api_object_properties from operation where id = {operation}')
    return _get_change_set(row, undo)

def get_current_change_set(name: str) -> ChangeSet:
    """ChangeSet for the current operation, in the redo direction."""
    return get_change_set(name, get_current_operation(name), False)

def _get_change_set(name: str, operation: int, undo: bool) -> dict[str, Any]:
    """Return the stored undo/redo change-set dict for *operation*.

    NOTE(review): deserializes with eval() on a database column — unsafe if
    the table can contain untrusted content; consider json instead. Also
    note this new 3-arg _get_change_set shadows the older 2-arg version.
    """
    row = read(name, f'select * from operation where id = {operation}')
    return eval(row['undo']) if undo else eval(row['redo'])
def sync_with_server(name: str, operation: int) -> ChangeSet:
@@ -228,13 +187,13 @@ def sync_with_server(name: str, operation: int) -> ChangeSet:
if fr in to_parents:
index = to_parents.index(fr) - 1
while index >= 0:
change.append(get_change_set(name, to_parents[index], False))
change.append(_get_change_set(name, to_parents[index], False)) #redo
index -= 1
elif to in fr_parents:
index = 0
while index <= fr_parents.index(to) - 1:
change.append(get_change_set(name, fr_parents[index], True))
change.append(_get_change_set(name, fr_parents[index], True))
index += 1
else:
@@ -246,12 +205,12 @@ def sync_with_server(name: str, operation: int) -> ChangeSet:
index = 0
while index <= fr_parents.index(ancestor) - 1:
change.append(get_change_set(name, fr_parents[index], True))
change.append(_get_change_set(name, fr_parents[index], True))
index += 1
index = to_parents.index(ancestor) - 1
while index >= 0:
change.append(get_change_set(name, to_parents[index], False))
change.append(_get_change_set(name, to_parents[index], False))
index -= 1
return change.compress()

View File

@@ -1,7 +1,6 @@
from psycopg.rows import dict_row, Row
from .connection import g_conn_dict as conn
from .operation import *
from .change_set import ChangeSet
_NODE = "_node"
@@ -94,93 +93,6 @@ def get_patterns(name: str) -> list[str]:
return _get_all(name, _PATTERN)
def add_node(name: str, node_type: str, id: str, x: float, y: float, table_sql: str, table_undo_sql: str) -> ChangeSet:
    """Insert a node (plus its type-specific row via *table_sql*) and coordinate.

    Records a redo/undo pair in the operation log and returns a one-entry
    'add' ChangeSet. No-op (empty ChangeSet) when the id already exists.
    NOTE(review): add_operation is called with 3 args here while the visible
    signature takes more — these lines appear to come from different file
    revisions; confirm against the old operation.py.
    """
    if is_node(name, id):
        return ChangeSet()
    with conn[name].cursor() as cur:
        sql = f"insert into _node (id, type) values ('{id}', '{node_type}');"
        sql += table_sql
        sql += f"insert into coordinates (node, coord) values ('{id}', '({x}, {y})');"
        cur.execute(sql)
        # Stored redo/undo swap ' for " — NOTE(review): double-quoted strings
        # are identifiers in standard Postgres; confirm these are re-quoted
        # before re-execution.
        redo = sql.replace("'", '"')
        undo = f'delete from coordinates where node = "{id}";'
        undo += table_undo_sql
        undo += f'delete from _node where id = "{id}";'
        add_operation(name, redo, undo)
        change = ChangeSet()
        change.add(node_type, id)
        return change

def delete_node(name: str, node_type: str, id: str, table_sql: str, table_undo_sql: str) -> ChangeSet:
    """Delete a node, its coordinate, and its type-specific row.

    The undo restores all three (coordinate value captured before deletion).
    No-op when the node does not exist or has no coordinate row.
    """
    if not is_node(name, id):
        return ChangeSet()
    with conn[name].cursor(row_factory=dict_row) as cur:
        cur.execute(f"select * from coordinates where node = '{id}'")
        row = cur.fetchone()
        if row == None:
            return ChangeSet()
        coord = row['coord']
        sql = f"delete from coordinates where node = '{id}'; "
        sql += table_sql
        sql += f" delete from _node where id = '{id}';"
        cur.execute(sql)
        redo = sql.replace("'", '"')
        undo = f'insert into _node (id, type) values ("{id}", "{node_type}");'
        undo += table_undo_sql
        undo += f'insert into coordinates (node, coord) values ("{id}", "{coord}");'
        add_operation(name, redo, undo)
        change = ChangeSet()
        change.delete(node_type, id)
        return change

def add_link(name: str, link_type: str, id: str, table_sql: str, table_undo_sql: str) -> ChangeSet:
    """Insert a link (plus its type-specific row) and log the redo/undo pair.

    No-op when the link id already exists.
    """
    if is_link(name, id):
        return ChangeSet()
    with conn[name].cursor() as cur:
        sql = f"insert into _link (id, type) values ('{id}', '{link_type}');"
        sql += table_sql
        cur.execute(sql)
        redo = sql.replace("'", '"')
        undo = table_undo_sql
        undo += f'delete from _link where id = "{id}";'
        add_operation(name, redo, undo)
        change = ChangeSet()
        change.add(link_type, id)
        return change

def delete_link(name: str, link_type: str, id: str, table_sql: str, table_undo_sql: str) -> ChangeSet:
    """Delete a link and its type-specific row; undo restores both.

    No-op when the link does not exist.
    """
    if not is_link(name, id):
        return ChangeSet()
    with conn[name].cursor(row_factory=dict_row) as cur:
        sql = table_sql
        sql += f" delete from _link where id = '{id}';"
        cur.execute(sql)
        redo = sql.replace("'", '"')
        undo = f'insert into _link (id, type) values ("{id}", "{link_type}"); '
        undo += table_undo_sql
        add_operation(name, redo, undo)
        change = ChangeSet()
        change.delete(link_type, id)
        return change
def get_node_links(name: str, id: str) -> list[str]:
with conn[name].cursor(row_factory=dict_row) as cur:
links: list[str] = []

View File

@@ -1,18 +1,23 @@
from psycopg.rows import dict_row
from .operation import *
from .connection import g_conn_dict as conn
from .change_set import ChangeSet
def get_title(name: str) -> str:
    """Return the project's title text from the single-row title table."""
    with conn[name].cursor(row_factory=dict_row) as cur:
        cur.execute(f"select * from title")
        return cur.fetchone()['value']
def set_title(name: str, value: str) -> ChangeSet:
old = get_title(name)
def get_title_schema(name: str) -> dict[str, dict[str, Any]]:
    """Describe the title object's editable properties for the client.

    Bug fix: 'value' was declared with type 'float', but the title value is
    a string everywhere else in this module (get_title returns the text
    column; set_title writes it as a quoted string) — corrected to 'str'.
    """
    return {'value': {'type': 'str', 'optional': False, 'readonly': False}}
sql = f"update title set value = '{value}'"
undo = f"update title set value = ''{old}''"
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'set_title', API_UPDATE, 'title', '')
return get_current_change_set(name)
def get_title(name: str) -> dict[str, Any]:
    """Return the title as a property dict: {'value': <text>}."""
    title = read(name, 'select * from title')
    return { 'value': title['value'] }

def set_title(name: str, cs: ChangeSet) -> ChangeSet:
    """Set the title from the first batch operation and log redo/undo.

    NOTE(review): *new*/*old* are interpolated into SQL unescaped — a value
    containing a single quote breaks the statement (injection risk).
    """
    new = cs.operations[0]['value']
    old = get_title(name)['value']
    redo_sql = f"update title set value = '{new}';"
    undo_sql = f"update title set value = '{old}';"
    redo_cs = g_update_prefix | { 'type': 'title', 'value': new }
    undo_cs = g_update_prefix | { 'type': 'title', 'value': old }
    return execute_command(name, redo_sql, undo_sql, redo_cs, undo_cs)

View File

@@ -1,42 +1,11 @@
from psycopg.rows import dict_row, Row
from .connection import g_conn_dict as conn
from .s0_base import *
from .operation import *
from .change_set import ChangeSet
from .operation import read
def _to_client_point(coord: str) -> dict[str, float]:
coord = coord.removeprefix('(').removesuffix(')').split(',')
return { 'x': float(coord[0]), 'y': float(coord[1]) }
xy = coord.removeprefix('(').removesuffix(')').split(',')
return { 'x': float(xy[0]), 'y': float(xy[1]) }
def get_node_coord(name: str, id: str) -> dict[str, float] | None:
    """Return node *id*'s coordinate as {'x', 'y'}, or None when absent."""
    with conn[name].cursor(row_factory=dict_row) as cur:
        cur.execute(f"select * from coordinates where node = '{id}'")
        row = cur.fetchone()
        if row == None:
            return None
        # psycopg returns the point column as an object; normalize to str first.
        coord = str(row['coord'])
        return _to_client_point(coord)
'''
def set_node_coord(name: str, node_type: str, id: str, x: float, y: float) -> ChangeSet:
old = get_node_coord(name, id)
if old == None:
return ChangeSet()
old_x, old_y = old['x'], old['y']
with conn[name].cursor() as cur:
sql = f"update coordinates set coord = '({x},{y})' where node = '{id}'"
cur.execute(sql)
redo = sql.replace("'", '"')
undo = f'update coordinates set coord = "({old_x},{old_y})" where node = "{id}"'
add_operation(name, redo, undo)
change = ChangeSet()
change.update(node_type, id, 'coord')
return change
'''
def get_node_coord(name: str, id: str) -> dict[str, float]:
    """Return node *id*'s coordinate; raises (via read) when the node is missing."""
    row = read(name, f"select * from coordinates where node = '{id}'")
    return _to_client_point(row['coord'])

View File

@@ -1,105 +1,108 @@
from typing import Any
from psycopg.rows import Row
from .operation import *
from .s0_base import *
from .change_set import ChangeSet
from .s24_coordinates import *
from .utility import *
from .schema import *
schema: dict[str, dict[str, Any]] = { \
'id' : define_property(str_type, False, True), \
'elevation' : define_property(float_type), \
'demand' : define_property(float_type, True), \
'pattern' : define_property(str_type, True), \
'coord' : define_property(client_point_type), \
'links' : define_property(str_list_type, False, True)}
def get_junction_schema(name: str) -> dict[str, dict[str, Any]]:
return schema
return { 'id' : {'type': 'str' , 'optional': False , 'readonly': True },
'x' : {'type': 'float' , 'optional': False , 'readonly': False},
'y' : {'type': 'float' , 'optional': False , 'readonly': False},
'elevation' : {'type': "float" , 'optional': False , 'readonly': False},
'demand' : {'type': "float" , 'optional': True , 'readonly': False},
'pattern' : {'type': "str" , 'optional': True , 'readonly': False},
'links' : {'type': "str_list" , 'optional': False , 'readonly': True } }
def _query_junction(name: str, id: str) -> Row | None:
    """Fetch the raw junctions row for *id*.

    NOTE(review): annotated Row | None, but the newer read() raises when no
    row matches, so the None case cannot occur — confirm intended contract.
    """
    return read(name, f"select * from junctions where id = '{id}'")

def get_junction(name: str, id: str) -> dict[str, Any]:
    """Assemble the client-facing junction dict (row + coordinate + links)."""
    j = read(name, f"select * from junctions where id = '{id}'")
    xy = get_node_coord(name, id)
    d = {}
    d['id'] = str(j['id'])
    d['x'] = float(xy['x'])
    d['y'] = float(xy['y'])
    d['elevation'] = float(j['elevation'])
    # demand and pattern are nullable columns
    d['demand'] = float(j['demand']) if j['demand'] != None else None
    d['pattern'] = str(j['pattern']) if j['pattern'] != None else None
    d['links'] = get_node_links(name, id)
    return d
def add_junction(name: str, id: str, x: float, y: float, elevation: float) -> ChangeSet:
if is_junction(name, id):
return ChangeSet()
class Junction(object):
def __init__(self, input: dict[str, Any]) -> None:
self.type = 'junction'
self.id = str(input['id'])
self.x = float(input['x'])
self.y = float(input['y'])
self.elevation = float(input['elevation'])
self.demand = float(input['demand']) if 'demand' in input and input['demand'] != None else None
self.pattern = str(input['pattern']) if 'pattern' in input and input['pattern'] != None else None
sql = f"insert into _node (id, type) values ('{id}', '{JUNCTION}');"
sql += f"\ninsert into junctions (id, elevation) values ('{id}', {elevation});"
sql += f"\ninsert into coordinates (node, coord) values ('{id}', '({x}, {y})');"
self.f_type = f"'{self.type}'"
self.f_id = f"'{self.id}'"
self.f_coord = f"'({self.x}, {self.y})'"
self.f_elevation = self.elevation
self.f_demand = self.demand if self.demand != None else 'null'
self.f_pattern = f"'{self.pattern}'" if self.pattern != None else 'null'
undo = f"delete from coordinates where node = ''{id}'';"
undo += f"\ndelete from junctions where id = ''{id}'';"
undo += f"\ndelete from _node where id = ''{id}'';"
def as_dict(self) -> dict[str, Any]:
return { 'type': self.type, 'id': self.id, 'x': self.x, 'y': self.y, 'elevation': self.elevation, 'demand': self.demand, 'pattern': self.pattern }
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'add_junction', API_ADD, JUNCTION, id)
return get_current_change_set(name)
def as_id_dict(self) -> dict[str, Any]:
return { 'type': self.type, 'id': self.id }
def get_junction(name: str, id: str) -> dict[str, Any] | None:
row = _query_junction(name, id)
if row == None:
return None
def set_junction(name: str, cs: ChangeSet) -> ChangeSet:
old = Junction(get_junction(name, cs.operations[0]['id']))
raw_new = get_junction(name, cs.operations[0]['id'])
ps: dict[str, str] = {}
ps['id'] = id
ps['elevation'] = float(row['elevation'])
ps['demand'] = float(row['demand']) if row['demand'] != None else None
ps['pattern'] = row['pattern']
ps['coord'] = get_node_coord(name, id)
ps['links'] = get_node_links(name, id)
return ps
new_dict = cs.operations[0]
schema = get_junction_schema(name)
for key, value in schema.items():
if key in new_dict and not value['readonly']:
raw_new[key] = new_dict[key]
new = Junction(raw_new)
redo_sql = f"update junctions set elevation = {new.f_elevation}, demand = {new.f_demand}, pattern = {new.f_pattern} where id = {new.f_id};"
redo_sql += f"\nupdate coordinates set coord = {new.f_coord} where node = {new.f_id};"
undo_sql = f"update coordinates set coord = {old.f_coord} where node = {old.f_id};"
undo_sql += f"\nupdate junctions set elevation = {old.f_elevation}, demand = {old.f_demand}, pattern = {old.f_pattern} where id = {old.f_id};"
redo_cs = g_update_prefix | new.as_dict()
undo_cs = g_update_prefix | old.as_dict()
return execute_command(name, redo_sql, undo_sql, redo_cs, undo_cs)
def set_junction(name: str, id: str, properties: dict[str, Any]) -> ChangeSet:
if not is_junction(name, id):
return ChangeSet()
if 'pattern' in properties:
if not is_pattern(properties['pattern']):
return ChangeSet()
def add_junction(name: str, cs: ChangeSet) -> ChangeSet:
new = Junction(cs.operations[0])
old = Serialize(get_junction(name, id), schema).to_storage()
redo_sql = f"insert into _node (id, type) values ({new.f_id}, {new.f_type});"
redo_sql += f"\ninsert into junctions (id, elevation, demand, pattern) values ({new.f_id}, {new.f_elevation}, {new.f_demand}, {new.f_pattern});"
redo_sql += f"\ninsert into coordinates (node, coord) values ({new.f_id}, {new.f_coord});"
new = get_junction(name, id)
ps: list[str] = []
for key in properties:
if key in schema and schema[key]['readonly'] == False:
new[key] = properties[key]
ps.append(key)
new = Serialize(new, schema).to_execution()
undo_sql = f"delete from coordinates where node = {new.f_id};"
undo_sql += f"\ndelete from junctions where id = {new.f_id};"
undo_sql += f"\ndelete from _node where id = {new.f_id};"
sql = f"update junctions set elevation = {new['elevation']}, demand = {new['demand']}, pattern = {new['pattern']} where id = '{id}';"
undo = ""
if 'coord' in ps:
sql += f"\nupdate coordinates set coord = {new['coord']} where node = '{id}';"
undo = f"update coordinates set coord = {old['coord']} where node = ''{id}'';"
undo += f"\nupdate junctions set elevation = {old['elevation']}, demand = {old['demand']}, pattern = {old['pattern']} where id = ''{id}'';"
redo_cs = g_add_prefix | new.as_dict()
undo_cs = g_delete_prefix | new.as_id_dict()
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'set_junction', API_UPDATE, JUNCTION, id, ps)
return get_current_change_set(name)
return execute_command(name, redo_sql, undo_sql, redo_cs, undo_cs)
def delete_junction(name: str, id: str) -> ChangeSet:
row = get_junction(name, id)
if row == None:
return ChangeSet()
def delete_junction(name: str, cs: ChangeSet) -> ChangeSet:
old = Junction(get_junction(name, cs.operations[0]['id']))
old = Serialize(get_junction(name, id), schema).to_storage()
redo_sql = f"delete from coordinates where node = {old.f_id};"
redo_sql += f"\ndelete from junctions where id = {old.f_id};"
redo_sql += f"\ndelete from _node where id = {old.f_id};"
sql = f"delete from coordinates where node = '{id}';"
sql += f"\ndelete from junctions where id = '{id}';"
sql += f"\ndelete from _node where id = '{id}';"
undo_sql = f"insert into _node (id, type) values ({old.f_id}, {old.f_type});"
undo_sql += f"\ninsert into junctions (id, elevation, demand, pattern) values ({old.f_id}, {old.f_elevation}, {old.f_demand}, {old.f_pattern});"
undo_sql += f"\ninsert into coordinates (node, coord) values ({old.f_id}, {old.f_coord});"
undo = f"insert into _node (id, type) values (''{id}'', ''{JUNCTION}'');"
undo += f"\ninsert into junctions (id, elevation, demand, pattern) values (''{id}'', {old['elevation']}, {old['demand']}, {old['pattern']});"
undo += f"\ninsert into coordinates (node, coord) values (''{id}'', {old['coord']});"
redo_cs = g_delete_prefix | old.as_id_dict()
undo_cs = g_add_prefix | old.as_dict()
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'delete_junction', API_DELETE, JUNCTION, id)
return get_current_change_set(name)
return execute_command(name, redo_sql, undo_sql, redo_cs, undo_cs)

View File

@@ -1,103 +1,104 @@
from typing import Any
from psycopg.rows import Row
from .operation import *
from .s0_base import *
from .change_set import ChangeSet
from .s24_coordinates import *
from .utility import *
from .schema import *
schema: dict[str, dict[str, Any]] = { \
'id' : define_property(str_type, False, True), \
'head' : define_property(float_type), \
'pattern' : define_property(str_type, True), \
'coord' : define_property(client_point_type), \
'links' : define_property(str_list_type, False, True)}
def get_reservoir_schema(name: str) -> dict[str, dict[str, Any]]:
return schema
return { 'id' : {'type': 'str' , 'optional': False , 'readonly': True },
'x' : {'type': 'float' , 'optional': False , 'readonly': False},
'y' : {'type': 'float' , 'optional': False , 'readonly': False},
'head' : {'type': "float" , 'optional': False , 'readonly': False},
'pattern' : {'type': "str" , 'optional': True , 'readonly': False},
'links' : {'type': "str_list" , 'optional': False , 'readonly': True } }
def _query_reservoir(name: str, id: str) -> Row | None:
    """Fetch the raw reservoirs row for *id*.

    NOTE(review): annotated Row | None, but the newer read() raises when no
    row matches, so the None case cannot occur — confirm intended contract.
    """
    return read(name, f"select * from reservoirs where id = '{id}'")

def get_reservoir(name: str, id: str) -> dict[str, Any]:
    """Assemble the client-facing reservoir dict (row + coordinate + links)."""
    r = read(name, f"select * from reservoirs where id = '{id}'")
    xy = get_node_coord(name, id)
    d = {}
    d['id'] = str(r['id'])
    d['x'] = float(xy['x'])
    d['y'] = float(xy['y'])
    d['head'] = float(r['head'])
    # pattern is a nullable column
    d['pattern'] = str(r['pattern']) if r['pattern'] != None else None
    d['links'] = get_node_links(name, id)
    return d
def add_reservoir(name: str, id: str, x: float, y: float, head: float) -> ChangeSet:
if is_reservoir(name, id):
return ChangeSet()
class Reservoir(object):
def __init__(self, input: dict[str, Any]) -> None:
self.type = 'reservoir'
self.id = str(input['id'])
self.x = float(input['x'])
self.y = float(input['y'])
self.head = float(input['head'])
self.pattern = str(input['pattern']) if 'pattern' in input and input['pattern'] != None else None
sql = f"insert into _node (id, type) values ('{id}', '{RESERVOIR}');"
sql += f"\ninsert into reservoirs (id, head) values ('{id}', {head});"
sql += f"\ninsert into coordinates (node, coord) values ('{id}', '({x}, {y})');"
self.f_type = f"'{self.type}'"
self.f_id = f"'{self.id}'"
self.f_coord = f"'({self.x}, {self.y})'"
self.f_head = self.head
self.f_pattern = f"'{self.pattern}'" if self.pattern != None else 'null'
undo = f"delete from coordinates where node = ''{id}'';"
undo += f"\ndelete from reservoirs where id = ''{id}'';"
undo += f"\ndelete from _node where id = ''{id}'';"
def as_dict(self) -> dict[str, Any]:
return { 'type': self.type, 'id': self.id, 'x': self.x, 'y': self.y, 'head': self.head, 'pattern': self.pattern }
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'add_reservoir', API_ADD, RESERVOIR, id)
return get_current_change_set(name)
def as_id_dict(self) -> dict[str, Any]:
return { 'type': self.type, 'id': self.id }
def get_reservoir(name: str, id: str) -> dict[str, Any] | None:
row = _query_reservoir(name, id)
if row == None:
return None
def set_reservoir(name: str, cs: ChangeSet) -> ChangeSet:
old = Reservoir(get_reservoir(name, cs.operations[0]['id']))
raw_new = get_reservoir(name, cs.operations[0]['id'])
ps: dict[str, str] = {}
ps['id'] = id
ps['head'] = float(row['head'])
ps['pattern'] = row['pattern']
ps['coord'] = get_node_coord(name, id)
ps['links'] = get_node_links(name, id)
return ps
new_dict = cs.operations[0]
schema = get_reservoir_schema(name)
for key, value in schema.items():
if key in new_dict and not value['readonly']:
raw_new[key] = new_dict[key]
new = Reservoir(raw_new)
redo_sql = f"update reservoirs set head = {new.f_head}, pattern = {new.f_pattern} where id = {new.f_id};"
redo_sql += f"\nupdate coordinates set coord = {new.f_coord} where node = {new.f_id};"
undo_sql = f"update coordinates set coord = {old.f_coord} where node = {old.f_id};"
undo_sql += f"\nupdate reservoirs set head = {old.f_head}, pattern = {old.f_pattern} where id = {old.f_id};"
redo_cs = g_update_prefix | new.as_dict()
undo_cs = g_update_prefix | old.as_dict()
return execute_command(name, redo_sql, undo_sql, redo_cs, undo_cs)
def set_reservoir(name: str, id: str, properties: dict[str, Any]) -> ChangeSet:
if not is_reservoir(name, id):
return ChangeSet()
if 'pattern' in properties:
if not is_pattern(properties['pattern']):
return ChangeSet()
def add_reservoir(name: str, cs: ChangeSet) -> ChangeSet:
new = Reservoir(cs.operations[0])
old = Serialize(get_reservoir(name, id), schema).to_storage()
redo_sql = f"insert into _node (id, type) values ({new.f_id}, {new.f_type});"
redo_sql += f"\ninsert into reservoirs (id, head, pattern) values ({new.f_id}, {new.f_head}, {new.f_pattern});"
redo_sql += f"\ninsert into coordinates (node, coord) values ({new.f_id}, {new.f_coord});"
new = get_reservoir(name, id)
ps: list[str] = []
for key in properties:
if key in schema and schema[key]['readonly'] == False:
new[key] = properties[key]
ps.append(key)
new = Serialize(new, schema).to_execution()
undo_sql = f"delete from coordinates where node = {new.f_id};"
undo_sql += f"\ndelete from reservoirs where id = {new.f_id};"
undo_sql += f"\ndelete from _node where id = {new.f_id};"
sql = f"update reservoirs set head = {new['head']}, pattern = {new['pattern']} where id = '{id}';"
undo = ""
if 'coord' in ps:
sql += f"\nupdate coordinates set coord = {new['coord']} where node = '{id}';"
undo = f"update coordinates set coord = {old['coord']} where node = ''{id}'';"
undo += f"\nupdate reservoirs set head = {old['head']}, pattern = {old['pattern']} where id = ''{id}'';"
redo_cs = g_add_prefix | new.as_dict()
undo_cs = g_delete_prefix | new.as_id_dict()
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'set_reservoir', API_UPDATE, RESERVOIR, id, ps)
return get_current_change_set(name)
return execute_command(name, redo_sql, undo_sql, redo_cs, undo_cs)
def delete_reservoir(name: str, id: str) -> ChangeSet:
row = get_reservoir(name, id)
if row == None:
return ChangeSet()
def delete_reservoir(name: str, cs: ChangeSet) -> ChangeSet:
old = Reservoir(get_reservoir(name, cs.operations[0]['id']))
old = Serialize(get_reservoir(name, id), schema).to_storage()
redo_sql = f"delete from coordinates where node = {old.f_id};"
redo_sql += f"\ndelete from reservoirs where id = {old.f_id};"
redo_sql += f"\ndelete from _node where id = {old.f_id};"
sql = f"delete from coordinates where node = '{id}';"
sql += f"\ndelete from reservoirs where id = '{id}';"
sql += f"\ndelete from _node where id = '{id}';"
undo_sql = f"insert into _node (id, type) values ({old.f_id}, {old.f_type});"
undo_sql += f"\ninsert into reservoirs (id, head, pattern) values ({old.f_id}, {old.f_head}, {old.f_pattern});"
undo_sql += f"\ninsert into coordinates (node, coord) values ({old.f_id}, {old.f_coord});"
undo = f"insert into _node (id, type) values (''{id}'', ''{RESERVOIR}'');"
undo += f"\ninsert into reservoirs (id, head, pattern) values (''{id}'', {old['head']}, {old['pattern']});"
undo += f"\ninsert into coordinates (node, coord) values (''{id}'', {old['coord']});"
redo_cs = g_delete_prefix | old.as_id_dict()
undo_cs = g_add_prefix | old.as_dict()
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'delete_reservoir', API_DELETE, RESERVOIR, id)
return get_current_change_set(name)
return execute_command(name, redo_sql, undo_sql, redo_cs, undo_cs)

View File

@@ -1,127 +1,132 @@
from typing import Any
from psycopg.rows import Row
from .operation import *
from .s0_base import *
from .change_set import ChangeSet
from .s24_coordinates import *
from .utility import *
from .schema import *
OVERFLOW_YES = 'yes'
OVERFLOW_NO = 'no'
# Editable-property schema for tanks: property name -> descriptor built by
# define_property(type, optional, readonly) -- second argument is 'optional',
# third is 'readonly', matching the expanded hard-coded tables in this file.
schema: dict[str, dict[str, Any]] = { \
    'id'        : define_property(str_type, False, True), \
    'elevation' : define_property(float_type), \
    'init_level': define_property(float_type), \
    'min_level' : define_property(float_type), \
    'max_level' : define_property(float_type), \
    'diameter'  : define_property(float_type), \
    'min_vol'   : define_property(float_type), \
    'vol_curve' : define_property(str_type, True), \
    'overflow'  : define_property(str_type, True), \
    'coord'     : define_property(client_point_type), \
    'links'     : define_property(str_list_type, False, True)}
def get_tank_schema(name: str) -> dict[str, dict[str, Any]]:
    """Return the editable-property schema for tanks.

    Returns the module-level `schema` table (property name -> descriptor).
    """
    # Fixed: an older hard-coded copy of this table sat after the return as
    # unreachable code; it has been removed.
    return schema
def _query_tank(name: str, id: str) -> Row | None:
    """Fetch the raw tanks row for *id*, or None when no such row exists."""
    query = f"select * from tanks where id = '{id}'"
    return read(name, query)
def get_tank(name: str, id: str) -> dict[str, Any]:
    """Read one tank row plus its coordinate and incident links.

    NOTE(review): assumes the id exists -- a missing row would raise here.
    """
    t = read(name, f"select * from tanks where id = '{id}'")
    xy = get_node_coord(name, id)
    return {
        'id': str(t['id']),
        'x': float(xy['x']),
        'y': float(xy['y']),
        'elevation': float(t['elevation']),
        'init_level': float(t['init_level']),
        'min_level': float(t['min_level']),
        'max_level': float(t['max_level']),
        'diameter': float(t['diameter']),
        'min_vol': float(t['min_vol']),
        # nullable columns: preserve None instead of the string 'None'
        'vol_curve': str(t['vol_curve']) if t['vol_curve'] is not None else None,
        'overflow': str(t['overflow']) if t['overflow'] is not None else None,
        'links': get_node_links(name, id),
    }
def add_tank(name: str, id: str, x: float, y: float, elevation: float, init_level: float = 0, min_level: float = 0, max_level: float = 0, diameter: float = 0, min_vol: float = 0) -> ChangeSet:
if is_tank(name, id):
return ChangeSet()
class Tank(object):
def __init__(self, input: dict[str, Any]) -> None:
self.type = 'tank'
self.id = str(input['id'])
self.x = float(input['x'])
self.y = float(input['y'])
self.elevation = float(input['elevation'])
self.init_level = float(input['init_level'])
self.min_level = float(input['min_level'])
self.max_level = float(input['max_level'])
self.diameter = float(input['diameter'])
self.min_vol = float(input['min_vol'])
self.vol_curve = str(input['vol_curve']) if 'vol_curve' in input and input['vol_curve'] != None else None
self.overflow = str(input['overflow']) if 'overflow' in input and input['overflow'] != None else None
sql = f"insert into _node (id, type) values ('{id}', '{TANK}');"
sql += f"\ninsert into tanks (id, elevation, init_level, min_level, max_level, diameter, min_vol) values ('{id}', {elevation}, {init_level}, {min_level}, {max_level}, {diameter}, {min_vol});"
sql += f"\ninsert into coordinates (node, coord) values ('{id}', '({x}, {y})');"
self.f_type = f"'{self.type}'"
self.f_id = f"'{self.id}'"
self.f_coord = f"'({self.x}, {self.y})'"
self.f_elevation = self.elevation
self.f_init_level = self.init_level
self.f_min_level = self.min_level
self.f_max_level = self.max_level
self.f_diameter = self.diameter
self.f_min_vol = self.min_vol
self.f_vol_curve = f"'{self.vol_curve}'" if self.vol_curve != None else 'null'
self.f_overflow = f"'{self.overflow}'" if self.overflow != None else 'null'
undo = f"delete from coordinates where node = ''{id}'';"
undo += f"\ndelete from tanks where id = ''{id}'';"
undo += f"\ndelete from _node where id = ''{id}'';"
def as_dict(self) -> dict[str, Any]:
return { 'type': self.type, 'id': self.id, 'x': self.x, 'y': self.y, 'elevation': self.elevation, 'init_level': self.init_level, 'min_level': self.min_level, 'max_level': self.max_level, 'diameter': self.diameter, 'min_vol': self.min_vol, 'vol_curve': self.vol_curve, 'overflow': self.overflow }
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'add_tank', API_ADD, TANK, id)
return get_current_change_set(name)
def as_id_dict(self) -> dict[str, Any]:
return { 'type': self.type, 'id': self.id }
def get_tank(name: str, id: str) -> dict[str, Any] | None:
row = _query_tank(name, id)
if row == None:
return None
def set_tank(name: str, cs: ChangeSet) -> ChangeSet:
old = Tank(get_tank(name, cs.operations[0]['id']))
raw_new = get_tank(name, cs.operations[0]['id'])
ps: dict[str, str] = {}
ps['id'] = id
ps['elevation'] = float(row['elevation'])
ps['init_level'] = float(row['init_level'])
ps['min_level'] = float(row['min_level'])
ps['max_level'] = float(row['max_level'])
ps['diameter'] = float(row['diameter'])
ps['min_vol'] = float(row['min_vol'])
ps['vol_curve'] = row['vol_curve']
ps['overflow'] = row['overflow']
ps['coord'] = get_node_coord(name, id)
ps['links'] = get_node_links(name, id)
return ps
new_dict = cs.operations[0]
schema = get_tank_schema(name)
for key, value in schema.items():
if key in new_dict and not value['readonly']:
raw_new[key] = new_dict[key]
new = Tank(raw_new)
redo_sql = f"update tanks set elevation = {new.f_elevation}, init_level = {new.f_init_level}, min_level = {new.f_min_level}, max_level = {new.f_max_level}, diameter = {new.f_diameter}, min_vol = {new.f_min_vol}, vol_curve = {new.f_vol_curve}, overflow = {new.f_overflow} where id = {new.f_id};"
redo_sql += f"\nupdate coordinates set coord = {new.f_coord} where node = {new.f_id};"
undo_sql = f"update coordinates set coord = {old.f_coord} where node = {old.f_id};"
undo_sql += f"\nupdate tanks set elevation = {old.f_elevation}, init_level = {old.f_init_level}, min_level = {old.f_min_level}, max_level = {old.f_max_level}, diameter = {old.f_diameter}, min_vol = {old.f_min_vol}, vol_curve = {old.f_vol_curve}, overflow = {old.f_overflow} where id = {old.f_id};"
redo_cs = g_update_prefix | new.as_dict()
undo_cs = g_update_prefix | old.as_dict()
return execute_command(name, redo_sql, undo_sql, redo_cs, undo_cs)
def set_tank(name: str, id: str, properties: dict[str, Any]) -> ChangeSet:
if not is_tank(name, id):
return ChangeSet()
if 'vol_curve' in properties:
if not is_curve(properties['vol_curve']):
return ChangeSet()
if 'overflow' in properties:
if properties['overflow'] != OVERFLOW_YES and properties['overflow'] != OVERFLOW_NO:
return ChangeSet()
def add_tank(name: str, cs: ChangeSet) -> ChangeSet:
new = Tank(cs.operations[0])
old = Serialize(get_tank(name, id), schema).to_storage()
redo_sql = f"insert into _node (id, type) values ({new.f_id}, {new.f_type});"
redo_sql += f"\ninsert into tanks (id, elevation, init_level, min_level, max_level, diameter, min_vol, vol_curve, overflow) values ({new.f_id}, {new.f_elevation}, {new.f_init_level}, {new.f_min_level}, {new.f_max_level}, {new.f_diameter}, {new.f_min_vol}, {new.f_vol_curve}, {new.f_overflow});"
redo_sql += f"\ninsert into coordinates (node, coord) values ({new.f_id}, {new.f_coord});"
new = get_tank(name, id)
ps: list[str] = []
for key in properties:
if key in schema and schema[key]['readonly'] == False:
new[key] = properties[key]
ps.append(key)
new = Serialize(new, schema).to_execution()
undo_sql = f"delete from coordinates where node = {new.f_id};"
undo_sql += f"\ndelete from tanks where id = {new.f_id};"
undo_sql += f"\ndelete from _node where id = {new.f_id};"
sql = f"update tanks set elevation = {new['elevation']}, \
init_level = {new['init_level']}, min_level = {new['min_level']}, max_level = {new['max_level']}, \
diameter = {new['diameter']}, min_vol = {new['min_vol']}, vol_curve = {new['vol_curve']}, overflow = {new['overflow']} where id = '{id}';"
undo = ""
if 'coord' in ps:
sql += f"\nupdate coordinates set coord = {new['coord']} where node = '{id}';"
undo = f"update coordinates set coord = {old['coord']} where node = ''{id}'';"
undo += f"\nupdate tanks set elevation = {old['elevation']}, \
init_level = {old['init_level']}, min_level = {old['min_level']}, max_level = {old['max_level']}, \
diameter = {old['diameter']}, min_vol = {old['min_vol']}, vol_curve = {old['vol_curve']}, overflow = {old['overflow']} where id = ''{id}'';"
redo_cs = g_add_prefix | new.as_dict()
undo_cs = g_delete_prefix | new.as_id_dict()
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'set_tank', API_UPDATE, TANK, id, ps)
return get_current_change_set(name)
return execute_command(name, redo_sql, undo_sql, redo_cs, undo_cs)
def delete_tank(name: str, id: str) -> ChangeSet:
row = get_tank(name, id)
if row == None:
return ChangeSet()
def delete_tank(name: str, cs: ChangeSet) -> ChangeSet:
old = Tank(get_tank(name, cs.operations[0]['id']))
old = Serialize(get_tank(name, id), schema).to_storage()
redo_sql = f"delete from coordinates where node = {old.f_id};"
redo_sql += f"\ndelete from tanks where id = {old.f_id};"
redo_sql += f"\ndelete from _node where id = {old.f_id};"
sql = f"delete from coordinates where node = '{id}';"
sql += f"\ndelete from tanks where id = '{id}';"
sql += f"\ndelete from _node where id = '{id}';"
undo_sql = f"insert into _node (id, type) values ({old.f_id}, {old.f_type});"
undo_sql += f"\ninsert into tanks (id, elevation, init_level, min_level, max_level, diameter, min_vol, vol_curve, overflow) values ({old.f_id}, {old.f_elevation}, {old.f_init_level}, {old.f_min_level}, {old.f_max_level}, {old.f_diameter}, {old.f_min_vol}, {old.f_vol_curve}, {old.f_overflow});"
undo_sql += f"\ninsert into coordinates (node, coord) values ({old.f_id}, {old.f_coord});"
undo = f"insert into _node (id, type) values (''{id}'', ''{TANK}'');"
undo += f"\ninsert into tanks (id, elevation, init_level, min_level, max_level, diameter, min_vol, vol_curve, overflow) \
values (''{id}'', {old['elevation']}, {old['init_level']}, {old['min_level']}, {old['max_level']}, {old['diameter']}, {old['min_vol']}, {old['vol_curve']}, {old['overflow']});"
undo += f"\ninsert into coordinates (node, coord) values (''{id}'', {old['coord']});"
redo_cs = g_delete_prefix | old.as_id_dict()
undo_cs = g_add_prefix | old.as_dict()
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'delete_tank', API_DELETE, TANK, id)
return get_current_change_set(name)
return execute_command(name, redo_sql, undo_sql, redo_cs, undo_cs)

View File

@@ -1,10 +1,5 @@
from typing import Any
from psycopg.rows import Row
from .operation import *
from .s0_base import *
from .change_set import ChangeSet
from .s24_coordinates import *
from .utility import *
from .schema import *
PIPE_STATUS_OPEN = 'open'
@@ -12,125 +7,105 @@ PIPE_STATUS_CLOSED = 'closed'
PIPE_STATUS_CV = 'cv'
# Editable-property schema for pipes: property name -> descriptor built by
# define_property(type, optional, readonly) -- second argument is 'optional',
# third is 'readonly', matching the expanded hard-coded tables in this file.
schema: dict[str, dict[str, Any]] = { \
    'id'        : define_property(str_type, False, True), \
    'node1'     : define_property(str_type), \
    'node2'     : define_property(str_type), \
    'length'    : define_property(float_type), \
    'diameter'  : define_property(float_type), \
    'roughness' : define_property(float_type), \
    'minor_loss': define_property(float_type), \
    'status'    : define_property(str_type)}
def get_pipe_schema(name: str) -> dict[str, dict[str, Any]]:
    """Return the editable-property schema for pipes.

    Returns the module-level `schema` table (property name -> descriptor).
    """
    # Fixed: an older hard-coded copy of this table sat after the return as
    # unreachable code; it has been removed.
    return schema
def _query_pipe(name: str, id: str) -> Row | None:
    """Fetch the raw pipes row for *id*, or None when no such row exists."""
    query = f"select * from pipes where id = '{id}'"
    return read(name, query)
def get_pipe(name: str, id: str) -> dict[str, Any]:
    """Read one pipe row and coerce every column to its client-facing type."""
    row = read(name, f"select * from pipes where id = '{id}'")
    return {
        'id': str(row['id']),
        'node1': str(row['node1']),
        'node2': str(row['node2']),
        'length': float(row['length']),
        'diameter': float(row['diameter']),
        'roughness': float(row['roughness']),
        'minor_loss': float(row['minor_loss']),
        'status': str(row['status']),
    }
def _get_pipe_node1(name: str, id: str) -> str | None:
    """Return the `node1` column of pipe *id*, or None if the pipe is absent."""
    row = _query_pipe(name, id)
    # 'is not None' (PEP 8) instead of '!= None'
    return row['node1'] if row is not None else None
class Pipe(object):
    """In-memory pipe record plus SQL-ready field literals (`f_*`).

    String attributes get a single-quoted `f_*` form for direct splicing
    into SQL text; numeric attributes are left bare.
    """

    def __init__(self, input: dict[str, Any]) -> None:
        self.type = 'pipe'
        self.id = str(input['id'])
        self.node1 = str(input['node1'])
        self.node2 = str(input['node2'])
        self.length = float(input['length'])
        self.diameter = float(input['diameter'])
        self.roughness = float(input['roughness'])
        self.minor_loss = float(input['minor_loss'])
        self.status = str(input['status'])
        quote = "'{}'".format
        self.f_type = quote(self.type)
        self.f_id = quote(self.id)
        self.f_node1 = quote(self.node1)
        self.f_node2 = quote(self.node2)
        self.f_length = self.length
        self.f_diameter = self.diameter
        self.f_roughness = self.roughness
        self.f_minor_loss = self.minor_loss
        self.f_status = quote(self.status)

    def as_dict(self) -> dict[str, Any]:
        """Full property dict for change-set payloads."""
        d = self.as_id_dict()
        d.update(node1=self.node1, node2=self.node2, length=self.length,
                 diameter=self.diameter, roughness=self.roughness,
                 minor_loss=self.minor_loss, status=self.status)
        return d

    def as_id_dict(self) -> dict[str, Any]:
        """Minimal identifying dict (element type + id)."""
        return {'type': self.type, 'id': self.id}
def _get_pipe_node2(name: str, id: str) -> str | None:
    """Return the `node2` column of pipe *id*, or None if the pipe is absent."""
    row = _query_pipe(name, id)
    # 'is not None' (PEP 8) instead of '!= None'
    return row['node2'] if row is not None else None
def set_pipe(name: str, cs: ChangeSet) -> ChangeSet:
    """Update an existing pipe from the first operation in *cs*.

    Merges the writable properties supplied by the operation over the
    current row, then executes the update with matching undo SQL and
    change-set payloads.
    """
    op = cs.operations[0]
    old = Pipe(get_pipe(name, op['id']))
    merged = get_pipe(name, op['id'])
    for key, spec in get_pipe_schema(name).items():
        # only writable properties present in the operation are applied
        if not spec['readonly'] and key in op:
            merged[key] = op[key]
    new = Pipe(merged)
    redo_sql = f"update pipes set node1 = {new.f_node1}, node2 = {new.f_node2}, length = {new.f_length}, diameter = {new.f_diameter}, roughness = {new.f_roughness}, minor_loss = {new.f_minor_loss}, status = {new.f_status} where id = {new.f_id};"
    undo_sql = f"update pipes set node1 = {old.f_node1}, node2 = {old.f_node2}, length = {old.f_length}, diameter = {old.f_diameter}, roughness = {old.f_roughness}, minor_loss = {old.f_minor_loss}, status = {old.f_status} where id = {old.f_id};"
    return execute_command(name, redo_sql, undo_sql,
                           g_update_prefix | new.as_dict(),
                           g_update_prefix | old.as_dict())
def add_pipe(name: str, id: str, node1: str, node2: str, length: float = 0, diameter: float = 0, roughness: float = 0, minor_loss: float = 0, status: str = PIPE_STATUS_OPEN) -> ChangeSet:
if is_pipe(name, id):
return ChangeSet()
if not is_node(name, node1):
return ChangeSet()
if not is_node(name, node2):
return ChangeSet()
if node1 == node2:
return ChangeSet()
if status != PIPE_STATUS_OPEN and status != PIPE_STATUS_CLOSED and status != PIPE_STATUS_CV:
return ChangeSet()
def add_pipe(name: str, cs: ChangeSet) -> ChangeSet:
new = Pipe(cs.operations[0])
sql = f"insert into _link (id, type) values ('{id}', '{PIPE}');"
sql += f"\ninsert into pipes (id, node1, node2, length, diameter, roughness, minor_loss, status) values ('{id}', '{node1}', '{node2}', {length}, {diameter}, {roughness}, {minor_loss}, '{status}');"
redo_sql = f"insert into _link (id, type) values ({new.f_id}, {new.f_type});"
redo_sql += f"\ninsert into pipes (id, node1, node2, length, diameter, roughness, minor_loss, status) values ({new.f_id}, {new.f_node1}, {new.f_node2}, {new.f_length}, {new.f_diameter}, {new.f_roughness}, {new.f_minor_loss}, {new.f_status});"
undo = f"delete from pipes where id = ''{id}'';"
undo += f"\ndelete from _link where id = ''{id}'';"
undo_sql = f"delete from pipes where id = {new.f_id};"
undo_sql += f"\ndelete from _link where id = {new.f_id};"
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'add_pipe', API_ADD, PIPE, id)
return get_current_change_set(name)
redo_cs = g_add_prefix | new.as_dict()
undo_cs = g_delete_prefix | new.as_id_dict()
return execute_command(name, redo_sql, undo_sql, redo_cs, undo_cs)
def get_pipe(name: str, id: str) -> dict[str, Any] | None:
row = _query_pipe(name, id)
if row == None:
return None
def delete_pipe(name: str, cs: ChangeSet) -> ChangeSet:
old = Pipe(get_pipe(name, cs.operations[0]['id']))
ps: dict[str, str] = {}
ps['id'] = id
ps['node1'] = row['node1']
ps['node2'] = row['node2']
ps['length'] = float(row['length'])
ps['diameter'] = float(row['diameter'])
ps['roughness'] = float(row['roughness'])
ps['minor_loss'] = float(row['minor_loss'])
ps['status'] = row['status']
return ps
redo_sql = f"delete from pipes where id = {old.f_id};"
redo_sql += f"\ndelete from _link where id = {old.f_id};"
undo_sql = f"insert into _link (id, type) values ({old.f_id}, {old.f_type});"
undo_sql += f"\ninsert into pipes (id, node1, node2, length, diameter, roughness, minor_loss, status) values ({old.f_id}, {old.f_node1}, {old.f_node2}, {old.f_length}, {old.f_diameter}, {old.f_roughness}, {old.f_minor_loss}, {old.f_status});"
def set_pipe(name: str, id: str, properties: dict[str, Any]) -> ChangeSet:
if not is_pipe(name, id):
return ChangeSet()
if 'node1' in properties:
if not is_node(name, properties['node1']) or _get_pipe_node2(name, id) == properties['node1']:
return ChangeSet()
if 'node2' in properties:
if not is_node(name, properties['node2']) or _get_pipe_node1(name, id) == properties['node2']:
return ChangeSet()
if 'node1' in properties and 'node2' in properties:
if properties['node1'] == properties['node2']:
return ChangeSet()
if 'status' in properties:
if properties['status'] != PIPE_STATUS_OPEN and properties['status'] != PIPE_STATUS_CLOSED and properties['status'] != PIPE_STATUS_CV:
return ChangeSet()
redo_cs = g_delete_prefix | old.as_id_dict()
undo_cs = g_add_prefix | old.as_dict()
old = Serialize(get_pipe(name, id), schema).to_storage()
new = get_pipe(name, id)
ps: list[str] = []
for key in properties:
if key in schema and schema[key]['readonly'] == False:
new[key] = properties[key]
ps.append(key)
new = Serialize(new, schema).to_execution()
sql = f"update pipes set node1 = {new['node1']}, node2 = {new['node2']}, \
length = {new['length']}, diameter = {new['diameter']}, roughness = {new['roughness']}, minor_loss = {new['minor_loss']}, status = {new['status']} where id = '{id}';"
undo = f"update pipes set node1 = {old['node1']}, node2 = {old['node2']}, \
length = {old['length']}, diameter = {old['diameter']}, roughness = {old['roughness']}, minor_loss = {old['minor_loss']}, status = {old['status']} where id = ''{id}'';"
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'set_pipe', API_UPDATE, PIPE, id, ps)
return get_current_change_set(name)
def delete_pipe(name: str, id: str) -> ChangeSet:
row = get_pipe(name, id)
if row == None:
return ChangeSet()
old = Serialize(get_pipe(name, id), schema).to_storage()
sql = f"delete from pipes where id = '{id}';"
sql += f"\ndelete from _link where id = '{id}';"
undo = f"insert into _link (id, type) values (''{id}'', ''{PIPE}'');"
undo += f"\ninsert into pipes (id, node1, node2, length, diameter, roughness, minor_loss, status) \
values (''{id}'', {old['node1']}, {old['node2']}, {old['length']}, {old['diameter']}, {old['roughness']}, {old['minor_loss']}, {old['status']});"
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'delete_pipe', API_DELETE, PIPE, id)
return get_current_change_set(name)
return execute_command(name, redo_sql, undo_sql, redo_cs, undo_cs)

View File

@@ -1,113 +1,86 @@
from typing import Any
from psycopg.rows import Row
from .operation import *
from .s0_base import *
from .change_set import ChangeSet
from .s24_coordinates import *
from .utility import *
from .schema import *
# Editable-property schema for pumps: property name -> descriptor built by
# define_property(type, optional, readonly) -- second argument is 'optional',
# third is 'readonly', matching the expanded hard-coded tables in this file.
schema: dict[str, dict[str, Any]] = { \
    'id'    : define_property(str_type, False, True), \
    'node1' : define_property(str_type), \
    'node2' : define_property(str_type)}
def get_pump_schema(name: str) -> dict[str, dict[str, Any]]:
    """Return the editable-property schema for pumps.

    Returns the module-level `schema` table (property name -> descriptor).
    """
    # Fixed: an older hard-coded copy of this table sat after the return as
    # unreachable code; it has been removed.
    return schema
def _query_pump(name: str, id: str) -> Row | None:
    """Fetch the raw pumps row for *id*, or None when no such row exists."""
    query = f"select * from pumps where id = '{id}'"
    return read(name, query)
def get_pump(name: str, id: str) -> dict[str, Any]:
    """Read one pump row and coerce every column to its client-facing type."""
    row = read(name, f"select * from pumps where id = '{id}'")
    return {
        'id': str(row['id']),
        'node1': str(row['node1']),
        'node2': str(row['node2']),
    }
def _get_pump_node1(name: str, id: str) -> str | None:
    """Return the `node1` column of pump *id*, or None if the pump is absent."""
    row = _query_pump(name, id)
    # 'is not None' (PEP 8) instead of '!= None'
    return row['node1'] if row is not None else None
class Pump(object):
    """In-memory pump record plus SQL-ready field literals (`f_*`).

    String attributes get a single-quoted `f_*` form for direct splicing
    into SQL text.
    """

    def __init__(self, input: dict[str, Any]) -> None:
        self.type = 'pump'
        self.id = str(input['id'])
        self.node1 = str(input['node1'])
        self.node2 = str(input['node2'])
        quote = "'{}'".format
        self.f_type = quote(self.type)
        self.f_id = quote(self.id)
        self.f_node1 = quote(self.node1)
        self.f_node2 = quote(self.node2)

    def as_dict(self) -> dict[str, Any]:
        """Full property dict for change-set payloads."""
        d = self.as_id_dict()
        d.update(node1=self.node1, node2=self.node2)
        return d

    def as_id_dict(self) -> dict[str, Any]:
        """Minimal identifying dict (element type + id)."""
        return {'type': self.type, 'id': self.id}
def _get_pump_node2(name: str, id: str) -> str | None:
    """Return the `node2` column of pump *id*, or None if the pump is absent."""
    row = _query_pump(name, id)
    # 'is not None' (PEP 8) instead of '!= None'
    return row['node2'] if row is not None else None
def set_pump(name: str, cs: ChangeSet) -> ChangeSet:
    """Update an existing pump from the first operation in *cs*.

    Merges the writable properties supplied by the operation over the
    current row, then executes the update with matching undo SQL and
    change-set payloads.
    """
    op = cs.operations[0]
    old = Pump(get_pump(name, op['id']))
    merged = get_pump(name, op['id'])
    for key, spec in get_pump_schema(name).items():
        # only writable properties present in the operation are applied
        if not spec['readonly'] and key in op:
            merged[key] = op[key]
    new = Pump(merged)
    redo_sql = f"update pumps set node1 = {new.f_node1}, node2 = {new.f_node2} where id = {new.f_id};"
    undo_sql = f"update pumps set node1 = {old.f_node1}, node2 = {old.f_node2} where id = {old.f_id};"
    return execute_command(name, redo_sql, undo_sql,
                           g_update_prefix | new.as_dict(),
                           g_update_prefix | old.as_dict())
def add_pump(name: str, id: str, node1: str, node2: str) -> ChangeSet:
if is_pump(name, id):
return ChangeSet()
if not is_node(name, node1):
return ChangeSet()
if not is_node(name, node2):
return ChangeSet()
if node1 == node2:
return ChangeSet()
def add_pump(name: str, cs: ChangeSet) -> ChangeSet:
new = Pump(cs.operations[0])
sql = f"insert into _link (id, type) values ('{id}', '{PUMP}');"
sql += f"\ninsert into pumps (id, node1, node2) values ('{id}', '{node1}', '{node2}');"
redo_sql = f"insert into _link (id, type) values ({new.f_id}, {new.f_type});"
redo_sql += f"\ninsert into pumps (id, node1, node2) values ({new.f_id}, {new.f_node1}, {new.f_node2});"
undo = f"delete from pumps where id = ''{id}'';"
undo += f"\ndelete from _link where id = ''{id}'';"
undo_sql = f"delete from pumps where id = {new.f_id};"
undo_sql += f"\ndelete from _link where id = {new.f_id};"
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'add_pump', API_ADD, PUMP, id)
return get_current_change_set(name)
redo_cs = g_add_prefix | new.as_dict()
undo_cs = g_delete_prefix | new.as_id_dict()
return execute_command(name, redo_sql, undo_sql, redo_cs, undo_cs)
def get_pump(name: str, id: str) -> dict[str, Any] | None:
row = _query_pump(name, id)
if row == None:
return None
def delete_pump(name: str, cs: ChangeSet) -> ChangeSet:
old = Pump(get_pump(name, cs.operations[0]['id']))
ps: dict[str, str] = {}
ps['id'] = id
ps['node1'] = row['node1']
ps['node2'] = row['node2']
return ps
redo_sql = f"delete from pumps where id = {old.f_id};"
redo_sql += f"\ndelete from _link where id = {old.f_id};"
undo_sql = f"insert into _link (id, type) values ({old.f_id}, {old.f_type});"
undo_sql += f"\ninsert into pumps (id, node1, node2) values ({old.f_id}, {old.f_node1}, {old.f_node2});"
def set_pump(name: str, id: str, properties: dict[str, Any]) -> ChangeSet:
if not is_pump(name, id):
return ChangeSet()
if 'node1' in properties:
if not is_node(name, properties['node1']) or _get_pump_node2(name, id) == properties['node1']:
return ChangeSet()
if 'node2' in properties:
if not is_node(name, properties['node2']) or _get_pump_node1(name, id) == properties['node2']:
return ChangeSet()
if 'node1' in properties and 'node2' in properties:
if properties['node1'] == properties['node2']:
return ChangeSet()
redo_cs = g_delete_prefix | old.as_id_dict()
undo_cs = g_add_prefix | old.as_dict()
old = Serialize(get_pump(name, id), schema).to_storage()
new = get_pump(name, id)
ps: list[str] = []
for key in properties:
if key in schema and schema[key]['readonly'] == False:
new[key] = properties[key]
ps.append(key)
new = Serialize(new, schema).to_execution()
sql = f"update pumps set node1 = {new['node1']}, node2 = {new['node2']} where id = '{id}';"
undo = f"update pumps set node1 = {old['node1']}, node2 = {old['node2']} where id = ''{id}'';"
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'set_pump', API_UPDATE, PUMP, id, ps)
return get_current_change_set(name)
def delete_pump(name: str, id: str) -> ChangeSet:
row = get_pump(name, id)
if row == None:
return ChangeSet()
old = Serialize(get_pump(name, id), schema).to_storage()
sql = f"delete from pumps where id = '{id}';"
sql += f"\ndelete from _link where id = '{id}';"
undo = f"insert into _link (id, type) values (''{id}'', ''{PUMP}'');"
undo += f"\ninsert into pumps (id, node1, node2) values (''{id}'', {old['node1']}, {old['node2']});"
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'delete_pump', API_DELETE, PUMP, id)
return get_current_change_set(name)
return execute_command(name, redo_sql, undo_sql, redo_cs, undo_cs)

View File

@@ -1,10 +1,5 @@
from typing import Any
from psycopg.rows import Row
from .operation import *
from .s0_base import *
from .change_set import ChangeSet
from .s24_coordinates import *
from .utility import *
from .schema import *
VALVES_TYPE_PRV = 'prv'
@@ -15,14 +10,108 @@ VALVES_TYPE_TCV = 'tcv'
VALVES_TYPE_GPV = 'gpv'
# Editable-property schema for valves: property name -> descriptor built by
# define_property(type, optional, readonly).
# NOTE(review): this table uses the key 'type', but get_valve_schema(),
# get_valve() and the Valve class all use 'v_type' -- confirm which key
# is intended before relying on this table for valve updates.
schema: dict[str, dict[str, Any]] = { \
    'id'        : define_property(str_type, False, True), \
    'node1'     : define_property(str_type), \
    'node2'     : define_property(str_type), \
    'diameter'  : define_property(float_type), \
    'type'      : define_property(str_type), \
    'setting'   : define_property(float_type), \
    'minor_loss': define_property(float_type)}
def get_valve_schema(name: str) -> dict[str, dict[str, Any]]:
    """Return the editable-property schema for valves.

    NOTE(review): unlike the sibling *_schema functions, this returns a
    hard-coded table rather than the module-level `schema`; the table uses
    the key 'v_type' where `schema` uses 'type' -- confirm which is intended.
    """
    return { 'id'         : {'type': 'str'   , 'optional': False , 'readonly': True },
             'node1'      : {'type': "str"   , 'optional': False , 'readonly': False},
             'node2'      : {'type': "str"   , 'optional': False , 'readonly': False},
             'diameter'   : {'type': "float" , 'optional': False , 'readonly': False},
             'v_type'     : {'type': "str"   , 'optional': False , 'readonly': False},
             'setting'    : {'type': "float" , 'optional': False , 'readonly': False},
             'minor_loss' : {'type': "float" , 'optional': False , 'readonly': False} }
def get_valve(name: str, id: str) -> dict[str, Any]:
    """Read one valve row and coerce every column to its client-facing type.

    The database column named 'type' is exposed to clients as 'v_type'.
    """
    row = read(name, f"select * from valves where id = '{id}'")
    return {
        'id': str(row['id']),
        'node1': str(row['node1']),
        'node2': str(row['node2']),
        'diameter': float(row['diameter']),
        'v_type': str(row['type']),
        'setting': float(row['setting']),
        'minor_loss': float(row['minor_loss']),
    }
class Valve(object):
    """Valve record: typed attributes plus f_-prefixed SQL-literal renderings.

    String fields are rendered single-quoted ('v'); numeric fields pass
    through verbatim so they can be interpolated into SQL statements.
    """

    _STR_FIELDS = ('id', 'node1', 'node2', 'v_type')
    _NUM_FIELDS = ('diameter', 'setting', 'minor_loss')

    def __init__(self, input: dict[str, Any]) -> None:
        self.type = 'valve'
        for field in self._STR_FIELDS:
            setattr(self, field, str(input[field]))
        for field in self._NUM_FIELDS:
            setattr(self, field, float(input[field]))
        # SQL fragments mirror each attribute under an f_ prefix.
        self.f_type = f"'{self.type}'"
        for field in self._STR_FIELDS:
            setattr(self, 'f_' + field, f"'{getattr(self, field)}'")
        for field in self._NUM_FIELDS:
            setattr(self, 'f_' + field, getattr(self, field))

    def as_dict(self) -> dict[str, Any]:
        """Full property dict (change-set payload for add/update)."""
        keys = ('type', 'id', 'node1', 'node2', 'diameter', 'v_type', 'setting', 'minor_loss')
        return {key: getattr(self, key) for key in keys}

    def as_id_dict(self) -> dict[str, Any]:
        """Minimal identifying dict (change-set payload for delete)."""
        return {'type': self.type, 'id': self.id}
def set_valve(name: str, cs: ChangeSet) -> ChangeSet:
    """Update one valve's writable properties from cs.operations[0]; log undo/redo."""
    request = cs.operations[0]
    old = Valve(get_valve(name, request['id']))
    merged = get_valve(name, request['id'])
    for key, prop in get_valve_schema(name).items():
        # Accept only fields the client actually sent and that are not readonly.
        if not prop['readonly'] and key in request:
            merged[key] = request[key]
    new = Valve(merged)
    redo_sql = f"update valves set node1 = {new.f_node1}, node2 = {new.f_node2}, diameter = {new.f_diameter}, type = {new.f_v_type}, setting = {new.f_setting}, minor_loss = {new.f_minor_loss} where id = {new.f_id};"
    undo_sql = f"update valves set node1 = {old.f_node1}, node2 = {old.f_node2}, diameter = {old.f_diameter}, type = {old.f_v_type}, setting = {old.f_setting}, minor_loss = {old.f_minor_loss} where id = {old.f_id};"
    redo_cs = g_update_prefix | new.as_dict()
    undo_cs = g_update_prefix | old.as_dict()
    return execute_command(name, redo_sql, undo_sql, redo_cs, undo_cs)
def add_valve(name: str, cs: ChangeSet) -> ChangeSet:
    """Insert a new valve (plus its _link row); undo removes both rows."""
    valve = Valve(cs.operations[0])
    redo_sql = (
        f"insert into _link (id, type) values ({valve.f_id}, {valve.f_type});"
        f"\ninsert into valves (id, node1, node2, diameter, type, setting, minor_loss) values ({valve.f_id}, {valve.f_node1}, {valve.f_node2}, {valve.f_diameter}, {valve.f_v_type}, {valve.f_setting}, {valve.f_minor_loss});"
    )
    undo_sql = (
        f"delete from valves where id = {valve.f_id};"
        f"\ndelete from _link where id = {valve.f_id};"
    )
    return execute_command(name, redo_sql, undo_sql,
                           g_add_prefix | valve.as_dict(),
                           g_delete_prefix | valve.as_id_dict())
def delete_valve(name: str, cs: ChangeSet) -> ChangeSet:
    """Delete a valve; undo re-creates the _link row and the valves row."""
    valve = Valve(get_valve(name, cs.operations[0]['id']))
    redo_sql = (
        f"delete from valves where id = {valve.f_id};"
        f"\ndelete from _link where id = {valve.f_id};"
    )
    undo_sql = (
        f"insert into _link (id, type) values ({valve.f_id}, {valve.f_type});"
        f"\ninsert into valves (id, node1, node2, diameter, type, setting, minor_loss) values ({valve.f_id}, {valve.f_node1}, {valve.f_node2}, {valve.f_diameter}, {valve.f_v_type}, {valve.f_setting}, {valve.f_minor_loss});"
    )
    return execute_command(name, redo_sql, undo_sql,
                           g_delete_prefix | valve.as_id_dict(),
                           g_add_prefix | valve.as_dict())
'''
schema: dict[str, dict[str, Any]] = {}
def get_valve_schema(name: str) -> dict[str, dict[str, Any]]:
@@ -136,3 +225,4 @@ def delete_valve(name: str, id: str) -> ChangeSet:
write(name, sql)
add_operation(name, sql.replace("'", "''"), undo, 'delete_valve', API_DELETE, VALVE, id)
return get_current_change_set(name)
'''

View File

@@ -1,74 +0,0 @@
from typing import Any
# Canonical type-name strings used throughout the property schemas.
float_type = type(0.0).__name__
str_type = type('').__name__
server_point_type = type((0.0,0.0)).__name__
client_point_type = type({'x': 0.0, 'y': 0.0}).__name__
str_list_type = type(['']).__name__


def define_property(type: str, optional: bool = False, readonly: bool = False) -> dict[str, Any]:
    """Build one schema entry describing a property's type/optional/readonly flags."""
    return { 'type': type, 'optional': optional, 'readonly': readonly }


class Serialize(object):
    """Render a row's values as SQL literals.

    "Execution" quoting wraps strings in single quotes ('v'); "storage"
    quoting doubles them (''v'') so the SQL can itself be embedded inside
    another quoted string (e.g. the operation log).
    """

    def __init__(self, row, schema) -> None:
        self.row = row        # column name -> raw value
        self.schema = schema  # column name -> {'type', 'optional', 'readonly'}

    # NOTE(review): these two per-field methods were originally also named
    # to_execution/to_storage and were silently shadowed (unreachable) by the
    # whole-row methods below; renamed so both variants are usable.
    def to_execution_field(self, name) -> str:
        """Render one column as an SQL literal for direct execution."""
        value = self.row[name]
        kind = self.schema[name]['type']
        if value is None:
            return 'null'
        if kind == float_type:
            return value
        if kind == str_type:
            return f"'{value}'"
        raise Exception(f"Fail to serialize {name} for execution!")

    def to_storage_field(self, name) -> str:
        """Render one column as an SQL literal safe to embed in a quoted string."""
        value = self.row[name]
        kind = self.schema[name]['type']
        if value is None:
            return 'null'
        if kind == float_type:
            return value
        if kind == str_type:
            return f"''{value}''"
        raise Exception(f"Fail to serialize {name} for storage!")

    def to_execution(self):
        """Return a copy of the row with every value rendered for execution."""
        row = self.row.copy()
        for key, value in row.items():
            if value is None:
                row[key] = 'null'
            elif self.schema[key]['type'] == str_type:
                row[key] = f"'{row[key]}'"
            elif self.schema[key]['type'] == client_point_type:
                row[key] = f"'({value['x']},{value['y']})'"
        return row

    def to_storage(self):
        """Return a copy of the row with every value rendered for storage."""
        row = self.row.copy()
        for key, value in row.items():
            if value is None:
                row[key] = 'null'
            # BUG FIX: original compared self.schema[key] (a dict) to str_type,
            # so string columns were never quoted for storage.
            elif self.schema[key]['type'] == str_type:
                row[key] = f"''{row[key]}''"
            elif self.schema[key]['type'] == client_point_type:
                row[key] = f"''({value['x']},{value['y']})''"
        return row

View File

@@ -1,39 +0,0 @@
from psycopg.rows import dict_row, Row
from .connection import g_conn_dict as conn
from .operation import *
from .change_set import ChangeSet
def read(name: str, sql: str) -> Row | None:
    """Run sql on project `name`'s connection; return the first row as a dict, or None."""
    with conn[name].cursor(row_factory=dict_row) as cursor:
        cursor.execute(sql)
        return cursor.fetchone()
def write(name: str, sql: str) -> None:
    """Run sql on project `name`'s connection, discarding any result."""
    with conn[name].cursor() as cursor:
        cursor.execute(sql)
def decorate(value: str | None, type: str, optional: bool) -> str:
if optional:
value = 'NULL' if value == None else value
if type == 'str':
value = f'"{value}"' if value != 'NULL' else value
return value
def update(name: str, type: str, table: str, id_key: str, id_value: str, key: str, key_type: str, raw_old_value: str, value: str, optional: bool = False) -> ChangeSet:
    """Set one column of one row, record redo/undo SQL, return the ChangeSet.

    `table`.`key` is set to `value` for the row where `id_key` = `id_value`;
    `raw_old_value` is the previous value used to build the undo statement.
    """
    # BUG FIX: original used `key_type is 'str'` — identity comparison with a
    # string literal is implementation-dependent (SyntaxWarning in CPython);
    # use equality instead.
    value = f"'{value}'" if key_type == 'str' else value
    old = decorate(raw_old_value, key_type, optional)
    with conn[name].cursor() as cur:
        sql = f"update {table} set {key} = {value} where {id_key} = '{id_value}'"
        cur.execute(sql)
        # Stored redo/undo use double quotes so they survive re-quoting in the log.
        redo = sql.replace("'", '"')
        undo = f'update {table} set {key} = {old} where {id_key} = "{id_value}"'
        add_operation(name, redo, undo)
        change = ChangeSet()
        change.update(type, id_value, key)
        return change

View File

@@ -12,4 +12,5 @@ create table pipes
, roughness numeric not null
, minor_loss numeric not null
, status pipes_status not null
, check (node1 <> node2)
);

View File

@@ -5,11 +5,11 @@ create table operation
, undo text not null
, parent integer references operation(id) on delete cascade
, redo_child integer references operation(id) -- must update before delete
, redo_change_set jsonb
, undo_change_set jsonb
, redo_cs text not null
, undo_cs text not null
);
insert into operation (id, redo, undo) values (0, '', '');
insert into operation (id, redo, undo, redo_cs, undo_cs) values (0, '', '', '', '');
create table current_operation
(

File diff suppressed because it is too large Load Diff

View File

@@ -6,6 +6,10 @@ import api
# ChangeSet
############################################################
API_ADD = api.API_ADD
API_UPDATE = api.API_UPDATE
API_DELETE = api.API_DELETE
ChangeSet = api.ChangeSet
@@ -80,7 +84,7 @@ def execute_redo(name: str) -> ChangeSet:
def have_snapshot(name: str, tag: str) -> bool:
return api.have_snapshot(name, tag)
def take_snapshot(name: str, tag: str) -> int:
def take_snapshot(name: str, tag: str) -> int | None:
return api.take_snapshot(name, tag)
def pick_snapshot(name: str, tag: str, discard: bool = False) -> ChangeSet:
@@ -90,6 +94,14 @@ def sync_with_server(name: str, operation: int) -> ChangeSet:
return api.sync_with_server(name, operation)
############################################################
# operation
############################################################
def execute_batch_commands(name: str, cs: ChangeSet) -> ChangeSet:
return api.execute_batch_commands(name, cs)
############################################################
# type
############################################################
@@ -144,12 +156,15 @@ def get_node_links(name: str, node_id: str) -> list[str]:
# title 1.[TITLE]
############################################################
def set_title(name: str, title: str) -> ChangeSet:
return api.set_title(name, title)
def get_title_schema(name: str) -> dict[str, dict[str, Any]]:
return api.get_title_schema(name)
def get_title(name: str) -> str:
def get_title(name: str) -> dict[str, Any]:
return api.get_title(name)
def set_title(name: str, cs: ChangeSet) -> ChangeSet:
return api.set_title(name, cs)
############################################################
# junction 2.[JUNCTIONS]
@@ -158,17 +173,18 @@ def get_title(name: str) -> str:
def get_junction_schema(name: str) -> dict[str, dict[str, Any]]:
return api.get_junction_schema(name)
def add_junction(name: str, junction_id: str, x: float, y: float, elevation: float) -> ChangeSet:
return api.add_junction(name, junction_id, x, y, elevation)
def get_junction(name: str, id: str) -> dict[str, Any]:
return api.get_junction(name, id)
def get_junction(name: str, junction_id: str) -> dict[str, Any] | None:
return api.get_junction(name, junction_id)
def set_junction(name: str, cs: ChangeSet) -> ChangeSet:
return api.set_junction(name, cs)
def set_junction(name: str, junction_id: str, properties: dict[str, Any]) -> ChangeSet:
return api.set_junction(name, junction_id, properties)
# example: add_junction(p, ChangeSet({'id': 'j0', 'x': 0.0, 'y': 10.0, 'elevation': 20.0}))
def add_junction(name: str, cs: ChangeSet) -> ChangeSet:
return api.add_junction(name, cs)
def delete_junction(name: str, junction_id: str) -> ChangeSet:
return api.delete_junction(name, junction_id)
def delete_junction(name: str, cs: ChangeSet) -> ChangeSet:
return api.delete_junction(name, cs)
############################################################
@@ -178,17 +194,18 @@ def delete_junction(name: str, junction_id: str) -> ChangeSet:
def get_reservoir_schema(name: str) -> dict[str, dict[str, Any]]:
return api.get_reservoir_schema(name)
def add_reservoir(name: str, reservoir_id: str, x: float, y: float, head: float) -> ChangeSet:
return api.add_reservoir(name, reservoir_id, x, y, head)
def get_reservoir(name: str, id: str) -> dict[str, Any]:
return api.get_reservoir(name, id)
def get_reservoir(name: str, reservoir_id: str) -> dict[str, Any] | None:
return api.get_reservoir(name, reservoir_id)
def set_reservoir(name: str, cs: ChangeSet) -> ChangeSet:
return api.set_reservoir(name, cs)
def set_reservoir(name: str, reservoir_id: str, properties: dict[str, Any]) -> ChangeSet:
return api.set_reservoir(name, reservoir_id, properties)
# example: add_reservoir(p, ChangeSet({'id': 'r0', 'x': 0.0, 'y': 10.0, 'head': 20.0}))
def add_reservoir(name: str, cs: ChangeSet) -> ChangeSet:
return api.add_reservoir(name, cs)
def delete_reservoir(name: str, reservoir_id: str) -> ChangeSet:
return api.delete_reservoir(name, reservoir_id)
def delete_reservoir(name: str, cs: ChangeSet) -> ChangeSet:
return api.delete_reservoir(name, cs)
############################################################
@@ -198,17 +215,18 @@ def delete_reservoir(name: str, reservoir_id: str) -> ChangeSet:
def get_tank_schema(name: str) -> dict[str, dict[str, Any]]:
return api.get_tank_schema(name)
def add_tank(name: str, tank_id: str, x: float, y: float, elevation: float, init_level: float = 0, min_level: float = 0, max_level: float = 0, diameter: float = 0, min_vol: float = 0) -> ChangeSet:
return api.add_tank(name, tank_id, x, y, elevation, init_level, min_level, max_level, diameter, min_vol)
def get_tank(name: str, id: str) -> dict[str, Any]:
return api.get_tank(name, id)
def get_tank(name: str, tank_id: str) -> dict[str, Any] | None:
return api.get_tank(name, tank_id)
def set_tank(name: str, cs: ChangeSet) -> ChangeSet:
return api.set_tank(name, cs)
def set_tank(name: str, tank_id: str, properties: dict[str, Any]) -> ChangeSet:
return api.set_tank(name, tank_id, properties)
# example: add_tank(p, ChangeSet({'id': 't0', 'x': 0.0, 'y': 10.0, 'elevation': 20.0, 'init_level': 1.0, 'min_level': 0.0, 'max_level': 2.0, 'diameter': 10.0, 'min_vol': 100.0}))
def add_tank(name: str, cs: ChangeSet) -> ChangeSet:
return api.add_tank(name, cs)
def delete_tank(name: str, tank_id: str) -> ChangeSet:
return api.delete_tank(name, tank_id)
def delete_tank(name: str, cs: ChangeSet) -> ChangeSet:
return api.delete_tank(name, cs)
############################################################
@@ -218,17 +236,18 @@ def delete_tank(name: str, tank_id: str) -> ChangeSet:
def get_pipe_schema(name: str) -> dict[str, dict[str, Any]]:
return api.get_pipe_schema(name)
def add_pipe(name: str, pipe_id: str, node1: str, node2: str, length: float = 0, diameter: float = 0, roughness: float = 0, minor_loss: float = 0, status: str = PIPE_STATUS_OPEN) -> ChangeSet:
return api.add_pipe(name, pipe_id, node1, node2, length, diameter, roughness, minor_loss, status)
def get_pipe(name: str, id: str) -> dict[str, Any]:
return api.get_pipe(name, id)
def get_pipe(name: str, pipe_id: str) -> dict[str, Any] | None:
return api.get_pipe(name, pipe_id)
def set_pipe(name: str, cs: ChangeSet) -> ChangeSet:
return api.set_pipe(name, cs)
def set_pipe(name: str, pipe_id: str, properties: dict[str, Any]) -> ChangeSet:
return api.set_pipe(name, pipe_id, properties)
# example: add_pipe(p, ChangeSet({'id': 'p0', 'node1': 'j1', 'node2': 'j2', 'length': 100.0, 'diameter': 10.0, 'roughness': 0.1, 'minor_loss': 0.5, 'status': PIPE_STATUS_OPEN }))
def add_pipe(name: str, cs: ChangeSet) -> ChangeSet:
return api.add_pipe(name, cs)
def delete_pipe(name: str, pipe_id: str) -> ChangeSet:
return api.delete_pipe(name, pipe_id)
def delete_pipe(name: str, cs: ChangeSet) -> ChangeSet:
return api.delete_pipe(name, cs)
############################################################
@@ -238,17 +257,18 @@ def delete_pipe(name: str, pipe_id: str) -> ChangeSet:
def get_pump_schema(name: str) -> dict[str, dict[str, Any]]:
return api.get_pump_schema(name)
def add_pump(name: str, pump_id: str, node1: str, node2: str) -> ChangeSet:
return api.add_pump(name, pump_id, node1, node2)
def get_pump(name: str, id: str) -> dict[str, Any]:
return api.get_pump(name, id)
def get_pump(name: str, pump_id: str) -> dict[str, Any] | None:
return api.get_pump(name, pump_id)
def set_pump(name: str, cs: ChangeSet) -> ChangeSet:
return api.set_pump(name, cs)
def set_pump(name: str, pump_id: str, properties: dict[str, Any]) -> ChangeSet:
return api.set_pump(name, pump_id, properties)
# example: add_pump(p, ChangeSet({'id': 'p0', 'node1': 'j1', 'node2': 'j2'}))
def add_pump(name: str, cs: ChangeSet) -> ChangeSet:
return api.add_pump(name, cs)
def delete_pump(name: str, pump_id: str) -> ChangeSet:
return api.delete_pump(name, pump_id)
def delete_pump(name: str, cs: ChangeSet) -> ChangeSet:
return api.delete_pump(name, cs)
############################################################
@@ -258,17 +278,18 @@ def delete_pump(name: str, pump_id: str) -> ChangeSet:
def get_valve_schema(name: str) -> dict[str, dict[str, Any]]:
return api.get_valve_schema(name)
def add_valve(name: str, valve_id: str, node1: str, node2: str, diameter: float = 0, type: str = VALVES_TYPE_PRV, setting: float = 0, minor_loss: float = 0) -> ChangeSet:
return api.add_valve(name, valve_id, node1, node2, diameter, type, setting, minor_loss)
def get_valve(name: str, id: str) -> dict[str, Any]:
return api.get_valve(name, id)
def get_valve(name: str, valve_id: str) -> dict[str, Any] | None:
return api.get_valve(name, valve_id)
def set_valve(name: str, cs: ChangeSet) -> ChangeSet:
return api.set_valve(name, cs)
def set_valve(name: str, valve_id: str, properties: dict[str, Any]) -> ChangeSet:
return api.set_valve(name, valve_id, properties)
#example: add_valve(p, ChangeSet({'id': 'v0', 'node1': 'j1', 'node2': 'j2', 'diameter': 10.0, 'v_type': VALVES_TYPE_FCV, 'setting': 0.1, 'minor_loss': 0.5 }))
def add_valve(name: str, cs: ChangeSet) -> ChangeSet:
return api.add_valve(name, cs)
def delete_valve(name: str, valve_id: str) -> ChangeSet:
return api.delete_valve(name, valve_id)
def delete_valve(name: str, cs: ChangeSet) -> ChangeSet:
return api.delete_valve(name, cs)
############################################################

View File

@@ -1,51 +0,0 @@
from tjnetwork import *

# Smoke-test script: exercises project lifecycle, junction CRUD, undo,
# snapshots and client/server sync against a scratch project named "demo".
p = "demo"

# Start from a clean slate: close and delete any leftover demo project.
if is_project_open(p):
    close_project(p)
if have_project(p):
    delete_project(p)
create_project(p)
open_project(p)

# Dump the property schemas for the node types.
print(get_junction_schema(p))
print(get_reservoir_schema(p))
print(get_tank_schema(p))

# Create four junctions at the origin (id, x, y, elevation).
print(add_junction(p, 'j1', 0.0, 0.0, 0.0).operations)
print(add_junction(p, 'j2', 0.0, 0.0, 0.0).operations)
print(add_junction(p, 'j3', 0.0, 0.0, 0.0).operations)
print(add_junction(p, 'j4', 0.0, 0.0, 0.0).operations)

# Remember the current operation id; used for the sync test below.
client_op = get_current_operation(p)
print(client_op)
print(take_snapshot(p, 'x'))

# Undo the last two adds, then add two more junctions, snapshot again.
print(execute_undo(p).operations)
print(execute_undo(p).operations)
print(add_junction(p, 'j5', 0.0, 0.0, 0.0).operations)
print(add_junction(p, 'j6', 0.0, 0.0, 0.0).operations)
print(take_snapshot(p, 'xx'))

# Presumably replays server-side changes made since client_op, then restores
# snapshot 'x' — TODO confirm against the api implementation.
print(sync_with_server(p, client_op).operations)
print(pick_snapshot(p, 'x').operations)

# Inspect all six junctions (some may be absent after the undo/snapshot steps).
print(get_junction(p, 'j1'))
print(get_junction(p, 'j2'))
print(get_junction(p, 'j3'))
print(get_junction(p, 'j4'))
print(get_junction(p, 'j5'))
print(get_junction(p, 'j6'))

# Update j1's properties, inspect, undo the update, inspect again.
print(set_junction(p, 'j1', {'elevation': 10.0, 'coord': {'x': 10.0, 'y': 10.0}, 'demand': 10.0}).operations)
print(get_junction(p, 'j1'))
print(execute_undo(p).operations)
print(get_junction(p, 'j1'))

close_project(p)
delete_project(p)