Merge branch 'master' into region

This commit is contained in:
WQY\qiong
2023-03-31 15:00:30 +08:00
13 changed files with 605 additions and 126 deletions

View File

@@ -16,10 +16,10 @@ from .database import update_snapshot, update_snapshot_for_current_operation
from .database import delete_snapshot, delete_snapshot_by_operation
from .database import get_operation_by_snapshot, get_snapshot_by_operation
from .database import pick_snapshot
from .database import pick_operation, sync_with_server, get_restore_operation, set_restore_operation, set_restore_operation_to_current
from .database import pick_operation, sync_with_server
from .database import get_restore_operation, set_restore_operation, set_restore_operation_to_current, restore
from .batch_cmd import execute_batch_command
from .batch_cmds import execute_batch_commands
from .batch_cmds import execute_batch_commands, execute_batch_command
from .s0_base import JUNCTION, RESERVOIR, TANK, PIPE, PUMP, VALVE, PATTERN, CURVE
from .s0_base import is_node, is_junction, is_reservoir, is_tank

View File

@@ -1,5 +1,6 @@
from typing import Any
from .sections import *
from .database import API_ADD, API_UPDATE, API_DELETE, ChangeSet
from .database import API_ADD, API_UPDATE, API_DELETE, ChangeSet, write, read, read_all, get_current_operation
from .s1_title import set_title
from .s2_junctions import set_junction, add_junction, delete_junction
from .s3_reservoirs import set_reservoir, add_reservoir, delete_reservoir
@@ -290,7 +291,71 @@ def execute_batch_commands(name: str, cs: ChangeSet) -> ChangeSet:
elif operation == API_DELETE:
result.merge(execute_delete_command(name, ChangeSet(op)))
except:
print(f'ERROR: Fail to execute {todo}!')
pass
print(f'ERROR: Fail to execute {todo}')
return result
def execute_batch_command(name: str, cs: ChangeSet) -> ChangeSet:
    """Execute a changeset as one atomic batch.

    While the batch runs, history writes are routed to the scratch
    ``batch_operation`` table (via the ``operation_table`` flag).  On success
    the per-operation redo/undo scripts are collapsed into a single row of the
    regular ``operation`` history, so the whole batch undoes/redoes as one
    step.  If nothing was applied, an empty ChangeSet is returned.
    """
    # Clear the scratch table (keep seed row id = 0) and route history writes
    # from 'operation' to 'batch_operation'.
    write(name, 'delete from batch_operation where id > 0')
    write(name, "update operation_table set option = 'batch_operation' where option = 'operation'")
    # for delete, generate cascade command
    new_cs = ChangeSet()
    for op in cs.operations:
        if op['operation'] == API_DELETE:
            new_cs.merge(del_cascade_cmd(name, ChangeSet(op)))
        else:
            new_cs.merge(ChangeSet(op))
    result = ChangeSet()
    todo = {}
    try:
        for op in new_cs.operations:
            todo = op
            operation = op['operation']
            if operation == API_ADD:
                result.merge(execute_add_command(name, ChangeSet(op)))
            elif operation == API_UPDATE:
                result.merge(execute_update_command(name, ChangeSet(op)))
            elif operation == API_DELETE:
                result.merge(execute_delete_command(name, ChangeSet(op)))
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt still
    # propagate; a failed operation is reported and the batch stops early.
    except Exception:
        print(f'ERROR: Fail to execute {todo}!')
    count = read(name, 'select count(*) as count from batch_operation')['count']
    if count == 1:
        # Only the seed row remains: nothing was applied.  Restore normal
        # history routing and report an empty result.
        write(name, 'delete from batch_operation where id > 0')
        write(name, "update operation_table set option = 'operation' where option = 'batch_operation'")
        return ChangeSet()
    # Collect redo parts in application order (id asc)...
    redo_list: list[str] = []
    redo_cs_list: list[dict[str, Any]] = []
    redo_rows = read_all(name, 'select redo, redo_cs from batch_operation where id > 0 order by id asc')
    for row in redo_rows:
        redo_list.append(row['redo'])
        # NOTE(review): eval() on stored changeset text — acceptable only
        # because this module wrote the text itself; ast.literal_eval would be
        # a safer drop-in for literal list/dict data.
        redo_cs_list += eval(row['redo_cs'])
    # ...and undo parts in reverse order (id desc) so undo replays backwards.
    undo_list: list[str] = []
    undo_cs_list: list[dict[str, Any]] = []
    undo_rows = read_all(name, 'select undo, undo_cs from batch_operation where id > 0 order by id desc')
    for row in undo_rows:
        undo_list.append(row['undo'])
        undo_cs_list += eval(row['undo_cs'])
    # Escape single quotes for embedding in the SQL string literals below.
    redo = '\n'.join(redo_list).replace("'", "''")
    redo_cs = str(redo_cs_list).replace("'", "''")
    undo = '\n'.join(undo_list).replace("'", "''")
    undo_cs = str(undo_cs_list).replace("'", "''")
    parent = get_current_operation(name)
    write(name, f"insert into operation (id, redo, undo, parent, redo_cs, undo_cs) values (default, '{redo}', '{undo}', {parent}, '{redo_cs}', '{undo_cs}')")
    current = read(name, 'select max(id) as id from operation')['id']
    write(name, f"update current_operation set id = {current}")
    # Clean up the scratch table and restore normal history routing.
    write(name, 'delete from batch_operation where id > 0')
    write(name, "update operation_table set option = 'operation' where option = 'batch_operation'")
    return result

View File

@@ -113,6 +113,8 @@ def get_current_operation(name: str) -> int:
def execute_command(name: str, command: DbChangeSet) -> ChangeSet:
op_table = read(name, "select * from operation_table")['option']
write(name, command.redo_sql)
parent = get_current_operation(name)
@@ -120,10 +122,11 @@ def execute_command(name: str, command: DbChangeSet) -> ChangeSet:
undo_sql = command.undo_sql.replace("'", "''")
redo_cs_str = str(command.redo_cs).replace("'", "''")
undo_cs_str = str(command.undo_cs).replace("'", "''")
write(name, f"insert into operation (id, redo, undo, parent, redo_cs, undo_cs) values (default, '{redo_sql}', '{undo_sql}', {parent}, '{redo_cs_str}', '{undo_cs_str}')")
write(name, f"insert into {op_table} (id, redo, undo, parent, redo_cs, undo_cs) values (default, '{redo_sql}', '{undo_sql}', {parent}, '{redo_cs_str}', '{undo_cs_str}')")
current = read(name, 'select max(id) as id from operation')['id']
write(name, f"update current_operation set id = {current}")
if op_table == 'operation':
current = read(name, 'select max(id) as id from operation')['id']
write(name, f"update current_operation set id = {current}")
return ChangeSet.from_list(command.redo_cs)
@@ -339,3 +342,8 @@ def set_restore_operation(name: str, operation: int) -> None:
# Record the current operation as the restore point.
def set_restore_operation_to_current(name: str) -> None:
return set_restore_operation(name, get_current_operation(name))
# Jump the project back to the recorded restore point; `discard` is passed
# through to pick_operation (presumably drops intervening history — confirm).
def restore(name: str, discard: bool) -> ChangeSet:
op = get_restore_operation(name)
return pick_operation(name, op, discard)

108
create_template.py Normal file
View File

@@ -0,0 +1,108 @@
import psycopg as pg
# Schema sections in dependency order: numbered sections 0-31, then the
# operation (undo/redo) bookkeeping.  Creation runs first-to-last; teardown
# runs the same sections in reverse.
_SQL_PARTS = [
    "0.base", "1.title", "2.junctions", "3.reservoirs", "4.tanks",
    "5.pipes", "6.pumps", "7.valves", "8.tags", "9.demands",
    "10.status", "11.patterns", "12.curves", "13.controls", "14.rules",
    "15.energy", "16.emitters", "17.quality", "18.sources", "19.reactions",
    "20.mixing", "21.times", "22.report", "23.options", "24.coordinates",
    "25.vertices", "26.labels", "27.backdrop", "28.end", "29.scada_device",
    "30.scada_device_data", "31.scada_element", "operation",
]
sql_create = [f"script/sql/create/{part}.sql" for part in _SQL_PARTS]
sql_drop = [f"script/sql/drop/{part}.sql" for part in reversed(_SQL_PARTS)]
def create_template():
    """Create the 'project' template database and run every create script."""
    # CREATE DATABASE cannot run inside a transaction block, hence autocommit.
    with pg.connect(conninfo="dbname=postgres host=127.0.0.1", autocommit=True) as conn, conn.cursor() as cur:
        cur.execute("create database project")
    with pg.connect(conninfo="dbname=project host=127.0.0.1") as conn, conn.cursor() as cur:
        for script in sql_create:
            with open(script, "r") as handle:
                cur.execute(handle.read())
            print(f'executed {script}')
        conn.commit()
def have_template():
    """Return True when the 'project' template database already exists."""
    with pg.connect(conninfo="dbname=postgres host=127.0.0.1", autocommit=True) as conn, conn.cursor() as cur:
        cur.execute("select * from pg_database where datname = 'project'")
        return cur.rowcount > 0
def delete_template():
    """Run every drop script against 'project', then drop the database."""
    with pg.connect(conninfo="dbname=project host=127.0.0.1") as conn, conn.cursor() as cur:
        for script in sql_drop:
            with open(script, "r") as handle:
                cur.execute(handle.read())
            print(f'executed {script}')
        conn.commit()
    # DROP DATABASE also cannot run inside a transaction block.
    with pg.connect(conninfo="dbname=postgres host=127.0.0.1", autocommit=True) as conn, conn.cursor() as cur:
        cur.execute("drop database project")
if __name__ == "__main__":
    # Rebuild the template database from scratch on every invocation.
    if have_template():
        delete_template()
    create_template()

View File

@@ -1379,6 +1379,10 @@ async def fastapi_set_backdrop_properties(network: str, req: Request) -> ChangeS
async def fastapi_get_scada_device_schema(network: str) -> dict[str, dict[str, Any]]:
return get_scada_device_schema(network)
# GET: list of SCADA device id strings for the given network.
@app.get('/getscadadevices/')
async def fastapi_get_scada_devices(network: str) -> list[str]:
return get_scada_devices(network)
# GET: properties of one SCADA device, looked up by id.
@app.get('/getscadadevice/')
async def fastapi_get_scada_device(network: str, id: str) -> dict[str, Any]:
return get_scada_device(network, id)
@@ -1445,6 +1449,10 @@ async def fastapi_clean_scada_device_data(network: str) -> ChangeSet:
async def fastapi_get_scada_element_schema(network: str) -> dict[str, dict[str, Any]]:
return get_scada_element_schema(network)
# GET: list of SCADA element id strings for the given network.
@app.get('/getscadaelements/')
async def fastapi_get_scada_elements(network: str) -> list[str]:
return get_scada_elements(network)
# GET: properties of one SCADA element, looked up by id.
@app.get('/getscadaelement/')
async def fastapi_get_scada_element(network: str, id: str) -> dict[str, Any]:
return get_scada_element(network, id)

15
restore_project.py Normal file
View File

@@ -0,0 +1,15 @@
import sys
from tjnetwork import *
def main():
    """Restore the single project named on the command line."""
    if len(sys.argv) != 2:
        print("restore_project name")
        return
    name = sys.argv[1]
    open_project(name)
    restore(name)
    close_project(name)
if __name__ == '__main__':
    main()

11
restore_projects.py Normal file
View File

@@ -0,0 +1,11 @@
from tjnetwork import *
def main():
    """Re-run restore on every known project, one at a time."""
    for name in list_project():
        print(f'restore {name}...')
        open_project(name)
        restore(name)
        close_project(name)
if __name__ == '__main__':
    main()

View File

@@ -30,3 +30,26 @@ create table restore_operation
);
insert into restore_operation (id) values (0);
-- Scratch log of the individual operations applied during an in-progress
-- batch command; collapsed into a single 'operation' row on success.
create table batch_operation
(
id bigserial primary key
, redo text not null
, undo text not null
, parent integer references operation(id) on delete cascade
, redo_child integer references operation(id) -- must update before delete
, redo_cs text not null
, undo_cs text not null
);
-- Seed row (id = 0): real batch entries use id > 0, so "count = 1" means
-- the batch applied nothing.
insert into batch_operation (id, redo, undo, redo_cs, undo_cs) values (0, '', '', '', '');
-- Routing flag: which table history rows are written into ('operation'
-- normally, 'batch_operation' while a batch command is running).
create type operation_table_option as enum ('operation', 'batch_operation');
create table operation_table
(
option operation_table_option primary key
);
-- Default routing: history goes straight to the 'operation' table.
insert into operation_table (option) values ('operation');

View File

@@ -1,3 +1,9 @@
-- Tear down the operation bookkeeping objects; operation_table must be
-- dropped before the enum type it uses.
drop table if exists operation_table;
drop type if exists operation_table_option;
drop table if exists batch_operation;
drop table if exists restore_operation;
drop table if exists snapshot_operation;

View File

@@ -1,108 +0,0 @@
import psycopg as pg
sql_create = [
"sql/create/0.base.sql",
"sql/create/1.title.sql",
"sql/create/2.junctions.sql",
"sql/create/3.reservoirs.sql",
"sql/create/4.tanks.sql",
"sql/create/5.pipes.sql",
"sql/create/6.pumps.sql",
"sql/create/7.valves.sql",
"sql/create/8.tags.sql",
"sql/create/9.demands.sql",
"sql/create/10.status.sql",
"sql/create/11.patterns.sql",
"sql/create/12.curves.sql",
"sql/create/13.controls.sql",
"sql/create/14.rules.sql",
"sql/create/15.energy.sql",
"sql/create/16.emitters.sql",
"sql/create/17.quality.sql",
"sql/create/18.sources.sql",
"sql/create/19.reactions.sql",
"sql/create/20.mixing.sql",
"sql/create/21.times.sql",
"sql/create/22.report.sql",
"sql/create/23.options.sql",
"sql/create/24.coordinates.sql",
"sql/create/25.vertices.sql",
"sql/create/26.labels.sql",
"sql/create/27.backdrop.sql",
"sql/create/28.end.sql",
"sql/create/29.scada_device.sql",
"sql/create/30.scada_device_data.sql",
"sql/create/31.scada_element.sql",
"sql/create/operation.sql"
]
sql_drop = [
"sql/drop/operation.sql",
"sql/drop/31.scada_element.sql",
"sql/drop/30.scada_device_data.sql",
"sql/drop/29.scada_device.sql",
"sql/drop/28.end.sql",
"sql/drop/27.backdrop.sql",
"sql/drop/26.labels.sql",
"sql/drop/25.vertices.sql",
"sql/drop/24.coordinates.sql",
"sql/drop/23.options.sql",
"sql/drop/22.report.sql",
"sql/drop/21.times.sql",
"sql/drop/20.mixing.sql",
"sql/drop/19.reactions.sql",
"sql/drop/18.sources.sql",
"sql/drop/17.quality.sql",
"sql/drop/16.emitters.sql",
"sql/drop/15.energy.sql",
"sql/drop/14.rules.sql",
"sql/drop/13.controls.sql",
"sql/drop/12.curves.sql",
"sql/drop/11.patterns.sql",
"sql/drop/10.status.sql",
"sql/drop/9.demands.sql",
"sql/drop/8.tags.sql",
"sql/drop/7.valves.sql",
"sql/drop/6.pumps.sql",
"sql/drop/5.pipes.sql",
"sql/drop/4.tanks.sql",
"sql/drop/3.reservoirs.sql",
"sql/drop/2.junctions.sql",
"sql/drop/1.title.sql",
"sql/drop/0.base.sql"
]
def create_template():
with pg.connect(conninfo="dbname=postgres host=127.0.0.1", autocommit=True) as conn:
with conn.cursor() as cur:
cur.execute("create database project")
with pg.connect(conninfo="dbname=project host=127.0.0.1") as conn:
with conn.cursor() as cur:
for sql in sql_create:
with open(sql, "r") as f:
cur.execute(f.read())
print(f'executed {sql}')
conn.commit()
def have_template():
with pg.connect(conninfo="dbname=postgres host=127.0.0.1", autocommit=True) as conn:
with conn.cursor() as cur:
cur.execute("select * from pg_database where datname = 'project'")
return cur.rowcount > 0
def delete_template():
with pg.connect(conninfo="dbname=project host=127.0.0.1") as conn:
with conn.cursor() as cur:
for sql in sql_drop:
with open(sql, "r") as f:
cur.execute(f.read())
print(f'executed {sql}')
conn.commit()
with pg.connect(conninfo="dbname=postgres host=127.0.0.1", autocommit=True) as conn:
with conn.cursor() as cur:
cur.execute("drop database project")
if __name__ == "__main__":
if (have_template()):
delete_template()
create_template()

View File

@@ -1,4 +1,5 @@
import pytest
import random
from tjnetwork import *
class TestApi:
@@ -156,15 +157,7 @@ class TestApi:
cs.add({'type': JUNCTION, 'id': 'j2', 'x': 0.0, 'y': 10.0, 'elevation': 20.0}) # fail
cs = execute_batch_command(p, cs)
assert len(cs.operations) == 0
assert get_current_operation(p) == 0
cs = ChangeSet()
cs.add({'type': JUNCTION, 'id': 'j1', 'x': 0.0, 'y': 10.0, 'elevation': 20.0})
cs.add({'type': JUNCTION, 'id': 'j2', 'x': 0.0, 'y': 10.0, 'elevation': 20.0})
cs = execute_batch_command(p, cs)
assert len(cs.operations) == 2
assert get_current_operation(p) == 1
@@ -242,6 +235,353 @@ class TestApi:
self.leave(p)
def test_delete_nodes_then_restore(self):
# Delete every node individually, expect the whole network to be gone
# (the asserts expect links to vanish with their endpoint nodes), then
# jump back to the restore operation and expect everything restored.
p = 'test_delete_nodes_then_restore'
read_inp(p, f'./inp/net3.inp', '2')
open_project(p)
nodes = get_nodes(p)
links = get_links(p)
# NOTE(review): indentation lost in this view — presumably the whole
# delete/verify/restore cycle repeats 10 times with a fresh shuffle; confirm.
for _ in range(10):
random.shuffle(nodes)
for node in nodes:
if is_junction(p, node):
delete_junction(p, ChangeSet({'id': node}))
if is_reservoir(p, node):
delete_reservoir(p, ChangeSet({'id': node}))
if is_tank(p, node):
delete_tank(p, ChangeSet({'id': node}))
for node in nodes:
assert is_node(p, node) == False
for link in links:
assert is_link(p, link) == False
assert get_nodes(p) == []
assert get_links(p) == []
# Roll the project back to the recorded restore point.
op = get_restore_operation(p)
pick_operation(p, op)
for node in nodes:
assert is_node(p, node)
for link in links:
assert is_link(p, link)
self.leave(p)
def test_delete_links_then_restore(self):
# Delete every link individually (nodes must survive), then restore and
# expect all links back.
p = 'test_delete_links_then_restore'
read_inp(p, f'./inp/net3.inp', '2')
open_project(p)
nodes = get_nodes(p)
links = get_links(p)
# NOTE(review): indentation lost in this view — presumably the whole
# delete/verify/restore cycle repeats 10 times with a fresh shuffle; confirm.
for _ in range(10):
random.shuffle(links)
for link in links:
if is_pipe(p, link):
delete_pipe(p, ChangeSet({'id': link}))
if is_pump(p, link):
delete_pump(p, ChangeSet({'id': link}))
if is_valve(p, link):
delete_valve(p, ChangeSet({'id': link}))
for node in nodes:
assert is_node(p, node)
for link in links:
assert is_link(p, link) == False
assert get_links(p) == []
# Roll the project back to the recorded restore point.
op = get_restore_operation(p)
pick_operation(p, op)
for node in nodes:
assert is_node(p, node)
for link in links:
assert is_link(p, link)
self.leave(p)
def test_delete_nodes_then_restore_commands(self):
# Same scenario as test_delete_nodes_then_restore, but the deletions are
# collected into one ChangeSet and applied via execute_batch_commands.
p = 'test_delete_nodes_then_restore_commands'
read_inp(p, f'./inp/net3.inp', '2')
open_project(p)
nodes = get_nodes(p)
links = get_links(p)
# NOTE(review): indentation lost in this view — presumably the whole
# batch/verify/restore cycle repeats 10 times with a fresh shuffle; confirm.
for _ in range(10):
random.shuffle(nodes)
batch = ChangeSet()
for node in nodes:
if is_junction(p, node):
batch.delete({'type' : 'junction', 'id': node })
if is_reservoir(p, node):
batch.delete({'type' : 'reservoir', 'id': node })
if is_tank(p, node):
batch.delete({'type' : 'tank', 'id': node })
execute_batch_commands(p, batch)
for node in nodes:
assert is_node(p, node) == False
for link in links:
assert is_link(p, link) == False
assert get_nodes(p) == []
assert get_links(p) == []
# Roll the project back to the recorded restore point.
op = get_restore_operation(p)
pick_operation(p, op)
for node in nodes:
assert is_node(p, node)
for link in links:
assert is_link(p, link)
self.leave(p)
def test_delete_links_then_restore_commands(self):
# Same scenario as test_delete_links_then_restore, but the deletions are
# collected into one ChangeSet and applied via execute_batch_commands.
p = 'test_delete_links_then_restore_commands'
read_inp(p, f'./inp/net3.inp', '2')
open_project(p)
nodes = get_nodes(p)
links = get_links(p)
# NOTE(review): indentation lost in this view — presumably the whole
# batch/verify/restore cycle repeats 10 times with a fresh shuffle; confirm.
for _ in range(10):
random.shuffle(links)
batch = ChangeSet()
for link in links:
if is_pipe(p, link):
batch.delete({'type' : 'pipe', 'id': link })
if is_pump(p, link):
batch.delete({'type' : 'pump', 'id': link })
if is_valve(p, link):
batch.delete({'type' : 'valve', 'id': link })
execute_batch_commands(p, batch)
for node in nodes:
assert is_node(p, node)
for link in links:
assert is_link(p, link) == False
assert get_links(p) == []
# Roll the project back to the recorded restore point.
op = get_restore_operation(p)
pick_operation(p, op)
for node in nodes:
assert is_node(p, node)
for link in links:
assert is_link(p, link)
self.leave(p)
def test_delete_nodes_then_restore_command(self):
p = 'test_delete_nodes_then_restore_commands'
read_inp(p, f'./inp/net3.inp', '2')
open_project(p)
nodes = get_nodes(p)
links = get_links(p)
for _ in range(10):
random.shuffle(nodes)
batch = ChangeSet()
for node in nodes:
if is_junction(p, node):
batch.delete({'type' : 'junction', 'id': node })
if is_reservoir(p, node):
batch.delete({'type' : 'reservoir', 'id': node })
if is_tank(p, node):
batch.delete({'type' : 'tank', 'id': node })
execute_batch_command(p, batch)
for node in nodes:
assert is_node(p, node) == False
for link in links:
assert is_link(p, link) == False
assert get_nodes(p) == []
assert get_links(p) == []
op = get_restore_operation(p)
pick_operation(p, op)
for node in nodes:
assert is_node(p, node)
for link in links:
assert is_link(p, link)
self.leave(p)
def test_delete_links_then_restore_command(self):
p = 'test_delete_links_then_restore_commands'
read_inp(p, f'./inp/net3.inp', '2')
open_project(p)
nodes = get_nodes(p)
links = get_links(p)
for _ in range(10):
random.shuffle(links)
batch = ChangeSet()
for link in links:
if is_pipe(p, link):
batch.delete({'type' : 'pipe', 'id': link })
if is_pump(p, link):
batch.delete({'type' : 'pump', 'id': link })
if is_valve(p, link):
batch.delete({'type' : 'valve', 'id': link })
execute_batch_command(p, batch)
for node in nodes:
assert is_node(p, node)
for link in links:
assert is_link(p, link) == False
assert get_links(p) == []
op = get_restore_operation(p)
pick_operation(p, op)
for node in nodes:
assert is_node(p, node)
for link in links:
assert is_link(p, link)
self.leave(p)
def test_delete_nodes_links_then_restore_v2(self):
# Interleave node and link deletions in one shuffled sequence, delete
# everything, then restore and expect the full network back (INP read
# with version tag '2').
p = 'test_delete_nodes_links_then_restore_v2'
read_inp(p, f'./inp/net3.inp', '2')
open_project(p)
# Tagged worklist mixing ('node', id) and ('link', id) entries.
nls : list[tuple[str, str]] = []
nodes = get_nodes(p)
for node in nodes:
nls.append(('node', node))
links = get_links(p)
for link in links:
nls.append(('link', link))
# NOTE(review): indentation lost in this view — presumably the whole
# delete/verify/restore cycle repeats 10 times with a fresh shuffle; confirm.
for _ in range(10):
random.shuffle(nls)
for nl in nls:
if nl[0] == 'node':
node = nl[1]
if is_junction(p, node):
delete_junction(p, ChangeSet({'id': node}))
if is_reservoir(p, node):
delete_reservoir(p, ChangeSet({'id': node}))
if is_tank(p, node):
delete_tank(p, ChangeSet({'id': node}))
else:
link = nl[1]
if is_pipe(p, link):
delete_pipe(p, ChangeSet({'id': link}))
if is_pump(p, link):
delete_pump(p, ChangeSet({'id': link}))
if is_valve(p, link):
delete_valve(p, ChangeSet({'id': link}))
for node in nodes:
assert is_node(p, node) == False
for link in links:
assert is_link(p, link) == False
assert get_nodes(p) == []
assert get_links(p) == []
# Roll the project back to the recorded restore point.
op = get_restore_operation(p)
pick_operation(p, op)
for node in nodes:
assert is_node(p, node)
for link in links:
assert is_link(p, link)
self.leave(p)
def test_delete_nodes_links_then_restore_v3(self):
# Identical to the v2 test except the INP file is read with version
# tag '3'.
p = 'test_delete_nodes_links_then_restore_v3'
read_inp(p, f'./inp/net3.inp', '3')
open_project(p)
# Tagged worklist mixing ('node', id) and ('link', id) entries.
nls : list[tuple[str, str]] = []
nodes = get_nodes(p)
for node in nodes:
nls.append(('node', node))
links = get_links(p)
for link in links:
nls.append(('link', link))
# NOTE(review): indentation lost in this view — presumably the whole
# delete/verify/restore cycle repeats 10 times with a fresh shuffle; confirm.
for _ in range(10):
random.shuffle(nls)
for nl in nls:
if nl[0] == 'node':
node = nl[1]
if is_junction(p, node):
delete_junction(p, ChangeSet({'id': node}))
if is_reservoir(p, node):
delete_reservoir(p, ChangeSet({'id': node}))
if is_tank(p, node):
delete_tank(p, ChangeSet({'id': node}))
else:
link = nl[1]
if is_pipe(p, link):
delete_pipe(p, ChangeSet({'id': link}))
if is_pump(p, link):
delete_pump(p, ChangeSet({'id': link}))
if is_valve(p, link):
delete_valve(p, ChangeSet({'id': link}))
for node in nodes:
assert is_node(p, node) == False
for link in links:
assert is_link(p, link) == False
assert get_nodes(p) == []
assert get_links(p) == []
# Roll the project back to the recorded restore point.
op = get_restore_operation(p)
pick_operation(p, op)
for node in nodes:
assert is_node(p, node)
for link in links:
assert is_link(p, link)
self.leave(p)
# 1 title

View File

@@ -291,6 +291,9 @@ def set_restore_operation(name: str, operation: int) -> None:
# Thin public wrapper: record the current operation as the restore point.
def set_restore_operation_to_current(name: str) -> None:
return api.set_restore_operation_to_current(name)
# Thin public wrapper: jump back to the recorded restore point.  `discard`
# defaults to False and is forwarded to the api layer unchanged.
def restore(name: str, discard: bool = False) -> ChangeSet:
return api.restore(name, discard)
############################################################
# type