From 8681a56ed76c362b7d278b3a55aba46ccea5c8d9 Mon Sep 17 00:00:00 2001 From: "WQY\\qiong" Date: Tue, 21 Mar 2023 21:08:20 +0800 Subject: [PATCH] Replace inp in file --- api/__init__.py | 1 - api/inp_in.py | 484 +++++++++++++++++++++++----------------------- api/inp_in_new.py | 282 --------------------------- tjnetwork.py | 4 +- 4 files changed, 247 insertions(+), 524 deletions(-) delete mode 100644 api/inp_in_new.py diff --git a/api/__init__.py b/api/__init__.py index 06230ad..ad08e19 100644 --- a/api/__init__.py +++ b/api/__init__.py @@ -3,7 +3,6 @@ from .project import is_project_open, get_project_open_count, open_project, clos from .project import copy_project from .inp_in import read_inp, import_inp -from .inp_in_new import read_inp_new from .inp_out import dump_inp, export_inp from .database import API_ADD, API_UPDATE, API_DELETE diff --git a/api/inp_in.py b/api/inp_in.py index a9ebb0d..f33b5ab 100644 --- a/api/inp_in.py +++ b/api/inp_in.py @@ -1,244 +1,252 @@ +import datetime +import os from .project import * -from .database import ChangeSet, get_current_operation, set_restore_operation -from .sections import section_name -from .batch_cmds import execute_batch_commands -from .s1_title import inp_in_title -from .s2_junctions import inp_in_junction -from .s3_reservoirs import inp_in_reservoir -from .s4_tanks import inp_in_tank -from .s5_pipes import inp_in_pipe -from .s6_pumps import inp_in_pump -from .s7_valves import inp_in_valve -from .s8_tags import inp_in_tag -from .s9_demands import inp_in_demand -from .s10_status import inp_in_status -from .s11_patterns import inp_in_pattern -from .s12_curves import inp_in_curve -from .s13_controls import inp_in_control -from .s14_rules import inp_in_rule -from .s15_energy import inp_in_energy -from .s16_emitters import inp_in_emitter -from .s17_quality import inp_in_quality -from .s18_sources import inp_in_source -from .s19_reactions import inp_in_reaction -from .s20_mixing import inp_in_mixing -from .s21_times import 
inp_in_time -from .s22_report import inp_in_report -from .s23_options import inp_in_option -from .s24_coordinates import inp_in_coord -from .s25_vertices import inp_in_vertex -from .s26_labels import inp_in_label -from .s27_backdrop import inp_in_backdrop -#from .s28_end import * +from .database import ChangeSet, write +from .sections import * +from .s1_title import inp_in_title_new +from .s2_junctions import inp_in_junction_new +from .s3_reservoirs import inp_in_reservoir_new +from .s4_tanks import inp_in_tank_new +from .s5_pipes import inp_in_pipe_new +from .s6_pumps import inp_in_pump_new +from .s7_valves import inp_in_valve_new +from .s8_tags import inp_in_tag_new +from .s9_demands import inp_in_demand_new +from .s10_status import inp_in_status_new +from .s11_patterns import inp_in_pattern_new +from .s12_curves import CURVE_TYPE_PUMP, inp_in_curve_new +from .s13_controls import inp_in_control_new +from .s14_rules import inp_in_rule_new +from .s15_energy import inp_in_energy_new +from .s16_emitters import inp_in_emitter_new +from .s17_quality import inp_in_quality_new +from .s18_sources import inp_in_source_new +from .s19_reactions import inp_in_reaction_new +from .s20_mixing import inp_in_mixing_new +from .s21_times import inp_in_time_new +from .s22_report import inp_in_report_new +from .s23_options import inp_in_option_new +from .s24_coordinates import inp_in_coord_new +from .s25_vertices import inp_in_vertex_new +from .s26_labels import inp_in_label_new +from .s27_backdrop import inp_in_backdrop_new + +_S = 'S' +_L = 'L' + +_handler = { + TITLE : (_S, inp_in_title_new), + JUNCTIONS : (_L, inp_in_junction_new), + RESERVOIRS : (_L, inp_in_reservoir_new), + TANKS : (_L, inp_in_tank_new), + PIPES : (_L, inp_in_pipe_new), + PUMPS : (_L, inp_in_pump_new), + VALVES : (_L, inp_in_valve_new), + TAGS : (_L, inp_in_tag_new), + DEMANDS : (_L, inp_in_demand_new), + STATUS : (_L, inp_in_status_new), + PATTERNS : (_L, inp_in_pattern_new), + CURVES : (_L, inp_in_curve_new), 
+ CONTROLS : (_L, inp_in_control_new), + RULES : (_L, inp_in_rule_new), + ENERGY : (_L, inp_in_energy_new), + EMITTERS : (_L, inp_in_emitter_new), + QUALITY : (_L, inp_in_quality_new), + SOURCES : (_L, inp_in_source_new), + REACTIONS : (_L, inp_in_reaction_new), + MIXING : (_L, inp_in_mixing_new), + TIMES : (_S, inp_in_time_new), + REPORT : (_S, inp_in_report_new), + OPTIONS : (_S, inp_in_option_new), + COORDINATES : (_L, inp_in_coord_new), + VERTICES : (_L, inp_in_vertex_new), + LABELS : (_L, inp_in_label_new), + BACKDROP : (_S, inp_in_backdrop_new), + #END : 'END', +} + +_level_1 = [ + TITLE, + PATTERNS, + CURVES, + CONTROLS, + RULES, + TIMES, + REPORT, + OPTIONS, + BACKDROP, +] + +_level_2 = [ + JUNCTIONS, + RESERVOIRS, + TANKS, +] + +_level_3 = [ + PIPES, + PUMPS, + VALVES, + DEMANDS, + EMITTERS, + QUALITY, + SOURCES, + MIXING, + COORDINATES, + LABELS, +] + +_level_4 = [ + TAGS, + STATUS, + ENERGY, + REACTIONS, + VERTICES, +] -def _parse_inp(inp: str) -> dict[str, list[str]]: - file: dict[str, list[str]] = {} - for s in section_name: - file[s] = [] +class SQLBatch: + def __init__(self, project: str, count: int = 100) -> None: + self.batch: list[str] = [] + self.project = project + self.count = count - section = '' + def add(self, sql: str) -> None: + self.batch.append(sql) + if len(self.batch) == self.count: + self.flush() - for line in open(inp): - line = line.strip() - if line == '': - # skip empty line for control and rule - if section == 'CONTROLS' or section == 'RULES': - pass - else: - section = '' - continue + def flush(self) -> None: + write(self.project, ''.join(self.batch)) + self.batch.clear() - if line.startswith('['): - is_section = False - for s in section_name: - if line.startswith(f'[{s}'): - section = s - is_section = True - break - if is_section: + +def _print_time(desc: str) -> datetime.datetime: + now = datetime.datetime.now() + time = now.strftime('%Y-%m-%d %H:%M:%S') + print(f"{time}: {desc}") + return now + + +def _get_file_offset(inp: 
str) -> tuple[dict[str, list[int]], bool]: + offset: dict[str, list[int]] = {} + + current = '' + demand_outside = False + + with open(inp) as f: + while True: + line = f.readline() + if not line: + break + + line = line.strip() + if line.startswith('['): + for s in section_name: + if line.startswith(f'[{s}'): + if s not in offset: + offset[s] = [] + offset[s].append(f.tell()) + current = s + break + elif line != '' and line.startswith(';') == False: + if current == DEMANDS: + demand_outside = True + + return (offset, demand_outside) + + +def parse_file(project: str, inp: str) -> None: + start = _print_time(f'Start reading file "{inp}"...') + + _print_time("First scan...") + offset, demand_outside = _get_file_offset(inp) + + levels = _level_1 + _level_2 + _level_3 + _level_4 + + # parse the whole section rather than line + sections : dict[str, list[str]]= {} + for [s, t] in _handler.items(): + if t[0] == _S: + sections[s] = [] + + current_pattern = None + current_curve = None + curve_type_desc_line = None + + sql_batch = SQLBatch(project) + + _print_time("Second scan...") + with open(inp) as f: + for s in levels: + if s not in offset: continue - if section != '': - file[section].append(line) - - return file - - -def _parse_cs(cs: ChangeSet) -> dict[str, list[str]]: - file: dict[str, list[str]] = {} - for s in section_name: - file[s] = [] - - section = '' - - for line in str(cs.operations[0]['inp']).split('\n'): - line = line.strip() - if line == '': - # skip empty line for control and rule - if section == 'CONTROLS' or section == 'RULES': - pass - else: - section = '' - continue - - if line.startswith('['): - is_section = False - for s in section_name: - if line.startswith(f'[{s}'): - section = s - is_section = True - break - if is_section: + if s == DEMANDS and demand_outside == False: continue - if section != '': - file[section].append(line) + _print_time(f"[{s}]") - return file + is_s = _handler[s][0] == _S + handler = _handler[s][1] + for ptr in offset[s]: + 
f.seek(ptr) -def _read_inp(file: dict[str, list[str]]) -> ChangeSet: - file_cs: dict[str, ChangeSet] = {} - for s in section_name: - file_cs[s] = ChangeSet() + while True: + line = f.readline() + if not line: + break - for name, section in file.items(): - if name == 'TITLE': - file_cs[name].merge(inp_in_title(section)) - - elif name == 'JUNCTIONS': # + coords - file_cs[name].merge(inp_in_junction(section)) - - elif name == 'RESERVOIRS': # + coords - file_cs[name].merge(inp_in_reservoir(section)) - - elif name == 'TANKS': # + coords - file_cs[name].merge(inp_in_tank(section)) - - elif name == 'PIPES': - file_cs[name].merge(inp_in_pipe(section)) - - elif name == 'PUMPS': - file_cs[name].merge(inp_in_pump(section)) - - elif name == 'VALVES': - file_cs[name].merge(inp_in_valve(section)) - - elif name == 'TAGS': - file_cs[name].merge(inp_in_tag(section)) - - elif name == 'DEMANDS': - file_cs[name].merge(inp_in_demand(section)) - - elif name == 'STATUS': - file_cs[name].merge(inp_in_status(section)) - - elif name == 'PATTERNS': - file_cs[name].merge(inp_in_pattern(section)) - - elif name == 'CURVES': - file_cs[name].merge(inp_in_curve(section)) - - elif name == 'CONTROLS': - file_cs[name].merge(inp_in_control(section)) - - elif name == 'RULES': - file_cs[name].merge(inp_in_rule(section)) - - elif name == 'ENERGY': - file_cs[name].merge(inp_in_energy(section)) - - elif name == 'EMITTERS': - file_cs[name].merge(inp_in_emitter(section)) - - elif name == 'QUALITY': - file_cs[name].merge(inp_in_quality(section)) - - elif name == 'SOURCES': - file_cs[name].merge(inp_in_source(section)) - - elif name == 'REACTIONS': - file_cs[name].merge(inp_in_reaction(section)) - - elif name == 'MIXING': - file_cs[name].merge(inp_in_mixing(section)) - - elif name == 'TIMES': - file_cs[name].merge(inp_in_time(section)) - - elif name == 'REPORT': - file_cs[name].merge(inp_in_report(section)) - - elif name == 'OPTIONS': - file_cs[name].merge(inp_in_option(section)) - - elif name == 
'COORDINATES': - coords = inp_in_coord(section) - for s in ['JUNCTIONS', 'RESERVOIRS', 'TANKS']: - for node in file_cs[s].operations: - if node['type'] == 'demand': + line = line.strip() + if line.startswith('['): + break + elif line == '': continue - if node['id'] in coords: - coord = coords[node['id']] - node |= { 'x' : coord['x'], 'y' : coord['y'] } + + if is_s: + sections[s].append(line) else: - print(f"WARNING: [{s}] {node['id']} has no coordinate, set it at origin!") - node |= { 'x' : 0.0, 'y' : 0.0 } + if line.startswith(';'): + line = line.removeprefix(';') + if s == PATTERNS: # ;desc + pass + elif s == CURVES: # ;type: desc + curve_type_desc_line = line + continue - elif name == 'VERTICES': - file_cs[name].merge(inp_in_vertex(section)) + if s == PATTERNS: + tokens = line.split() + if current_pattern != tokens[0]: + sql_batch.add(f"insert into _pattern (id) values ('{tokens[0]}');") + current_pattern = tokens[0] + elif s == CURVES: + tokens = line.split() + if current_curve != tokens[0]: + type = CURVE_TYPE_PUMP + if curve_type_desc_line != None: + type = curve_type_desc_line.split(':')[0].strip() + sql_batch.add(f"insert into _curve (id, type) values ('{tokens[0]}', '{type}');") + current_curve = tokens[0] + curve_type_desc_line = None - elif name == 'LABELS': - file_cs[name].merge(inp_in_label(section)) + if s == JUNCTIONS: + sql_batch.add(handler(line, demand_outside)) + else: + sql_batch.add(handler(line)) - elif name == 'BACKDROP': - file_cs[name].merge(inp_in_backdrop(section)) + f.seek(0) - elif name == 'END': - pass # :) + if is_s: + sql_batch.add(handler(sections[s])) - # release file - file = {} + sql_batch.flush() - cs = ChangeSet() - priorities = [ - 'PATTERNS', - 'CURVES', - 'JUNCTIONS', - 'RESERVOIRS', - 'TANKS', - 'COORDINATES', - 'PIPES', - 'PUMPS', - 'VALVES', - 'DEMANDS', - 'STATUS', - 'OPTIONS', - 'TIMES', - 'EMITTERS', - 'QUALITY', - 'SOURCES', - 'REACTIONS', - 'MIXING', - 'ENERGY', - 'REPORT', - 'VERTICES', - 'CONTROLS', - 'RULES', - 
'TITLE', - 'TAGS', - 'LABELS', - 'BACKDROP', - 'END', - ] - for s in priorities: - cs.merge(file_cs[s]) - - return cs + end = _print_time(f'End reading file "{inp}"') + print(f"Total (in second): {(end-start).seconds}(s)") -def read_inp(project: str, inp: str): +def read_inp(project: str, inp: str) -> bool: if is_project_open(project): close_project(project) @@ -248,34 +256,32 @@ def read_inp(project: str, inp: str): create_project(project) open_project(project) - file = _parse_inp(inp) - cs = _read_inp(file) - - execute_batch_commands(project, cs) - op = get_current_operation(project) - set_restore_operation(project, op) + try: + parse_file(project, inp) + except: + close_project(project) + delete_project(project) + return False close_project(project) + return True -def import_inp(project: str, cs: ChangeSet) -> bool: - if is_project_open(project): - close_project(project) + if 'inp' not in cs.operations[0]: + return False - if have_project(project): - delete_project(project) + filename = f'inp/{project}_temp.inp' + if os.path.exists(filename): + os.remove(filename) - create_project(project) - open_project(project) + _print_time(f'Start writing temp file "{filename}"...') + with open(filename, 'w') as f: + f.write(str(cs.operations[0]['inp'])) + _print_time(f'End writing temp file "{filename}"...') - file = _parse_cs(cs) - new_cs = _read_inp(file) + result = read_inp(project, filename) - success_cs = execute_batch_commands(project, new_cs) - op = get_current_operation(project) - set_restore_operation(project, op) + os.remove(filename) - close_project(project) - - # return ? 
- return success_cs + return result diff --git a/api/inp_in_new.py b/api/inp_in_new.py deleted file mode 100644 index c3aad8d..0000000 --- a/api/inp_in_new.py +++ /dev/null @@ -1,282 +0,0 @@ -import datetime -from .project import * -from .database import ChangeSet, write -from .sections import * -from .s1_title import inp_in_title_new -from .s2_junctions import inp_in_junction_new -from .s3_reservoirs import inp_in_reservoir_new -from .s4_tanks import inp_in_tank_new -from .s5_pipes import inp_in_pipe_new -from .s6_pumps import inp_in_pump_new -from .s7_valves import inp_in_valve_new -from .s8_tags import inp_in_tag_new -from .s9_demands import inp_in_demand_new -from .s10_status import inp_in_status_new -from .s11_patterns import inp_in_pattern_new -from .s12_curves import CURVE_TYPE_PUMP, inp_in_curve_new -from .s13_controls import inp_in_control_new -from .s14_rules import inp_in_rule_new -from .s15_energy import inp_in_energy_new -from .s16_emitters import inp_in_emitter_new -from .s17_quality import inp_in_quality_new -from .s18_sources import inp_in_source_new -from .s19_reactions import inp_in_reaction_new -from .s20_mixing import inp_in_mixing_new -from .s21_times import inp_in_time_new -from .s22_report import inp_in_report_new -from .s23_options import inp_in_option_new -from .s24_coordinates import inp_in_coord_new -from .s25_vertices import inp_in_vertex_new -from .s26_labels import inp_in_label_new -from .s27_backdrop import inp_in_backdrop_new - -_S = 'S' -_L = 'L' - -_handler = { - TITLE : (_S, inp_in_title_new), - JUNCTIONS : (_L, inp_in_junction_new), - RESERVOIRS : (_L, inp_in_reservoir_new), - TANKS : (_L, inp_in_tank_new), - PIPES : (_L, inp_in_pipe_new), - PUMPS : (_L, inp_in_pump_new), - VALVES : (_L, inp_in_valve_new), - TAGS : (_L, inp_in_tag_new), - DEMANDS : (_L, inp_in_demand_new), - STATUS : (_L, inp_in_status_new), - PATTERNS : (_L, inp_in_pattern_new), - CURVES : (_L, inp_in_curve_new), - CONTROLS : (_L, inp_in_control_new), - RULES : 
(_L, inp_in_rule_new), - ENERGY : (_L, inp_in_energy_new), - EMITTERS : (_L, inp_in_emitter_new), - QUALITY : (_L, inp_in_quality_new), - SOURCES : (_L, inp_in_source_new), - REACTIONS : (_L, inp_in_reaction_new), - MIXING : (_L, inp_in_mixing_new), - TIMES : (_S, inp_in_time_new), - REPORT : (_S, inp_in_report_new), - OPTIONS : (_S, inp_in_option_new), - COORDINATES : (_L, inp_in_coord_new), - VERTICES : (_L, inp_in_vertex_new), - LABELS : (_L, inp_in_label_new), - BACKDROP : (_S, inp_in_backdrop_new), - #END : 'END', -} - -_level_1 = [ - TITLE, - PATTERNS, - CURVES, - CONTROLS, - RULES, - TIMES, - REPORT, - OPTIONS, - BACKDROP, -] - -_level_2 = [ - JUNCTIONS, - RESERVOIRS, - TANKS, -] - -_level_3 = [ - PIPES, - PUMPS, - VALVES, - DEMANDS, - EMITTERS, - QUALITY, - SOURCES, - MIXING, - COORDINATES, - LABELS, -] - -_level_4 = [ - TAGS, - STATUS, - ENERGY, - REACTIONS, - VERTICES, -] - -def _get_offset(inp: str) -> tuple[dict[str, list[int]], bool]: - offset: dict[str, list[int]] = {} - - current = '' - demand_outside = False - - with open(inp) as f: - while True: - line = f.readline() - if not line: - break - - line = line.strip() - if line.startswith('['): - for s in section_name: - if line.startswith(f'[{s}'): - if s not in offset: - offset[s] = [] - offset[s].append(f.tell()) - current = s - break - elif line != '' and line.startswith(';') == False: - if current == DEMANDS: - demand_outside = True - - return (offset, demand_outside) - - -def print_time(desc: str) -> datetime.datetime: - now = datetime.datetime.now() - time = now.strftime('%Y-%m-%d %H:%M:%S') - print(f"{time}: {desc}") - return now - - -class SQLBatch: - def __init__(self, project: str, count: int = 100) -> None: - self.batch: list[str] = [] - self.project = project - self.count = count - - def add(self, sql: str) -> None: - self.batch.append(sql) - if len(self.batch) == self.count: - self.flush() - - def flush(self) -> None: - write(self.project, ''.join(self.batch)) - self.batch.clear() - - -def 
parse_file(project: str, inp: str) -> None: - start = print_time(f'Start reading "{inp}"...') - - print_time("First scan...") - offset, demand_outside = _get_offset(inp) - - levels = _level_1 + _level_2 + _level_3 + _level_4 - - # parse the whole section rather than line - sections : dict[str, list[str]]= {} - for [s, t] in _handler.items(): - if t[0] == _S: - sections[s] = [] - - current_pattern = None - current_curve = None - curve_type_desc_line = None - - sql_batch = SQLBatch(project) - - print_time("Second scan...") - with open(inp) as f: - for s in levels: - if s not in offset: - continue - - if s == DEMANDS and demand_outside == False: - continue - - print_time(f"[{s}]") - - is_s = _handler[s][0] == _S - handler = _handler[s][1] - - for ptr in offset[s]: - f.seek(ptr) - - while True: - line = f.readline() - if not line: - break - - line = line.strip() - if line.startswith('['): - break - elif line == '': - continue - - if is_s: - sections[s].append(line) - else: - if line.startswith(';'): - line = line.removeprefix(';') - if s == PATTERNS: # ;desc - pass - elif s == CURVES: # ;type: desc - curve_type_desc_line = line - continue - - if s == PATTERNS: - tokens = line.split() - if current_pattern != tokens[0]: - sql_batch.add(f"insert into _pattern (id) values ('{tokens[0]}');") - current_pattern = tokens[0] - elif s == CURVES: - tokens = line.split() - if current_curve != tokens[0]: - type = CURVE_TYPE_PUMP - if curve_type_desc_line != None: - type = curve_type_desc_line.split(':')[0].strip() - sql_batch.add(f"insert into _curve (id, type) values ('{tokens[0]}', '{type}');") - current_curve = tokens[0] - curve_type_desc_line = None - - if s == JUNCTIONS: - sql_batch.add(handler(line, demand_outside)) - else: - sql_batch.add(handler(line)) - - f.seek(0) - - if is_s: - sql_batch.add(handler(sections[s])) - - sql_batch.flush() - - end = print_time(f'End reading "{inp}"') - print(f"Total (in second): {(end-start).seconds}(s)") - - -def read_inp_new(project: str, 
inp: str) -> bool: - if is_project_open(project): - close_project(project) - - if have_project(project): - delete_project(project) - - create_project(project) - open_project(project) - - parse_file(project, inp) - - '''try: - parse_inp(project, inp) - except: - close_project(project) - delete_project(project) - return False''' - - close_project(project) - return True - - -def import_inp_new(project: str, cs: ChangeSet) -> bool: - if is_project_open(project): - close_project(project) - - if have_project(project): - delete_project(project) - - create_project(project) - open_project(project) - - close_project(project) - - return True diff --git a/tjnetwork.py b/tjnetwork.py index 719d5c0..ade223f 100644 --- a/tjnetwork.py +++ b/tjnetwork.py @@ -186,12 +186,12 @@ def copy_project(source: str, new: str) -> None: return api.copy_project(source, new) def read_inp(name: str, inp: str) -> bool: - return api.read_inp_new(name, inp) + return api.read_inp(name, inp) def dump_inp(name: str, inp: str) -> None: return api.dump_inp(name, inp) -def import_inp(name: str, cs: ChangeSet) -> ChangeSet: +def import_inp(name: str, cs: ChangeSet) -> bool: return api.import_inp(name, cs) def export_inp(name: str) -> ChangeSet: