From 19750c3540c1b28b17a926eddaca3ffcc407a340 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Thu, 14 Jun 2018 12:37:38 +0200 Subject: [PATCH 001/215] Fixes to include weather cell ids and use GeneratorFluctuatingDing0 Objects, export mv_cb states also --- ding0/tools/results.py | 287 +++++++++++++++++++++++++---------------- 1 file changed, 174 insertions(+), 113 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 5722e98f..30ec8c72 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -7,11 +7,10 @@ DING0 lives at github: https://github.com/openego/ding0/ The documentation is available on RTD: http://ding0.readthedocs.io""" -__copyright__ = "Reiner Lemoine Institut gGmbH" -__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" -__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" -__author__ = "nesnoj, gplssm" - +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" +__author__ = "nesnoj, gplssm" import pickle import numpy as np @@ -26,7 +25,7 @@ from egoio.tools import db from ding0.core import NetworkDing0 -from ding0.core import GeneratorDing0 +from ding0.core import GeneratorDing0, GeneratorFluctuatingDing0 from ding0.core import LVCableDistributorDing0, MVCableDistributorDing0 from ding0.core import MVStationDing0, LVStationDing0 from ding0.core import CircuitBreakerDing0 @@ -539,7 +538,7 @@ def calculate_mvgd_stats(nw): if not isinstance(node, MVCableDistributorDing0) and not isinstance(node, CircuitBreakerDing0): if not nx.has_path(G, root, node): continue - #print(node, node.lv_load_area.is_aggregated) # only debug + # print(node, node.lv_load_area.is_aggregated) # only debug else: path = nx.shortest_path(G, root, node) for i in range(len(path) - 1): @@ -564,7 +563,7 @@ def calculate_mvgd_stats(nw): # add impedance of transformers in LV station lvstation_impedance = 0. for trafo in node.transformers(): - lvstation_impedance += 1. / np.hypot(trafo.r,trafo.x) # transformers operating in parallel + lvstation_impedance += 1. / np.hypot(trafo.r, trafo.x) # transformers operating in parallel if lvstation_impedance > 0.: # avoid dividing by zero lvstation_impedance = 1. / lvstation_impedance else: @@ -580,12 +579,16 @@ def calculate_mvgd_stats(nw): path = nx.shortest_path(G_lv, node, lv_node) lv_impedance = lvstation_impedance lv_path_length = 0. - for i in range(len(path)-1): - lv_impedance += np.sqrt((G_lv.edge[path[i]][path[i+1]]['branch'].type['L'] * 1e-3 * omega * \ - G_lv.edge[path[i]][path[i+1]]['branch'].length)**2. + \ - (G_lv.edge[path[i]][path[i+1]]['branch'].type['R'] * \ - G_lv.edge[path[i]][path[i+1]]['branch'].length)**2.) - lv_path_length += G_lv.edge[path[i]][path[i+1]]['branch'].length + for i in range(len(path) - 1): + lv_impedance += np.sqrt((G_lv.edge[path[i]][path[i + 1]]['branch'].type[ + 'L'] * 1e-3 * omega * \ + G_lv.edge[path[i]][path[i + 1]][ + 'branch'].length) ** 2. + \ + (G_lv.edge[path[i]][path[i + 1]]['branch'].type[ + 'R'] * \ + G_lv.edge[path[i]][path[i + 1]][ + 'branch'].length) ** 2.) 
+ lv_path_length += G_lv.edge[path[i]][path[i + 1]]['branch'].length lv_thermal_limit = G_lv.edge[path[0]][path[1]]['branch'].type['I_max_th'] mvlv_impedances[lv_node] = mv_impedance + lv_impedance @@ -1320,8 +1323,8 @@ def process_stats(mv_districts, ####################################################################### clusters = [mv_districts[x:x + n_of_districts] for x in range(0, len(mv_districts), n_of_districts)] - mv_stats = [] - lv_stats = [] + mv_stats = [] + lv_stats = [] mv_crit_nodes = [] mv_crit_edges = [] lv_crit_nodes = [] @@ -1626,7 +1629,7 @@ def export_network(nw, mode=''): mvcd_idx = 0 mv_cd_dict = {} mvstations_idx = 0 - hvmv_stations_dict = {} + mv_stations_dict = {} mvtrafos_idx = 0 hvmv_trafos_dict = {} lvgen_idx = 0 @@ -1634,7 +1637,7 @@ def export_network(nw, mode=''): lvcd_idx = 0 lv_cd_dict = {} lvstations_idx = 0 - mvlv_stations_dict = {} + lv_stations_dict = {} lvtrafos_idx = 0 mvlv_trafos_dict = {} areacenter_idx = 0 @@ -1686,7 +1689,7 @@ def aggregate_loads(la_center, aggr): if s not in aggr['load']: aggr['load'][s] = {} - for t in ['nominal','peak']: + for t in ['nominal', 'peak']: if t not in aggr['load'][s]: aggr['load'][s][t] = 0 @@ -1758,7 +1761,7 @@ def aggregate_loads(la_center, aggr): if isinstance(node, LVStationDing0): if not node.lv_load_area.is_aggregated: lvstations_idx += 1 - mvlv_stations_dict[lvstations_idx] = { + lv_stations_dict[lvstations_idx] = { 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), 'geom': geom, @@ -1792,7 +1795,7 @@ def aggregate_loads(la_center, aggr): # MVStation elif isinstance(node, MVStationDing0): mvstations_idx += 1 - hvmv_stations_dict[mvstations_idx] = { + mv_stations_dict[mvstations_idx] = { 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), 'MV_grid_id_db': mv_grid_id_db, 'geom': geom, @@ -1814,24 +1817,39 @@ def aggregate_loads(la_center, aggr): } # MVGenerator - elif isinstance(node, GeneratorDing0): + elif (isinstance(node, GeneratorDing0) or isinstance(node, GeneratorFluctuatingDing0)): if node.subtype == None: subtype = 'other' else: subtype = node.subtype - type = node.type - mvgen_idx += 1 - mv_gen_dict[mvgen_idx] = { - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), - 'MV_grid_id_db': mv_grid_id_db, - 'geom': geom, - 'type': type, - 'subtype': subtype, - 'v_level': node.v_level, - 'nominal_capacity': node.capacity, - 'run_id': run_id, - 'is_aggregated': False, - } + if isinstance(node, GeneratorFluctuatingDing0): + type = node.type + mvgen_idx += 1 + mv_gen_dict[mvgen_idx] ={ + 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), + 'MV_grid_id_db': mv_grid_id_db, + 'geom': geom, + 'type': type, + 'subtype': subtype, + 'v_level': node.v_level, + 'nominal_capacity': node.capacity, + 'run_id': run_id, + 'is_aggregated': False, + 'weather_cell_id': node.weather_cell_id, + } + else: + type = node.type + mvgen_idx += 1 + mv_gen_dict[mvgen_idx] = { + 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), + 'MV_grid_id_db': mv_grid_id_db, + 'geom': geom, + 'type': type, + 'subtype': subtype, + 'v_level': node.v_level, + 'nominal_capacity': node.capacity, + 'run_id': run_id, + 'is_aggregated': False} # MVBranchTees elif isinstance(node, MVCableDistributorDing0): @@ -1855,16 +1873,36 @@ def aggregate_loads(la_center, aggr): # Determine 
aggregated generation in LV grid for lvgd in node.lv_load_area._lv_grid_districts: - + weather_cell_ids = {} for aggr_gen in lvgd.lv_grid.generators(): aggr = aggregate_generators(aggr_gen, aggr) + # Get the aggregated weather cell id of the area + # b + if isinstance(aggr_gen, GeneratorFluctuatingDing0): + if aggr_gen.weather_cell_id not in weather_cell_ids.keys(): + weather_cell_ids[aggr_gen.weather_cell_id] = 1 + else: + weather_cell_ids[aggr_gen.weather_cell_id] += 1 + if aggr_gen.subtype == None: subtype = 'other' else: subtype = aggr_gen.subtype type = aggr_gen.type + # Get the weather cell id that occurs the most + weather_cell_id = list(weather_cell_ids.keys())[ + list(weather_cell_ids.values()).index(max(weather_cell_ids.values()))] + + for v_level in aggr['generation']: + for type in aggr['generation'][v_level]: + for subtype in aggr['generation'][v_level][type]: + aggr['generation'][v_level][type][subtype]['weather_cell_id'] = \ + weather_cell_id + + + # Determine aggregated load in MV grid # -> Implement once loads in Ding0 MV grids exist @@ -1877,7 +1915,7 @@ def aggregate_loads(la_center, aggr): 'geom': node.lv_load_area.geo_area} aggr_line_type = nw._static_data['MV_cables'].iloc[ nw._static_data['MV_cables']['I_max_th'].idxmax()] - geom = wkt_dumps(node.lv_load_area.geo_area) + geom = wkt_dumps(node.geo_data) for aggr_node in aggr: if aggr_node == 'generation': @@ -1893,11 +1931,13 @@ def aggregate_loads(la_center, aggr): [str(aggr_gen.__class__.__name__), 'MV', str(mv_grid_id), str(aggr_gen.id_db), str(mvgenaggr_idx)]), # , str(mvgen_idx) 'MV_grid_id_db': mv_grid_id_db, - 'geom': geom,#from_shape(Point(mv_district.mv_grid.station().geo_data), srid=srid),#lv_load_area.geo_area,#geom, #?? Polygon # hvmv_stations_dict[mvstations_idx]['geom'], # + 'geom': geom, + # from_shape(Point(mv_district.mv_grid.station().geo_data), srid=srid),#lv_load_area.geo_area,#geom, #?? 
Polygon # mv_stations_dict[mvstations_idx]['geom'], # 'type': type, 'subtype': subtype, 'v_level': v_level, 'nominal_capacity': aggr['generation'][v_level][type][subtype]['capacity'], + 'weather_cell_id': aggr['generation'][v_level][type][subtype]['weather_cell_id'], 'is_aggregated': True, 'run_id': run_id, } @@ -1937,7 +1977,7 @@ def aggregate_loads(la_center, aggr): 'MV_grid_id_db': mv_grid_id_db, 'geom': geom, # from_shape(Point(mv_district.mv_grid.station().geo_data), srid=srid), - 'consumption_{}'.format(type): aggr['load'][type]['nominal'], + 'consumption': json.dumps({type: aggr['load'][type]['nominal']}), 'is_aggregated': True, 'run_id': run_id, } @@ -2049,24 +2089,40 @@ def aggregate_loads(la_center, aggr): # geom = wkt_dumps(node.geo_data) # LVGenerator - if isinstance(node, GeneratorDing0): + if (isinstance(node, GeneratorDing0) or isinstance(node, GeneratorFluctuatingDing0)): if node.subtype == None: subtype = 'other' else: subtype = node.subtype - type = node.type - lvgen_idx += 1 - lv_gen_dict[lvgen_idx] = { - 'id_db': '_'.join( - [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), - 'LV_grid_id_db': lv_grid_id_db, - 'geom': wkt_dumps(node.geo_data), - 'type': type, - 'subtype': subtype, - 'v_level': node.v_level, - 'nominal_capacity': node.capacity, - 'run_id': run_id, - } + if isinstance(node, GeneratorFluctuatingDing0): + type = node.type + lvgen_idx += 1 + lv_gen_dict[lvgen_idx] = { + 'id_db': '_'.join( + [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), + 'LV_grid_id_db': lv_grid_id_db, + 'geom': wkt_dumps(node.geo_data), + 'type': type, + 'subtype': subtype, + 'v_level': node.v_level, + 'nominal_capacity': node.capacity, + 'run_id': run_id, + 'weather_cell_id': node.weather_cell_id, + } + else: + type = node.type + lvgen_idx += 1 + lv_gen_dict[lvgen_idx] = { + 'id_db': '_'.join( + [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), + 'LV_grid_id_db': lv_grid_id_db, + 'geom': wkt_dumps(node.geo_data), + 'type': type, + 'subtype': subtype, + 'v_level': node.v_level, + 'nominal_capacity': node.capacity, + 'run_id': run_id, + } # LVcd elif isinstance(node, LVCableDistributorDing0): @@ -2082,34 +2138,24 @@ def aggregate_loads(la_center, aggr): # LVload elif isinstance(node, LVLoadDing0): - consumption_dict = {} - for k in ['residential', 'retail', 'agricultural', 'industrial']: - if k in node.consumption.keys(): - consumption_dict[k] = node.consumption[k] - else: - consumption_dict[k] = None lvloads_idx += 1 lv_loads_dict[lvloads_idx] = { 'id_db': '_'.join( [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), 'LV_grid_id_db': lv_grid_id_db, - 'geom': None,#wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? - # 'consumption': json.dumps(node.consumption), - 'consumption_residential': consumption_dict['residential'], - 'consumption_retail': consumption_dict['retail'], - 'consumption_agricultural': consumption_dict['agricultural'], - 'consumption_industrial': consumption_dict['industrial'], + 'geom': None, + # wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? 
+ 'consumption': json.dumps(node.consumption), 'run_id': run_id, } - del consumption_dict else: type = 'Unknown' # LVedges for branch in lv_district.lv_grid.graph_edges(): - # geom = from_shape( - # LineString([branch['adj_nodes'][0].geo_data, branch['adj_nodes'][1].geo_data]), srid=srid) + # geom = from_shape( + # LineString([branch['adj_nodes'][0].geo_data, branch['adj_nodes'][1].geo_data]), srid=srid) if not any([isinstance(branch['adj_nodes'][0], LVLoadAreaCentreDing0), isinstance(branch['adj_nodes'][1], LVLoadAreaCentreDing0)]): lines_idx += 1 @@ -2142,14 +2188,14 @@ def aggregate_loads(la_center, aggr): lv_grid = pd.DataFrame.from_dict(lv_grid_dict, orient='index') lv_gen = pd.DataFrame.from_dict(lv_gen_dict, orient='index') lv_cd = pd.DataFrame.from_dict(lv_cd_dict, orient='index') - mvlv_stations = pd.DataFrame.from_dict(mvlv_stations_dict, orient='index') + lv_stations = pd.DataFrame.from_dict(lv_stations_dict, orient='index') mvlv_trafos = pd.DataFrame.from_dict(mvlv_trafos_dict, orient='index') lv_loads = pd.DataFrame.from_dict(lv_loads_dict, orient='index') mv_grid = pd.DataFrame.from_dict(mv_grid_dict, orient='index') mv_gen = pd.DataFrame.from_dict(mv_gen_dict, orient='index') - # mv_cb = pd.DataFrame.from_dict(mvcb_dict, orient='index') + mv_cb = pd.DataFrame.from_dict(mvcb_dict, orient='index') mv_cd = pd.DataFrame.from_dict(mv_cd_dict, orient='index') - hvmv_stations = pd.DataFrame.from_dict(hvmv_stations_dict, orient='index') + mv_stations = pd.DataFrame.from_dict(mv_stations_dict, orient='index') # mv_areacenter= pd.DataFrame.from_dict(areacenter_dict, orient='index') hvmv_trafos = pd.DataFrame.from_dict(hvmv_trafos_dict, orient='index') mv_loads = pd.DataFrame.from_dict(mv_loads_dict, orient='index') @@ -2158,33 +2204,40 @@ def aggregate_loads(la_center, aggr): lines = lines[sorted(lines.columns.tolist())] - return run_id, lv_grid, lv_gen, lv_cd, mvlv_stations, mvlv_trafos, lv_loads, mv_grid, mv_gen, mv_cd, \ - hvmv_stations, hvmv_trafos, mv_loads, lines, mvlv_mapping # mv_areacenter, + return run_id, \ + lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ + mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ + lines, mvlv_mapping # mv_areacenter, ####################################################### -def export_data_tocsv(path, run_id, lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, mv_grid, mv_gen, mv_cb, - mv_cd, mv_stations, hvmv_trafos, mv_loads, lines, mapping): +def export_data_tocsv(path, run_id, + lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, + mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, + lines, mvlv_mapping, csv_sep=','): + # make directory with run_id if it doesn't exist + os.makedirs(os.path.join(path, run_id), exist_ok=True) + # Exports data to csv def export_network_tocsv(path, table, tablename): - return table.to_csv(''.join([path, '/', run_id, '/', tablename, '.csv']), ';') + return table.to_csv(os.path.join(path, run_id, tablename + '.csv'), sep=csv_sep) export_network_tocsv(path, lv_grid, 'lv_grid') export_network_tocsv(path, lv_gen, 'lv_generator') export_network_tocsv(path, lv_cd, 'lv_branchtee') - export_network_tocsv(path, lv_stations, 'lvmv_station') - export_network_tocsv(path, mvlv_trafos, 'lv_transformer') + export_network_tocsv(path, lv_stations, 'lv_station') + export_network_tocsv(path, mvlv_trafos, 'mvlv_transformer') export_network_tocsv(path, lv_loads, 'lv_load') export_network_tocsv(path, mv_grid, 'mv_grid') export_network_tocsv(path, mv_gen, 'mv_generator') 
export_network_tocsv(path, mv_cd, 'mv_branchtee') - export_network_tocsv(path, mv_stations, 'mvhv_station') - export_network_tocsv(path, hvmv_trafos, 'mv_transformer') + export_network_tocsv(path, mv_stations, 'mv_station') + export_network_tocsv(path, hvmv_trafos, 'hvmv_transformer') export_network_tocsv(path, mv_cb, 'mv_circuitbreaker') export_network_tocsv(path, mv_loads, 'mv_load') export_network_tocsv(path, lines, 'line') - export_network_tocsv(path, mapping, 'mvlv_mapping') + export_network_tocsv(path, mvlv_mapping, 'mvlv_mapping') # export_network_tocsv(path, areacenter, 'areacenter') @@ -2198,7 +2251,7 @@ def export_network_to_oedb(session, table, tabletype, srid): dataset = [] engine = create_engine("sqlite:///myexample.db") print("Exporting table type : {}".format(tabletype)) - if tabletype == 'lines': + if tabletype == 'line': table.apply(lambda row: session.add(md.EgoGridDing0Line( run_id=row['run_id'], @@ -2241,17 +2294,14 @@ def export_network_to_oedb(session, table, tabletype, srid): )) , axis=1) - elif tabletype == 'lv_loads': + elif tabletype == 'lv_load': table.apply(lambda row: session.add(md.EgoGridDing0LvLoad( run_id=row['run_id'], id_db=row['id_db'], lv_grid_id_db=row['LV_grid_id_db'], geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, - consumption_residential=row['consumption_residential'], - consumption_retail=row['consumption_retail'], - consumption_agricultural=row['consumption_agricultural'], - consumption_industrial=row['consumption_industrial'], + consumption=row['consumption'] )) , axis=1) @@ -2267,9 +2317,9 @@ def export_network_to_oedb(session, table, tabletype, srid): )) , axis=1) - elif tabletype == 'mvlv_stations': + elif tabletype == 'lv_station': table.apply(lambda row: - session.add(md.EgoGridDing0MvlvStation( + session.add(md.EgoGridDing0LvStation( run_id=row['run_id'], id_db=row['id_db'], lv_grid_id_db=row['LV_grid_id_db'], @@ -2277,7 +2327,7 @@ def export_network_to_oedb(session, table, tabletype, srid): )) , axis=1) - elif tabletype == 'mvlv_trafos': + elif tabletype == 'mvlv_trafo': table.apply(lambda row: session.add(md.EgoGridDing0MvlvTransformer( run_id=row['run_id'], @@ -2312,6 +2362,17 @@ def export_network_to_oedb(session, table, tabletype, srid): )) , axis=1) + elif tabletype == 'mv_cb': + table.apply(lambda row: + session.add(md.EgoGridDing0MvCircuitbreaker( + run_id=row['run_id'], + id_db=row['id_db'], + mv_grid_id_db=row['MV_grid_id_db'], + geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, + status=row['status'], + )) + , axis=1) + elif tabletype == 'mv_gen': table.apply(lambda row: session.add(md.EgoGridDing0MvGenerator( @@ -2327,7 +2388,7 @@ def export_network_to_oedb(session, table, tabletype, srid): )) , axis=1) - elif tabletype == 'mv_loads': + elif tabletype == 'mv_load': table.apply(lambda row: session.add(md.EgoGridDing0MvLoad( run_id=row['run_id'], @@ -2335,10 +2396,7 @@ def export_network_to_oedb(session, table, tabletype, srid): mv_grid_id_db=row['MV_grid_id_db'], geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, is_aggregated=row['is_aggregated'], - consumption_residential=row['consumption_residential'], - consumption_retail=row['consumption_retail'], - consumption_agricultural=row['consumption_agricultural'], - consumption_industrial=row['consumption_industrial'], + consumption=row['consumption'], )) , axis=1) @@ -2354,9 +2412,9 @@ def export_network_to_oedb(session, table, tabletype, srid): )) , axis=1) - elif tabletype == 'hvmv_stations': + elif tabletype == 
'mv_station': table.apply(lambda row: - session.add(md.EgoGridDing0HvmvStation( + session.add(md.EgoGridDing0MvStation( run_id=row['run_id'], id_db=row['id_db'], mv_grid_id_db=row['MV_grid_id_db'], @@ -2364,7 +2422,7 @@ def export_network_to_oedb(session, table, tabletype, srid): )) , axis=1) - elif tabletype == 'hvmv_trafos': + elif tabletype == 'hvmv_trafo': table.apply(lambda row: session.add(md.EgoGridDing0HvmvTransformer( run_id=row['run_id'], @@ -2383,23 +2441,24 @@ def export_network_to_oedb(session, table, tabletype, srid): session.commit() -def export_data_to_oedb(session, srid, lv_grid, lv_gen, lv_cd, mvlv_stations, mvlv_trafos, lv_loads, mv_grid, mv_gen, - mv_cd, hvmv_stations, hvmv_trafos, mv_loads, lines, mvlv_mapping): +def export_data_to_oedb(session, srid, lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, mv_grid, mv_gen, + mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, lines, mvlv_mapping): # only for testing # engine = create_engine('sqlite:///:memory:') export_network_to_oedb(session, lv_grid, 'lv_grid', srid) export_network_to_oedb(session, lv_gen, 'lv_gen', srid) export_network_to_oedb(session, lv_cd, 'lv_cd', srid) - export_network_to_oedb(session, mvlv_stations, 'mvlv_stations', srid) - export_network_to_oedb(session, mvlv_trafos, 'mvlv_trafos', srid) - export_network_to_oedb(session, lv_loads, 'lv_loads', srid) + export_network_to_oedb(session, lv_stations, 'lv_station', srid) + export_network_to_oedb(session, mvlv_trafos, 'mvlv_trafo', srid) + export_network_to_oedb(session, lv_loads, 'lv_load', srid) export_network_to_oedb(session, mv_grid, 'mv_grid', srid) export_network_to_oedb(session, mv_gen, 'mv_gen', srid) + export_network_to_oedb(session, mv_cb, 'mv_cb', srid) export_network_to_oedb(session, mv_cd, 'mv_cd', srid) - export_network_to_oedb(session, hvmv_stations, 'hvmv_stations', srid) - export_network_to_oedb(session, hvmv_trafos, 'hvmv_trafos', srid) - export_network_to_oedb(session, mv_loads, 'mv_loads', srid) - export_network_to_oedb(session, lines, 'lines', srid) + export_network_to_oedb(session, mv_stations, 'mv_station', srid) + export_network_to_oedb(session, hvmv_trafos, 'hvmv_trafo', srid) + export_network_to_oedb(session, mv_loads, 'mv_load', srid) + export_network_to_oedb(session, lines, 'line', srid) export_network_to_oedb(session, mvlv_mapping, 'mvlv_mapping', srid) @@ -2409,14 +2468,15 @@ def create_ding0_db_tables(engine): md.EgoGridDing0LvGenerator, md.EgoGridDing0LvLoad, md.EgoGridDing0LvGrid, - md.EgoGridDing0MvlvStation, + md.EgoGridDing0LvStation, md.EgoGridDing0MvlvTransformer, md.EgoGridDing0MvlvMapping, md.EgoGridDing0MvBranchtee, + md.EgoGridDing0MvCircuitbreaker, md.EgoGridDing0MvGenerator, md.EgoGridDing0MvLoad, md.EgoGridDing0MvGrid, - md.EgoGridDing0HvmvStation, + md.EgoGridDing0MvStation, md.EgoGridDing0HvmvTransformer] for tab in tables: @@ -2429,14 +2489,15 @@ def drop_ding0_db_tables(engine): md.EgoGridDing0LvGenerator, md.EgoGridDing0LvLoad, md.EgoGridDing0LvGrid, - md.EgoGridDing0MvlvStation, + md.EgoGridDing0LvStation, md.EgoGridDing0MvlvTransformer, md.EgoGridDing0MvlvMapping, md.EgoGridDing0MvBranchtee, + md.EgoGridDing0MvCircuitbreaker, md.EgoGridDing0MvGenerator, md.EgoGridDing0MvLoad, md.EgoGridDing0MvGrid, - md.EgoGridDing0HvmvStation, + md.EgoGridDing0MvStation, md.EgoGridDing0HvmvTransformer] print("Please confirm that you would like to drop the following tables:") @@ -2444,8 +2505,8 @@ def drop_ding0_db_tables(engine): print("{: 3d}. 
{}".format(n, tab)) print("Please confirm with either of the choices below:\n" + \ - "- yes\n" +\ - "- no\n" +\ + "- yes\n" + \ + "- no\n" + \ "- the indexes to drop in the format 0, 2, 3, 5") confirmation = input("Please type the choice completely as there is no default choice.") if re.fullmatch('[Yy]es', confirmation): From 7ea422c9dd2f8ae6150574e239a2c2da7167c346 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Tue, 19 Jun 2018 14:37:26 +0200 Subject: [PATCH 002/215] modifications to include metadata as an output which goes into oedb or as a json file when putting out csv --- ding0/tools/results.py | 87 +++++++++++++++++++++++++++++------------- 1 file changed, 61 insertions(+), 26 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 30ec8c72..d034d7be 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -1611,6 +1611,7 @@ def export_network(nw, mode=''): ############################## # from datetime import datetime run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") + metadata_json = json.dumps(nw.metadata) ############################## ############################# # go through the grid collecting info @@ -2204,7 +2205,7 @@ def aggregate_loads(la_center, aggr): lines = lines[sorted(lines.columns.tolist())] - return run_id, \ + return run_id, metadata_json,\ lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ lines, mvlv_mapping # mv_areacenter, @@ -2212,13 +2213,18 @@ def aggregate_loads(la_center, aggr): ####################################################### -def export_data_tocsv(path, run_id, +def export_data_tocsv(path, run_id, metadata_json, lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, lines, mvlv_mapping, csv_sep=','): # make directory with run_id if it doesn't exist os.makedirs(os.path.join(path, run_id), exist_ok=True) + # put a text file with the metadata + metadata = json.loads(metadata_json) + with open(os.path.join(path, run_id, 'metadata.json'), 'w') as metafile: + json.dump(metadata, metafile) + # Exports data to csv def export_network_tocsv(path, table, tablename): return table.to_csv(os.path.join(path, run_id, tablename + '.csv'), sep=csv_sep) @@ -2441,29 +2447,57 @@ def export_network_to_oedb(session, table, tabletype, srid): session.commit() -def export_data_to_oedb(session, srid, lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, mv_grid, mv_gen, - mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, lines, mvlv_mapping): +def export_data_to_oedb(session, run_id, metadata_json, srid, + lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, + mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, lines, mvlv_mapping): # only for testing # engine = create_engine('sqlite:///:memory:') - export_network_to_oedb(session, lv_grid, 'lv_grid', srid) - export_network_to_oedb(session, lv_gen, 'lv_gen', srid) - export_network_to_oedb(session, lv_cd, 'lv_cd', srid) - export_network_to_oedb(session, lv_stations, 'lv_station', srid) - export_network_to_oedb(session, mvlv_trafos, 'mvlv_trafo', srid) - export_network_to_oedb(session, lv_loads, 'lv_load', srid) - export_network_to_oedb(session, mv_grid, 'mv_grid', srid) - export_network_to_oedb(session, mv_gen, 'mv_gen', srid) - export_network_to_oedb(session, mv_cb, 'mv_cb', srid) - export_network_to_oedb(session, mv_cd, 'mv_cd', srid) - export_network_to_oedb(session, mv_stations, 
'mv_station', srid) - export_network_to_oedb(session, hvmv_trafos, 'hvmv_trafo', srid) - export_network_to_oedb(session, mv_loads, 'mv_load', srid) - export_network_to_oedb(session, lines, 'line', srid) - export_network_to_oedb(session, mvlv_mapping, 'mvlv_mapping', srid) + + # get the run_id from model_draft.ego_grid_ding0_versioning + # compare the run_id from table to the current run_id + + oedb_versioning_query = session.query( + md.EgoGridDing0Versioning.run_id, + md.EgoGridDing0Versioning.description + ).filter(md.EgoGridDing0Versioning.run_id == run_id) + + oedb_versioning = pd.read_sql_query(oedb_versioning_query.statement, + session.bind) + + if oedb_versioning.empty: + # if the run_id doesn't exist then + # create entry into ego_grid_ding0_versioning: + metadata_df = pd.DataFrame({'run_id': run_id, + 'description': metadata_json}, + index=[0]) + metadata_df.apply(lambda row: + session.add(md.EgoGridDing0Versioning( + run_id=row['run_id'], + description=row['description'], + )) + , axis=1) + export_network_to_oedb(session, lv_grid, 'lv_grid', srid) + export_network_to_oedb(session, lv_gen, 'lv_gen', srid) + export_network_to_oedb(session, lv_cd, 'lv_cd', srid) + export_network_to_oedb(session, lv_stations, 'lv_station', srid) + export_network_to_oedb(session, mvlv_trafos, 'mvlv_trafo', srid) + export_network_to_oedb(session, lv_loads, 'lv_load', srid) + export_network_to_oedb(session, mv_grid, 'mv_grid', srid) + export_network_to_oedb(session, mv_gen, 'mv_gen', srid) + export_network_to_oedb(session, mv_cb, 'mv_cb', srid) + export_network_to_oedb(session, mv_cd, 'mv_cd', srid) + export_network_to_oedb(session, mv_stations, 'mv_station', srid) + export_network_to_oedb(session, hvmv_trafos, 'hvmv_trafo', srid) + export_network_to_oedb(session, mv_loads, 'mv_load', srid) + export_network_to_oedb(session, lines, 'line', srid) + export_network_to_oedb(session, mvlv_mapping, 'mvlv_mapping', srid) + else: + raise KeyError("run_id already present! No tables are input!") def create_ding0_db_tables(engine): - tables = [md.EgoGridDing0Line, + tables = [md.EgoGridDing0Versioning, + md.EgoGridDing0Line, md.EgoGridDing0LvBranchtee, md.EgoGridDing0LvGenerator, md.EgoGridDing0LvLoad, @@ -2498,15 +2532,16 @@ def drop_ding0_db_tables(engine): md.EgoGridDing0MvLoad, md.EgoGridDing0MvGrid, md.EgoGridDing0MvStation, - md.EgoGridDing0HvmvTransformer] + md.EgoGridDing0HvmvTransformer, + md.EgoGridDing0Versioning] print("Please confirm that you would like to drop the following tables:") for n, tab in enumerate(tables): print("{: 3d}. {}".format(n, tab)) - print("Please confirm with either of the choices below:\n" + \ - "- yes\n" + \ - "- no\n" + \ + print("Please confirm with either of the choices below:\n" + + "- yes\n" + + "- no\n" + "- the indexes to drop in the format 0, 2, 3, 5") confirmation = input("Please type the choice completely as there is no default choice.") if re.fullmatch('[Yy]es', confirmation): @@ -2522,8 +2557,8 @@ def drop_ding0_db_tables(engine): tablist = np.array(tables)[indlist].tolist() for n, tab in enumerate(tablist): print("{: 3d}. 
{}".format(n, tab)) - con2 = input("Please confirm with either of the choices below:\n" + \ - "- yes\n" + \ + con2 = input("Please confirm with either of the choices below:\n" + + "- yes\n" + "- no") if re.fullmatch('[Yy]es', con2): for tab in tablist: From a740703ccac3419ad0eaab35fa3bd975a881fcf7 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Wed, 20 Jun 2018 10:10:04 +0200 Subject: [PATCH 003/215] Added function to grant access to ding0 db tables --- ding0/tools/results.py | 45 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index d034d7be..0814a777 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -2571,6 +2571,51 @@ def drop_ding0_db_tables(engine): print("Confirmation unclear, no action taken") +def grant_access_ding0_db_tables(engine): + + tables = [md.EgoGridDing0Line, + md.EgoGridDing0LvBranchtee, + md.EgoGridDing0LvGenerator, + md.EgoGridDing0LvLoad, + md.EgoGridDing0LvGrid, + md.EgoGridDing0LvStation, + md.EgoGridDing0MvlvTransformer, + md.EgoGridDing0MvlvMapping, + md.EgoGridDing0MvBranchtee, + md.EgoGridDing0MvCircuitbreaker, + md.EgoGridDing0MvGenerator, + md.EgoGridDing0MvLoad, + md.EgoGridDing0MvGrid, + md.EgoGridDing0MvStation, + md.EgoGridDing0HvmvTransformer, + md.EgoGridDing0Versioning] + + def grant_db_access(conn, table, role): + r"""Gives access to database users/ groups + Parameters + ---------- + conn : sqlalchemy connection object + A valid connection to a database + table : sqlalchmy Table class definition + The database table + role : str + database role that access is granted to + """ + tablename = table.__table__.name + schema = table.__table__.schema + + + grant_str = """GRANT ALL ON TABLE {schema}.{table} + TO {role} WITH GRANT OPTION;""".format(schema=schema, table=tablename, + role=role) + + conn.execute(grant_str) + + session = sessionmaker(bind=engine)() + + for tab in tables: + grant_db_access(session.bind, tab, 'oeuser') + ######################################################## if __name__ == "__main__": # nw = init_mv_grid(mv_grid_districts=[3544, 3545]) From 1f213aefe19e3a2a78815c3c4ed71f208e044278 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Thu, 21 Jun 2018 10:16:29 +0200 Subject: [PATCH 004/215] Fixed bug with writing ding0 versioning data --- ding0/tools/results.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 0814a777..46e67999 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -2476,6 +2476,8 @@ def export_data_to_oedb(session, run_id, metadata_json, srid, description=row['description'], )) , axis=1) + session.commit() + export_network_to_oedb(session, lv_grid, 'lv_grid', srid) export_network_to_oedb(session, lv_gen, 'lv_gen', srid) export_network_to_oedb(session, lv_cd, 'lv_cd', srid) From 308c13f2ed1d004de5a4d7cbb881fc986e45f335 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Thu, 21 Jun 2018 11:41:38 +0200 Subject: [PATCH 005/215] Modifications to Grant access Database, Todo: Test if it properly works as expected --- ding0/tools/results.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 46e67999..7068bcbd 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -2592,11 +2592,11 @@ def grant_access_ding0_db_tables(engine): md.EgoGridDing0HvmvTransformer, md.EgoGridDing0Versioning] - def grant_db_access(conn, table, role): + def grant_db_access(engine, table, 
role): r"""Gives access to database users/ groups Parameters ---------- - conn : sqlalchemy connection object + session : sqlalchemy session object A valid connection to a database table : sqlalchmy Table class definition The database table @@ -2607,16 +2607,18 @@ def grant_db_access(conn, table, role): schema = table.__table__.schema - grant_str = """GRANT ALL ON TABLE {schema}.{table} - TO {role} WITH GRANT OPTION;""".format(schema=schema, table=tablename, + grant_str = """BEGIN; + GRANT ALL PRIVILEGES ON TABLE {schema}.{table} + TO {role} WITH GRANT OPTION; + COMMIT;""".format(schema=schema, table=tablename, role=role) - conn.execute(grant_str) + engine.execute(grant_str) - session = sessionmaker(bind=engine)() + # engine.echo=True for tab in tables: - grant_db_access(session.bind, tab, 'oeuser') + grant_db_access(engine, tab, 'oeuser') ######################################################## if __name__ == "__main__": From 84d9944714e9c9b91a9dc247975244a164f378fb Mon Sep 17 00:00:00 2001 From: gplessm Date: Thu, 28 Jun 2018 13:16:02 +0200 Subject: [PATCH 006/215] Remove 'run_id' from results folder path --- ding0/tools/results.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 7068bcbd..3e730ba1 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -2218,16 +2218,16 @@ def export_data_tocsv(path, run_id, metadata_json, mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, lines, mvlv_mapping, csv_sep=','): # make directory with run_id if it doesn't exist - os.makedirs(os.path.join(path, run_id), exist_ok=True) + os.makedirs(path, exist_ok=True) # put a text file with the metadata metadata = json.loads(metadata_json) - with open(os.path.join(path, run_id, 'metadata.json'), 'w') as metafile: + with open(os.path.join(path, 'metadata.json'), 'w') as metafile: json.dump(metadata, metafile) # Exports data to csv def export_network_tocsv(path, table, tablename): - return table.to_csv(os.path.join(path, run_id, tablename + '.csv'), sep=csv_sep) + return table.to_csv(os.path.join(path, tablename + '.csv'), sep=csv_sep) export_network_tocsv(path, lv_grid, 'lv_grid') export_network_tocsv(path, lv_gen, 'lv_generator') From 567d15cb75c4c7ce6f450df50c96255ee1a74e75 Mon Sep 17 00:00:00 2001 From: gplessm Date: Thu, 28 Jun 2018 13:16:31 +0200 Subject: [PATCH 007/215] Add weather cell only if at least one generator exists --- ding0/tools/results.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 3e730ba1..da9dbb11 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -1892,15 +1892,16 @@ def aggregate_loads(la_center, aggr): subtype = aggr_gen.subtype type = aggr_gen.type - # Get the weather cell id that occurs the most - weather_cell_id = list(weather_cell_ids.keys())[ - list(weather_cell_ids.values()).index(max(weather_cell_ids.values()))] - - for v_level in aggr['generation']: - for type in aggr['generation'][v_level]: - for subtype in aggr['generation'][v_level][type]: - aggr['generation'][v_level][type][subtype]['weather_cell_id'] = \ - weather_cell_id + if weather_cell_ids: + # Get the weather cell id that occurs the most + weather_cell_id = list(weather_cell_ids.keys())[ + list(weather_cell_ids.values()).index(max(weather_cell_ids.values()))] + + for v_level in aggr['generation']: + for type in aggr['generation'][v_level]: + for subtype in aggr['generation'][v_level][type]: + 
aggr['generation'][v_level][type][subtype]['weather_cell_id'] = \ + weather_cell_id From 15fdc78d8f4a2170717804892bedf4d5c0cec652 Mon Sep 17 00:00:00 2001 From: gplessm Date: Thu, 28 Jun 2018 13:16:43 +0200 Subject: [PATCH 008/215] Upgrade to DP v0.4.2 --- ding0/config/config_db_tables.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/config/config_db_tables.cfg b/ding0/config/config_db_tables.cfg index 8fad7572..b6a76c14 100644 --- a/ding0/config/config_db_tables.cfg +++ b/ding0/config/config_db_tables.cfg @@ -24,7 +24,7 @@ mv_stations = EgoDpHvmvSubstation lv_stations = EgoDpMvlvSubstation re_generators = t_ego_dp_res_powerplant_sq_mview conv_generators = t_ego_dp_conv_powerplant_sq_mview -version = v0.4.1 +version = v0.4.2 [input_data_source] input_data = versioned \ No newline at end of file From 370a65b9136da863fabbc23f9f2c86eadc184093 Mon Sep 17 00:00:00 2001 From: gplessm Date: Fri, 6 Jul 2018 13:48:59 +0200 Subject: [PATCH 009/215] Export aggregated generators individually --- ding0/tools/results.py | 253 ++++++++++++----------------------------- 1 file changed, 71 insertions(+), 182 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index da9dbb11..3c460869 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -1648,35 +1648,6 @@ def export_network(nw, mode=''): LVMVmapping_idx = 0 mvlv_mapping_dict = {} - def aggregate_generators(gen, aggr): - """Aggregate generation capacity per voltage level - Parameters - ---------- - gen: ding0.core.GeneratorDing0 - Ding0 Generator object - aggr: dict - Aggregated generation capacity. For structure see - `_determine_aggregated_nodes()`. - Returns - ------- - """ - - if gen.v_level not in aggr['generation']: - aggr['generation'][gen.v_level] = {} - if gen.type not in aggr['generation'][gen.v_level]: - aggr['generation'][gen.v_level][gen.type] = {} - if gen.subtype not in aggr['generation'][gen.v_level][gen.type]: - aggr['generation'][gen.v_level][gen.type].update( - {gen.subtype: {'ids': [gen.id_db], - 'capacity': gen.capacity}}) - else: - aggr['generation'][gen.v_level][gen.type][gen.subtype][ - 'ids'].append(gen.id_db) - aggr['generation'][gen.v_level][gen.type][gen.subtype][ - 'capacity'] += gen.capacity - - return aggr - def aggregate_loads(la_center, aggr): """Aggregate consumption in load area per sector Parameters @@ -1827,7 +1798,8 @@ def aggregate_loads(la_center, aggr): type = node.type mvgen_idx += 1 mv_gen_dict[mvgen_idx] ={ - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), + # 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), + 'id_db': node.id_db, 'MV_grid_id_db': mv_grid_id_db, 'geom': geom, 'type': type, @@ -1842,7 +1814,8 @@ def aggregate_loads(la_center, aggr): type = node.type mvgen_idx += 1 mv_gen_dict[mvgen_idx] = { - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), + # 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), + 'id_db': node.id_db, 'MV_grid_id_db': mv_grid_id_db, 'geom': geom, 'type': type, @@ -1872,39 +1845,6 @@ def aggregate_loads(la_center, aggr): aggr = {'generation': {}, 'load': {}, 'aggregates': []} - # Determine aggregated generation in LV grid - for lvgd in node.lv_load_area._lv_grid_districts: - weather_cell_ids = {} - for aggr_gen in lvgd.lv_grid.generators(): - aggr = aggregate_generators(aggr_gen, aggr) - - # Get the aggregated weather cell id of the area - # b - if 
isinstance(aggr_gen, GeneratorFluctuatingDing0): - if aggr_gen.weather_cell_id not in weather_cell_ids.keys(): - weather_cell_ids[aggr_gen.weather_cell_id] = 1 - else: - weather_cell_ids[aggr_gen.weather_cell_id] += 1 - - if aggr_gen.subtype == None: - subtype = 'other' - else: - subtype = aggr_gen.subtype - type = aggr_gen.type - - if weather_cell_ids: - # Get the weather cell id that occurs the most - weather_cell_id = list(weather_cell_ids.keys())[ - list(weather_cell_ids.values()).index(max(weather_cell_ids.values()))] - - for v_level in aggr['generation']: - for type in aggr['generation'][v_level]: - for subtype in aggr['generation'][v_level][type]: - aggr['generation'][v_level][type][subtype]['weather_cell_id'] = \ - weather_cell_id - - - # Determine aggregated load in MV grid # -> Implement once loads in Ding0 MV grids exist @@ -1921,54 +1861,7 @@ def aggregate_loads(la_center, aggr): for aggr_node in aggr: if aggr_node == 'generation': - mvgenaggr_idx = 0 - - for v_level in aggr['generation']: - for type in aggr['generation'][v_level]: - for subtype in aggr['generation'][v_level][type]: - mvgen_idx += 1 - mvgenaggr_idx += 1 - mv_gen_dict[mvgen_idx] = { - 'id_db': '_'.join( - [str(aggr_gen.__class__.__name__), 'MV', str(mv_grid_id), - str(aggr_gen.id_db), str(mvgenaggr_idx)]), # , str(mvgen_idx) - 'MV_grid_id_db': mv_grid_id_db, - 'geom': geom, - # from_shape(Point(mv_district.mv_grid.station().geo_data), srid=srid),#lv_load_area.geo_area,#geom, #?? Polygon # mv_stations_dict[mvstations_idx]['geom'], # - 'type': type, - 'subtype': subtype, - 'v_level': v_level, - 'nominal_capacity': aggr['generation'][v_level][type][subtype]['capacity'], - 'weather_cell_id': aggr['generation'][v_level][type][subtype]['weather_cell_id'], - 'is_aggregated': True, - 'run_id': run_id, - } - - lines_idx += 1 - aggr_lines += 1 - lines_dict[lines_idx] = { - # ToDo: Rename edge_name - 'edge_name': '_'.join( - [str(mv_grid_id), 'aggr', str(node.lv_load_area.id_db), - str(aggr_lines)]), - # , 'vlevel', str(v_level), 'subtype', str(subtype)]),#}'.format(v_level=v_level, subtype=subtype), - 'grid_id_db': mv_grid_id_db, - # ToDo: read type_name from aggr_line_type - 'type_name': 'NA2XS2Y 3x1x500 RM/35', # aggr_line_type.name, - 'type_kind': 'cable', # branch['branch'].kind, - 'length': 1, - 'U_n': aggr_line_type.U_n, - 'I_max_th': aggr_line_type.I_max_th, - 'R': aggr_line_type.R, - 'L': aggr_line_type.L, - 'C': aggr_line_type.C, - 'node1': '_'.join( - [str(aggr_gen.__class__.__name__), 'MV', str(mv_grid_id), - str(aggr_gen.id_db), str(mvgenaggr_idx)]), - 'node2': '_'.join([ - 'MVStationDing0', 'MV', str(mv_grid_id), str(mv_grid_id)]), - 'run_id': run_id, - } + pass elif aggr_node == 'load': for type in aggr['load']: @@ -1978,7 +1871,6 @@ def aggregate_loads(la_center, aggr): ['AggregatedLoad', 'MV', str(mv_grid_id), str(mvloads_idx)]), 'MV_grid_id_db': mv_grid_id_db, 'geom': geom, - # from_shape(Point(mv_district.mv_grid.station().geo_data), srid=srid), 'consumption': json.dumps({type: aggr['load'][type]['nominal']}), 'is_aggregated': True, 'run_id': run_id, @@ -2011,15 +1903,6 @@ def aggregate_loads(la_center, aggr): 'run_id': run_id, } - # areacenter_dict[areacenter_idx] = { - # 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]),#node.id_db, - # 'MV_grid_id':node.grid, - # 'geom':node.geo_data, - # 'lv_load_area': node.lv_load_area, - # 'run_id': run_id,# - - # } - # DisconnectingPoints elif isinstance(node, CircuitBreakerDing0): mvcb_idx += 1 @@ -2063,11 +1946,8 @@ def 
aggregate_loads(la_center, aggr): if lv_info: for LA in mv_district.lv_load_areas(): for lv_district in LA.lv_grid_districts(): - if not lv_district.lv_grid.grid_district.lv_load_area.is_aggregated: - # ding0_grid.grid_district._lv_load_areas._lv_grid_districts _.lv_grid - # LV-grid - # ToDo: geom <- Polygon + if not lv_district.lv_grid.grid_district.lv_load_area.is_aggregated: lvgrid_idx += 1 lv_grid_dict[lvgrid_idx] = { 'LV_grid_id': lv_district.lv_grid.id_db, @@ -2080,54 +1960,61 @@ def aggregate_loads(la_center, aggr): 'run_id': run_id } - lv_grid_id = lv_district.lv_grid.id_db - lv_grid_id_db = '_'.join( - [str(lv_district.lv_grid.__class__.__name__), 'LV', str(lv_district.lv_grid.id_db), - str(lv_district.lv_grid.id_db)]) - - # geom = from_shape(Point(lv_district.lv_grid.station().geo_data), srid=srid) - # geom = wkt_dumps(lv_district.geo_data)# lv_grid.station() #ding0_lv_grid.grid_district.geo_data - for node in lv_district.lv_grid.graph_nodes_sorted(): - # geom = wkt_dumps(node.geo_data) - - # LVGenerator - if (isinstance(node, GeneratorDing0) or isinstance(node, GeneratorFluctuatingDing0)): - if node.subtype == None: - subtype = 'other' - else: - subtype = node.subtype - if isinstance(node, GeneratorFluctuatingDing0): - type = node.type - lvgen_idx += 1 - lv_gen_dict[lvgen_idx] = { - 'id_db': '_'.join( - [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), - 'LV_grid_id_db': lv_grid_id_db, - 'geom': wkt_dumps(node.geo_data), - 'type': type, - 'subtype': subtype, - 'v_level': node.v_level, - 'nominal_capacity': node.capacity, - 'run_id': run_id, - 'weather_cell_id': node.weather_cell_id, - } - else: - type = node.type - lvgen_idx += 1 - lv_gen_dict[lvgen_idx] = { - 'id_db': '_'.join( - [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), - 'LV_grid_id_db': lv_grid_id_db, - 'geom': wkt_dumps(node.geo_data), - 'type': type, - 'subtype': subtype, - 'v_level': node.v_level, - 'nominal_capacity': node.capacity, - 'run_id': run_id, - } - - # LVcd - elif isinstance(node, LVCableDistributorDing0): + lv_grid_id = lv_district.lv_grid.id_db + lv_grid_id_db = '_'.join( + [str(lv_district.lv_grid.__class__.__name__), 'LV', str(lv_district.lv_grid.id_db), + str(lv_district.lv_grid.id_db)]) + + # geom = from_shape(Point(lv_district.lv_grid.station().geo_data), srid=srid) + # geom = wkt_dumps(lv_district.geo_data)# lv_grid.station() #ding0_lv_grid.grid_district.geo_data + for node in lv_district.lv_grid.graph_nodes_sorted(): + # geom = wkt_dumps(node.geo_data) + + # LVGenerator + if (isinstance(node, GeneratorDing0) or isinstance(node, GeneratorFluctuatingDing0)): + if node.subtype == None: + subtype = 'other' + else: + subtype = node.subtype + if isinstance(node, GeneratorFluctuatingDing0): + type = node.type + lvgen_idx += 1 + lv_gen_dict[lvgen_idx] = { + # 'id_db': '_'.join( + # [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), + 'id_db': node.id_db, + 'la_id': LA.id_db, + 'LV_grid_id_db': lv_grid_id_db, + 'geom': wkt_dumps(node.geo_data), + 'type': type, + 'subtype': subtype, + 'v_level': node.v_level, + 'nominal_capacity': node.capacity, + 'run_id': run_id, + 'is_aggregated': node.lv_load_area.is_aggregated, + 'weather_cell_id': node.weather_cell_id, + } + else: + type = node.type + lvgen_idx += 1 + lv_gen_dict[lvgen_idx] = { + # 'id_db': '_'.join( + # [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), + 'id_db': node.id_db, + 'la_id': LA.id_db, + 'LV_grid_id_db': lv_grid_id_db, + 'geom': 
wkt_dumps(node.geo_data), + 'type': type, + 'subtype': subtype, + 'v_level': node.v_level, + 'nominal_capacity': node.capacity, + 'run_id': run_id, + 'is_aggregated': node.lv_load_area.is_aggregated, + } + + # LVcd + elif isinstance(node, LVCableDistributorDing0): + if not node.grid.grid_district.lv_load_area.is_aggregated: lvcd_idx += 1 lv_cd_dict[lvcd_idx] = { 'id_db': '_'.join( @@ -2138,8 +2025,9 @@ def aggregate_loads(la_center, aggr): 'run_id': run_id, } - # LVload - elif isinstance(node, LVLoadDing0): + # LVload + elif isinstance(node, LVLoadDing0): + if not node.grid.grid_district.lv_load_area.is_aggregated: lvloads_idx += 1 lv_loads_dict[lvloads_idx] = { 'id_db': '_'.join( @@ -2151,13 +2039,14 @@ def aggregate_loads(la_center, aggr): 'run_id': run_id, } - else: - type = 'Unknown' + else: + type = 'Unknown' - # LVedges - for branch in lv_district.lv_grid.graph_edges(): - # geom = from_shape( - # LineString([branch['adj_nodes'][0].geo_data, branch['adj_nodes'][1].geo_data]), srid=srid) + # LVedges + for branch in lv_district.lv_grid.graph_edges(): + if not branch['branch'].connects_aggregated: + # geom = from_shape( + # LineString([branch['adj_nodes'][0].geo_data, branch['adj_nodes'][1].geo_data]), srid=srid) if not any([isinstance(branch['adj_nodes'][0], LVLoadAreaCentreDing0), isinstance(branch['adj_nodes'][1], LVLoadAreaCentreDing0)]): lines_idx += 1 From 2b22ff0fc600f19defd78a8dab934f345ebb8031 Mon Sep 17 00:00:00 2001 From: gplessm Date: Mon, 9 Jul 2018 16:30:41 +0200 Subject: [PATCH 010/215] Remove commented lines --- ding0/tools/results.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 3c460869..70bbad46 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -1798,7 +1798,6 @@ def aggregate_loads(la_center, aggr): type = node.type mvgen_idx += 1 mv_gen_dict[mvgen_idx] ={ - # 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), 'id_db': node.id_db, 'MV_grid_id_db': mv_grid_id_db, 'geom': geom, @@ -1814,7 +1813,6 @@ def aggregate_loads(la_center, aggr): type = node.type mvgen_idx += 1 mv_gen_dict[mvgen_idx] = { - # 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), 'id_db': node.id_db, 'MV_grid_id_db': mv_grid_id_db, 'geom': geom, @@ -1980,8 +1978,6 @@ def aggregate_loads(la_center, aggr): type = node.type lvgen_idx += 1 lv_gen_dict[lvgen_idx] = { - # 'id_db': '_'.join( - # [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), 'id_db': node.id_db, 'la_id': LA.id_db, 'LV_grid_id_db': lv_grid_id_db, @@ -1998,8 +1994,6 @@ def aggregate_loads(la_center, aggr): type = node.type lvgen_idx += 1 lv_gen_dict[lvgen_idx] = { - # 'id_db': '_'.join( - # [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), 'id_db': node.id_db, 'la_id': LA.id_db, 'LV_grid_id_db': lv_grid_id_db, From 61268518918bb3afde1a398a45834330cc2b1111 Mon Sep 17 00:00:00 2001 From: gplessm Date: Mon, 9 Jul 2018 16:55:07 +0200 Subject: [PATCH 011/215] Adapt OEDB export according to changes in https://github.com/openego/ding0/issues/265 --- ding0/tools/results.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 70bbad46..f686a46f 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -2175,12 +2175,16 @@ def export_network_to_oedb(session, table, tabletype, srid): session.add(md.EgoGridDing0LvGenerator( run_id=row['run_id'], id_db=row['id_db'], + 
la_id=row['la_id'], lv_grid_id_db=str(row['LV_grid_id_db']), geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, type=row['type'], subtype=row['subtype'], v_level=row['v_level'], nominal_capacity=row['nominal_capacity'], + is_aggregated=row['is_aggregated'], + weather_cell_id=row['weather_cell_id'] + )) , axis=1) @@ -2275,6 +2279,7 @@ def export_network_to_oedb(session, table, tabletype, srid): v_level=row['v_level'], nominal_capacity=row['nominal_capacity'], is_aggregated=row['is_aggregated'], + weather_cell_id=row['weather_cell_id'] )) , axis=1) From 5f4f033acb85997ed45a3842ddbb7629b8dd4a6b Mon Sep 17 00:00:00 2001 From: gplessm Date: Mon, 9 Jul 2018 18:25:48 +0200 Subject: [PATCH 012/215] Let IDs be IDs (and don't mix with other information) --- ding0/tools/results.py | 28 +++++++++++----------------- 1 file changed, 11 insertions(+), 17 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index f686a46f..18c610f5 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -1707,8 +1707,7 @@ def aggregate_loads(la_center, aggr): mvgrid_idx += 1 mv_grid_dict[mvgrid_idx] = { 'MV_grid_id': mv_district.mv_grid.id_db, - 'id_db': '_'.join([str(mv_district.mv_grid.__class__.__name__), 'MV', str(mv_grid_id), - str(mv_district.mv_grid.id_db)]), + 'id_db': mv_grid_id, # 'network': mv_district.mv_grid.network, 'geom': wkt_dumps(mv_district.geo_data), 'population': # None, @@ -1734,8 +1733,8 @@ def aggregate_loads(la_center, aggr): if not node.lv_load_area.is_aggregated: lvstations_idx += 1 lv_stations_dict[lvstations_idx] = { - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), + 'id_db': node.id_db, 'geom': geom, 'run_id': run_id, } @@ -1754,7 +1753,7 @@ def aggregate_loads(la_center, aggr): for t in node.transformers(): lvtrafos_idx += 1 mvlv_trafos_dict[lvtrafos_idx] = { - 'id_db': '_'.join([str(t.__class__.__name__), 'LV', str(mv_grid_id), str(node.id_db)]), + 'id_db': node.id_db, 'geom': geom, 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), 'voltage_op': t.v_level, @@ -1768,7 +1767,7 @@ def aggregate_loads(la_center, aggr): elif isinstance(node, MVStationDing0): mvstations_idx += 1 mv_stations_dict[mvstations_idx] = { - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), + 'id_db': node.id_db, 'MV_grid_id_db': mv_grid_id_db, 'geom': geom, 'run_id': run_id, @@ -1778,7 +1777,7 @@ def aggregate_loads(la_center, aggr): for t in node.transformers(): mvtrafos_idx += 1 hvmv_trafos_dict[mvtrafos_idx] = { - 'id_db': '_'.join([str(t.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), + 'id_db': node.id_db, 'geom': geom, 'MV_grid_id_db': mv_grid_id_db, 'voltage_op': t.v_level, @@ -1827,7 +1826,7 @@ def aggregate_loads(la_center, aggr): elif isinstance(node, MVCableDistributorDing0): mvcd_idx += 1 mv_cd_dict[mvcd_idx] = { - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), + 'id_db': node.id_db, 'MV_grid_id_db': mv_grid_id_db, 'geom': geom, 'run_id': run_id, @@ -1865,8 +1864,7 @@ def aggregate_loads(la_center, aggr): for type in aggr['load']: mvloads_idx += 1 mv_loads_dict[mvloads_idx] = { - 'id_db': '_'.join( - ['AggregatedLoad', 'MV', str(mv_grid_id), str(mvloads_idx)]), + 'id_db': mvloads_idx, 'MV_grid_id_db': mv_grid_id_db, 'geom': geom, 'consumption': json.dumps({type: aggr['load'][type]['nominal']}), @@ 
-1905,7 +1903,7 @@ def aggregate_loads(la_center, aggr): elif isinstance(node, CircuitBreakerDing0): mvcb_idx += 1 mvcb_dict[mvcb_idx] = { - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), + 'id_db': node.id_db, 'MV_grid_id': mv_grid_id, 'MV_grid_id_db': mv_grid_id_db, 'geom': geom, @@ -1949,9 +1947,7 @@ def aggregate_loads(la_center, aggr): lvgrid_idx += 1 lv_grid_dict[lvgrid_idx] = { 'LV_grid_id': lv_district.lv_grid.id_db, - 'id_db': '_'.join( - [str(lv_district.lv_grid.__class__.__name__), 'LV', str(lv_district.lv_grid.id_db), - str(lv_district.lv_grid.id_db)]), + 'id_db': lv_district.lv_grid.id_db, 'geom': wkt_dumps(lv_district.geo_data), 'population': lv_district.population, 'voltage_nom': lv_district.lv_grid.v_level / 1e3, @@ -2011,8 +2007,7 @@ def aggregate_loads(la_center, aggr): if not node.grid.grid_district.lv_load_area.is_aggregated: lvcd_idx += 1 lv_cd_dict[lvcd_idx] = { - 'id_db': '_'.join( - [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), + 'id_db': node.id_db, 'LV_grid_id_db': lv_grid_id_db, 'geom': None, # wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? @@ -2024,8 +2019,7 @@ def aggregate_loads(la_center, aggr): if not node.grid.grid_district.lv_load_area.is_aggregated: lvloads_idx += 1 lv_loads_dict[lvloads_idx] = { - 'id_db': '_'.join( - [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), + 'id_db': node.id_db, 'LV_grid_id_db': lv_grid_id_db, 'geom': None, # wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? From 9735f0161ecb69da97533cf5b1bd1797dae8f03b Mon Sep 17 00:00:00 2001 From: gplessm Date: Mon, 9 Jul 2018 18:26:34 +0200 Subject: [PATCH 013/215] Replace string in MV/LV_grid_id_db by int --- ding0/tools/results.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 18c610f5..2a703edb 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -1696,8 +1696,7 @@ def aggregate_loads(la_center, aggr): for mv_district in nw.mv_grid_districts(): from shapely.wkt import dumps as wkt_dumps mv_grid_id = mv_district.mv_grid.id_db - mv_grid_id_db = '_'.join( - [str(mv_district.mv_grid.__class__.__name__), 'MV', str(mv_grid_id), str(mv_district.mv_grid.id_db)]) + mv_grid_id_db = mv_district.mv_grid.id_db if mv_info: lv_grid_id = 0 @@ -1733,8 +1732,8 @@ def aggregate_loads(la_center, aggr): if not node.lv_load_area.is_aggregated: lvstations_idx += 1 lv_stations_dict[lvstations_idx] = { - 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), 'id_db': node.id_db, + 'LV_grid_id_db': node.id_db, 'geom': geom, 'run_id': run_id, } @@ -1745,7 +1744,7 @@ def aggregate_loads(la_center, aggr): 'MV_grid_id': mv_grid_id, 'MV_grid_id_db': mv_grid_id_db, 'LV_grid_id': node.id_db, - 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), + 'LV_grid_id_db': node.id_db, 'run_id': run_id, } @@ -1755,7 +1754,7 @@ def aggregate_loads(la_center, aggr): mvlv_trafos_dict[lvtrafos_idx] = { 'id_db': node.id_db, 'geom': geom, - 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), + 'LV_grid_id_db': node.id_db, 'voltage_op': t.v_level, 'S_nom': t.s_max_a, 'X': t.x, @@ -1955,9 +1954,7 @@ def aggregate_loads(la_center, aggr): } lv_grid_id = lv_district.lv_grid.id_db - lv_grid_id_db = '_'.join( - [str(lv_district.lv_grid.__class__.__name__), 'LV', str(lv_district.lv_grid.id_db), 
- str(lv_district.lv_grid.id_db)]) + lv_grid_id_db = lv_district.lv_grid.id_db # geom = from_shape(Point(lv_district.lv_grid.station().geo_data), srid=srid) # geom = wkt_dumps(lv_district.geo_data)# lv_grid.station() #ding0_lv_grid.grid_district.geo_data From 3807a2c113162a390dee899c5e00db9f049008e4 Mon Sep 17 00:00:00 2001 From: gplessm Date: Mon, 9 Jul 2018 18:32:50 +0200 Subject: [PATCH 014/215] Revert "Replace string in MV/LV_grid_id_db by int" This reverts commit 9735f0161ecb69da97533cf5b1bd1797dae8f03b. --- ding0/tools/results.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 2a703edb..18c610f5 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -1696,7 +1696,8 @@ def aggregate_loads(la_center, aggr): for mv_district in nw.mv_grid_districts(): from shapely.wkt import dumps as wkt_dumps mv_grid_id = mv_district.mv_grid.id_db - mv_grid_id_db = mv_district.mv_grid.id_db + mv_grid_id_db = '_'.join( + [str(mv_district.mv_grid.__class__.__name__), 'MV', str(mv_grid_id), str(mv_district.mv_grid.id_db)]) if mv_info: lv_grid_id = 0 @@ -1732,8 +1733,8 @@ def aggregate_loads(la_center, aggr): if not node.lv_load_area.is_aggregated: lvstations_idx += 1 lv_stations_dict[lvstations_idx] = { + 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), 'id_db': node.id_db, - 'LV_grid_id_db': node.id_db, 'geom': geom, 'run_id': run_id, } @@ -1744,7 +1745,7 @@ def aggregate_loads(la_center, aggr): 'MV_grid_id': mv_grid_id, 'MV_grid_id_db': mv_grid_id_db, 'LV_grid_id': node.id_db, - 'LV_grid_id_db': node.id_db, + 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), 'run_id': run_id, } @@ -1754,7 +1755,7 @@ def aggregate_loads(la_center, aggr): mvlv_trafos_dict[lvtrafos_idx] = { 'id_db': node.id_db, 'geom': geom, - 'LV_grid_id_db': node.id_db, + 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), 'voltage_op': t.v_level, 'S_nom': t.s_max_a, 'X': t.x, @@ -1954,7 +1955,9 @@ def aggregate_loads(la_center, aggr): } lv_grid_id = lv_district.lv_grid.id_db - lv_grid_id_db = lv_district.lv_grid.id_db + lv_grid_id_db = '_'.join( + [str(lv_district.lv_grid.__class__.__name__), 'LV', str(lv_district.lv_grid.id_db), + str(lv_district.lv_grid.id_db)]) # geom = from_shape(Point(lv_district.lv_grid.station().geo_data), srid=srid) # geom = wkt_dumps(lv_district.geo_data)# lv_grid.station() #ding0_lv_grid.grid_district.geo_data From 5c0fd43c30cd607143b52ccc33015496a3e7e99c Mon Sep 17 00:00:00 2001 From: gplessm Date: Mon, 9 Jul 2018 18:33:17 +0200 Subject: [PATCH 015/215] Revert "Let IDs be IDs (and don't mix with other information)" This reverts commit 5f4f033acb85997ed45a3842ddbb7629b8dd4a6b. 
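For reference, the two reverts restore the composite naming scheme used throughout
ding0/tools/results.py, in which exported objects are identified by class name,
voltage level, grid id and object id rather than by the bare integer id_db.
A minimal sketch of that pattern (the helper name make_name is illustrative only
and not part of the codebase; it assumes a Ding0 node object exposing id_db):

    def make_name(node, level, grid_id):
        # builds '<ClassName>_<level>_<grid_id>_<id_db>',
        # e.g. an LV station in MV grid 3545 -> 'LVStationDing0_MV_3545_<id_db>'
        return '_'.join([node.__class__.__name__, level,
                         str(grid_id), str(node.id_db)])
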
--- ding0/tools/results.py | 28 +++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 18c610f5..f686a46f 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -1707,7 +1707,8 @@ def aggregate_loads(la_center, aggr): mvgrid_idx += 1 mv_grid_dict[mvgrid_idx] = { 'MV_grid_id': mv_district.mv_grid.id_db, - 'id_db': mv_grid_id, + 'id_db': '_'.join([str(mv_district.mv_grid.__class__.__name__), 'MV', str(mv_grid_id), + str(mv_district.mv_grid.id_db)]), # 'network': mv_district.mv_grid.network, 'geom': wkt_dumps(mv_district.geo_data), 'population': # None, @@ -1733,8 +1734,8 @@ def aggregate_loads(la_center, aggr): if not node.lv_load_area.is_aggregated: lvstations_idx += 1 lv_stations_dict[lvstations_idx] = { + 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), - 'id_db': node.id_db, 'geom': geom, 'run_id': run_id, } @@ -1753,7 +1754,7 @@ def aggregate_loads(la_center, aggr): for t in node.transformers(): lvtrafos_idx += 1 mvlv_trafos_dict[lvtrafos_idx] = { - 'id_db': node.id_db, + 'id_db': '_'.join([str(t.__class__.__name__), 'LV', str(mv_grid_id), str(node.id_db)]), 'geom': geom, 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), 'voltage_op': t.v_level, @@ -1767,7 +1768,7 @@ def aggregate_loads(la_center, aggr): elif isinstance(node, MVStationDing0): mvstations_idx += 1 mv_stations_dict[mvstations_idx] = { - 'id_db': node.id_db, + 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), 'MV_grid_id_db': mv_grid_id_db, 'geom': geom, 'run_id': run_id, @@ -1777,7 +1778,7 @@ def aggregate_loads(la_center, aggr): for t in node.transformers(): mvtrafos_idx += 1 hvmv_trafos_dict[mvtrafos_idx] = { - 'id_db': node.id_db, + 'id_db': '_'.join([str(t.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), 'geom': geom, 'MV_grid_id_db': mv_grid_id_db, 'voltage_op': t.v_level, @@ -1826,7 +1827,7 @@ def aggregate_loads(la_center, aggr): elif isinstance(node, MVCableDistributorDing0): mvcd_idx += 1 mv_cd_dict[mvcd_idx] = { - 'id_db': node.id_db, + 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), 'MV_grid_id_db': mv_grid_id_db, 'geom': geom, 'run_id': run_id, @@ -1864,7 +1865,8 @@ def aggregate_loads(la_center, aggr): for type in aggr['load']: mvloads_idx += 1 mv_loads_dict[mvloads_idx] = { - 'id_db': mvloads_idx, + 'id_db': '_'.join( + ['AggregatedLoad', 'MV', str(mv_grid_id), str(mvloads_idx)]), 'MV_grid_id_db': mv_grid_id_db, 'geom': geom, 'consumption': json.dumps({type: aggr['load'][type]['nominal']}), @@ -1903,7 +1905,7 @@ def aggregate_loads(la_center, aggr): elif isinstance(node, CircuitBreakerDing0): mvcb_idx += 1 mvcb_dict[mvcb_idx] = { - 'id_db': node.id_db, + 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), 'MV_grid_id': mv_grid_id, 'MV_grid_id_db': mv_grid_id_db, 'geom': geom, @@ -1947,7 +1949,9 @@ def aggregate_loads(la_center, aggr): lvgrid_idx += 1 lv_grid_dict[lvgrid_idx] = { 'LV_grid_id': lv_district.lv_grid.id_db, - 'id_db': lv_district.lv_grid.id_db, + 'id_db': '_'.join( + [str(lv_district.lv_grid.__class__.__name__), 'LV', str(lv_district.lv_grid.id_db), + str(lv_district.lv_grid.id_db)]), 'geom': wkt_dumps(lv_district.geo_data), 'population': lv_district.population, 'voltage_nom': lv_district.lv_grid.v_level 
/ 1e3, @@ -2007,7 +2011,8 @@ def aggregate_loads(la_center, aggr): if not node.grid.grid_district.lv_load_area.is_aggregated: lvcd_idx += 1 lv_cd_dict[lvcd_idx] = { - 'id_db': node.id_db, + 'id_db': '_'.join( + [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), 'LV_grid_id_db': lv_grid_id_db, 'geom': None, # wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? @@ -2019,7 +2024,8 @@ def aggregate_loads(la_center, aggr): if not node.grid.grid_district.lv_load_area.is_aggregated: lvloads_idx += 1 lv_loads_dict[lvloads_idx] = { - 'id_db': node.id_db, + 'id_db': '_'.join( + [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), 'LV_grid_id_db': lv_grid_id_db, 'geom': None, # wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? From d822430d632c6c93431723cbc50efdb7a9c453b7 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Wed, 11 Jul 2018 07:48:42 +0200 Subject: [PATCH 016/215] Add Line Geoms --- ding0/tools/results.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index f686a46f..7b84ae46 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -1939,6 +1939,9 @@ def aggregate_loads(la_center, aggr): 'node2': '_'.join([str(branch['adj_nodes'][1].__class__.__name__), 'MV', str(mv_grid_id), str(branch['adj_nodes'][1].id_db)]), 'run_id': run_id, + 'geom': from_shape(LineString([branch['adj_nodes'][0].geo_data, + branch['adj_nodes'][1].geo_data]), + srid=srid) } if lv_info: @@ -2068,6 +2071,9 @@ def aggregate_loads(la_center, aggr): [str(branch['adj_nodes'][1].__class__.__name__), 'MV', str(mv_grid_id), str(branch['adj_nodes'][1].id_db)]), 'run_id': run_id, + 'geom': from_shape(LineString([branch['adj_nodes'][0].geo_data, + branch['adj_nodes'][1].geo_data]), + srid=srid) } lv_grid = pd.DataFrame.from_dict(lv_grid_dict, orient='index') From 9c97c885663b23af0ba20d53b3e9c0594d86d6c0 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Wed, 11 Jul 2018 12:07:23 +0200 Subject: [PATCH 017/215] Removed geom from LV grids, as there are no geo_data stored in adj_nodes in LV grid edges, Todo: need to use distances to generate them --- ding0/tools/results.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 7b84ae46..0de7a795 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -2071,9 +2071,9 @@ def aggregate_loads(la_center, aggr): [str(branch['adj_nodes'][1].__class__.__name__), 'MV', str(mv_grid_id), str(branch['adj_nodes'][1].id_db)]), 'run_id': run_id, - 'geom': from_shape(LineString([branch['adj_nodes'][0].geo_data, - branch['adj_nodes'][1].geo_data]), - srid=srid) + #'geom': from_shape(LineString([branch['adj_nodes'][0].geo_data, + # branch['adj_nodes'][1].geo_data]), + # srid=srid) } lv_grid = pd.DataFrame.from_dict(lv_grid_dict, orient='index') From 14524b4cb096003637966a94991941d0a9e2e49e Mon Sep 17 00:00:00 2001 From: gplessm Date: Thu, 12 Jul 2018 21:31:43 +0200 Subject: [PATCH 018/215] Formatting corrections --- ding0/tools/results.py | 338 ++++++++++++++++++++++++++++------------- 1 file changed, 230 insertions(+), 108 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index f686a46f..e6028553 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -114,7 +114,8 @@ def save_nd_to_pickle(nd, path='', filename=None): number=nd._mv_grid_districts[0].id_db, number2=nd._mv_grid_districts[-1].id_db) else: - name_extension 
= '_{number}'.format(number=nd._mv_grid_districts[0].id_db) + name_extension = '_{number}'.format( + number=nd._mv_grid_districts[0].id_db) if filename is None: filename = "ding0_grids_{ext}.pkl".format( @@ -313,7 +314,9 @@ def calculate_lvgd_stats(nw): for LA in mv_district.lv_load_areas(): for lv_district in LA.lv_grid_districts(): lv_dist_idx += 1 - branches_from_station = len(lv_district.lv_grid.graph_branches_from_node(lv_district.lv_grid.station())) + branches_from_station = len( + lv_district.lv_grid.graph_branches_from_node( + lv_district.lv_grid.station())) lv_dist_dict[lv_dist_idx] = { 'MV_grid_id': mv_district.mv_grid.id_db, 'LV_grid_id': lv_district.lv_grid.id_db, @@ -372,7 +375,8 @@ def calculate_lvgd_stats(nw): branches_dict[branch_idx] = { 'LV_grid_id': lv_district.lv_grid.id_db, 'length': branch['branch'].length / 1e3, - 'type_name': branch['branch'].type.to_frame().columns[0], + 'type_name': branch['branch'].type.to_frame().columns[ + 0], 'type_kind': branch['branch'].kind, } # Transformers @@ -387,7 +391,8 @@ def calculate_lvgd_stats(nw): district_geo = transform(proj, lv_district.geo_data) lv_dist_dict[lv_dist_idx].update({'Area': district_geo.area}) - lvgd_stats = pd.DataFrame.from_dict(lv_dist_dict, orient='index').set_index('LV_grid_id') + lvgd_stats = pd.DataFrame.from_dict(lv_dist_dict, orient='index').set_index( + 'LV_grid_id') # generate partial dataframes gen_df = pd.DataFrame.from_dict(lv_gen_dict, orient='index') load_df = pd.DataFrame.from_dict(lv_load_dict, orient='index') @@ -397,62 +402,87 @@ def calculate_lvgd_stats(nw): # resque desired data if not gen_df.empty: # generation by voltage level - lv_generation = gen_df.groupby(['LV_grid_id', 'v_level'])['GenCap'].sum().to_frame().unstack(level=-1) - lv_generation.columns = ['Gen. Cap. v_level ' + str(_[1]) if isinstance(_, tuple) else str(_) for _ in - lv_generation.columns] + lv_generation = gen_df.groupby(['LV_grid_id', 'v_level'])[ + 'GenCap'].sum().to_frame().unstack(level=-1) + lv_generation.columns = [ + 'Gen. Cap. v_level ' + str(_[1]) if isinstance(_, tuple) else str(_) + for _ in + lv_generation.columns] lvgd_stats = pd.concat([lvgd_stats, lv_generation], axis=1) # generation by type/subtype - lv_generation = gen_df.groupby(['LV_grid_id', 'subtype'])['GenCap'].sum().to_frame().unstack(level=-1) - lv_generation.columns = ['Gen. Cap. type ' + str(_[1]) if isinstance(_, tuple) else str(_) for _ in - lv_generation.columns] + lv_generation = gen_df.groupby(['LV_grid_id', 'subtype'])[ + 'GenCap'].sum().to_frame().unstack(level=-1) + lv_generation.columns = [ + 'Gen. Cap. 
type ' + str(_[1]) if isinstance(_, tuple) else str(_) + for _ in + lv_generation.columns] lvgd_stats = pd.concat([lvgd_stats, lv_generation], axis=1) if not load_df.empty: # number of residential loads - lv_loads = load_df[load_df['load_type'] == 'residential'].groupby(['LV_grid_id'])[ + lv_loads = \ + load_df[load_df['load_type'] == 'residential'].groupby(['LV_grid_id'])[ 'load_type'].count().to_frame() # .unstack(level=-1) lv_loads.columns = ['N° of loads residential'] lvgd_stats = pd.concat([lvgd_stats, lv_loads], axis=1) # number of agricultural loads - lv_loads = load_df[load_df['load_type'] == 'agricultural'].groupby(['LV_grid_id'])[ + lv_loads = \ + load_df[load_df['load_type'] == 'agricultural'].groupby(['LV_grid_id'])[ 'load_type'].count().to_frame() # .unstack(level=-1) lv_loads.columns = ['N° of loads agricultural'] lvgd_stats = pd.concat([lvgd_stats, lv_loads], axis=1) # number of mixed industrial / retail loads - lv_loads = load_df[load_df['load_type'] == 'ind_ret'].groupby(['LV_grid_id'])[ + lv_loads = \ + load_df[load_df['load_type'] == 'ind_ret'].groupby(['LV_grid_id'])[ 'load_type'].count().to_frame() # .unstack(level=-1) lv_loads.columns = ['N° of loads mixed industrial/retail'] lvgd_stats = pd.concat([lvgd_stats, lv_loads], axis=1) if not branch_df.empty: # branches by type name - lv_branches = branch_df.groupby(['LV_grid_id', 'type_name'])['length'].sum().to_frame().unstack(level=-1) - lv_branches.columns = ['Length Type ' + _[1] if isinstance(_, tuple) else _ for _ in lv_branches.columns] + lv_branches = branch_df.groupby(['LV_grid_id', 'type_name'])[ + 'length'].sum().to_frame().unstack(level=-1) + lv_branches.columns = [ + 'Length Type ' + _[1] if isinstance(_, tuple) else _ for _ in + lv_branches.columns] lvgd_stats = pd.concat([lvgd_stats, lv_branches], axis=1) # branches by kind - lv_branches = branch_df[branch_df['type_kind'] == 'line'].groupby(['LV_grid_id'])['length'].sum().to_frame() + lv_branches = \ + branch_df[branch_df['type_kind'] == 'line'].groupby(['LV_grid_id'])[ + 'length'].sum().to_frame() lv_branches.columns = ['Length of overhead lines'] lvgd_stats = pd.concat([lvgd_stats, lv_branches], axis=1) - lv_branches = branch_df[branch_df['type_kind'] == 'cable'].groupby(['LV_grid_id'])['length'].sum().to_frame() + lv_branches = \ + branch_df[branch_df['type_kind'] == 'cable'].groupby(['LV_grid_id'])[ + 'length'].sum().to_frame() lv_branches.columns = ['Length of underground cables'] lvgd_stats = pd.concat([lvgd_stats, lv_branches], axis=1) # N°of branches - lv_branches = branch_df.groupby(['LV_grid_id', 'type_name'])['length'].count().to_frame().unstack(level=-1) - lv_branches.columns = ['N° of branches Type ' + _[1] if isinstance(_, tuple) else _ for _ in - lv_branches.columns] + lv_branches = branch_df.groupby(['LV_grid_id', 'type_name'])[ + 'length'].count().to_frame().unstack(level=-1) + lv_branches.columns = [ + 'N° of branches Type ' + _[1] if isinstance(_, tuple) else _ for _ + in + lv_branches.columns] lvgd_stats = pd.concat([lvgd_stats, lv_branches], axis=1) - lv_branches = branch_df[branch_df['type_kind'] == 'line'].groupby(['LV_grid_id'])['length'].count().to_frame() + lv_branches = \ + branch_df[branch_df['type_kind'] == 'line'].groupby(['LV_grid_id'])[ + 'length'].count().to_frame() lv_branches.columns = ['N° of branches overhead lines'] lvgd_stats = pd.concat([lvgd_stats, lv_branches], axis=1) - lv_branches = branch_df[branch_df['type_kind'] == 'cable'].groupby(['LV_grid_id'])['length'].count().to_frame() + lv_branches = \ + 
branch_df[branch_df['type_kind'] == 'cable'].groupby(['LV_grid_id'])[ + 'length'].count().to_frame() lv_branches.columns = ['N° of branches underground cables'] lvgd_stats = pd.concat([lvgd_stats, lv_branches], axis=1) if not trafos_df.empty: # N of trafos - lv_trafos = trafos_df.groupby(['LV_grid_id'])['s_max_a'].count().to_frame() + lv_trafos = trafos_df.groupby(['LV_grid_id'])[ + 's_max_a'].count().to_frame() lv_trafos.columns = ['N° of MV/LV Trafos'] lvgd_stats = pd.concat([lvgd_stats, lv_trafos], axis=1) # Capacity of trafos - lv_trafos = trafos_df.groupby(['LV_grid_id'])['s_max_a'].sum().to_frame() + lv_trafos = trafos_df.groupby(['LV_grid_id'])[ + 's_max_a'].sum().to_frame() lv_trafos.columns = ['Accumulated s_max_a in MVLV trafos'] lvgd_stats = pd.concat([lvgd_stats, lv_trafos], axis=1) @@ -535,7 +565,8 @@ def calculate_mvgd_stats(nw): continue mv_impedance = 0 mv_path_length = 0 - if not isinstance(node, MVCableDistributorDing0) and not isinstance(node, CircuitBreakerDing0): + if not isinstance(node, MVCableDistributorDing0) and not isinstance( + node, CircuitBreakerDing0): if not nx.has_path(G, root, node): continue # print(node, node.lv_load_area.is_aggregated) # only debug @@ -556,14 +587,16 @@ def calculate_mvgd_stats(nw): mv_impedances[node] = mv_impedance mv_path_lengths[node] = mv_path_length - mv_thermal_limit = G.edge[path[0]][path[1]]['branch'].type['I_max_th'] + mv_thermal_limit = G.edge[path[0]][path[1]]['branch'].type[ + 'I_max_th'] mv_thermal_limits[node] = mv_thermal_limit if isinstance(node, LVStationDing0): # add impedance of transformers in LV station lvstation_impedance = 0. for trafo in node.transformers(): - lvstation_impedance += 1. / np.hypot(trafo.r, trafo.x) # transformers operating in parallel + lvstation_impedance += 1. / np.hypot(trafo.r, + trafo.x) # transformers operating in parallel if lvstation_impedance > 0.: # avoid dividing by zero lvstation_impedance = 1. / lvstation_impedance else: @@ -575,29 +608,62 @@ def calculate_mvgd_stats(nw): G_lv = lv_dist.lv_grid._graph # loop over all LV terminal nodes belonging to LV station for lv_node in G_lv.nodes(): - if isinstance(lv_node, GeneratorDing0) or isinstance(lv_node, LVLoadDing0): - path = nx.shortest_path(G_lv, node, lv_node) + if isinstance(lv_node, + GeneratorDing0) or isinstance( + lv_node, LVLoadDing0): + path = nx.shortest_path(G_lv, node, + lv_node) lv_impedance = lvstation_impedance lv_path_length = 0. for i in range(len(path) - 1): - lv_impedance += np.sqrt((G_lv.edge[path[i]][path[i + 1]]['branch'].type[ - 'L'] * 1e-3 * omega * \ - G_lv.edge[path[i]][path[i + 1]][ - 'branch'].length) ** 2. + \ - (G_lv.edge[path[i]][path[i + 1]]['branch'].type[ - 'R'] * \ - G_lv.edge[path[i]][path[i + 1]][ - 'branch'].length) ** 2.) - lv_path_length += G_lv.edge[path[i]][path[i + 1]]['branch'].length - lv_thermal_limit = G_lv.edge[path[0]][path[1]]['branch'].type['I_max_th'] - - mvlv_impedances[lv_node] = mv_impedance + lv_impedance - mvlv_path_lengths[lv_node] = mv_path_length + lv_path_length - lv_thermal_limits[lv_node] = lv_thermal_limit - mvlv_thermal_limits[lv_node] = mv_thermal_limit - - elif isinstance(lv_node, LVStationDing0): - n_outgoing_LV += len(G_lv.neighbors(lv_node)) + lv_impedance += np.sqrt(( + G_lv.edge[ + path[ + i]][ + path[ + i + 1]][ + 'branch'].type[ + 'L'] * 1e-3 * omega * \ + G_lv.edge[ + path[ + i]][ + path[ + i + 1]][ + 'branch'].length) ** 2. 
+ \ + ( + G_lv.edge[ + path[ + i]][ + path[ + i + 1]][ + 'branch'].type[ + 'R'] * \ + G_lv.edge[ + path[ + i]][ + path[ + i + 1]][ + 'branch'].length) ** 2.) + lv_path_length += \ + G_lv.edge[path[i]][path[i + 1]][ + 'branch'].length + lv_thermal_limit = \ + G_lv.edge[path[0]][path[1]][ + 'branch'].type['I_max_th'] + + mvlv_impedances[ + lv_node] = mv_impedance + lv_impedance + mvlv_path_lengths[ + lv_node] = mv_path_length + lv_path_length + lv_thermal_limits[ + lv_node] = lv_thermal_limit + mvlv_thermal_limits[ + lv_node] = mv_thermal_limit + + elif isinstance(lv_node, + LVStationDing0): + n_outgoing_LV += len( + G_lv.neighbors(lv_node)) n_stations_LV += 1 # compute mean values by looping over terminal nodes @@ -608,7 +674,8 @@ def calculate_mvgd_stats(nw): # terminal nodes on MV for terminal_node in mv_impedances.keys(): # neglect LVStations here because already part of MVLV paths below - if not isinstance(terminal_node, LVStationDing0) and not isinstance(terminal_node, MVStationDing0): + if not isinstance(terminal_node, LVStationDing0) and not isinstance( + terminal_node, MVStationDing0): sum_impedances += mv_impedances[terminal_node] sum_thermal_limits += mv_thermal_limits[terminal_node] sum_path_lengths += mv_path_lengths[terminal_node] @@ -712,7 +779,8 @@ def calculate_mvgd_stats(nw): elif isinstance(node, LVStationDing0): LVs_count += 1 lv_trafo_count += len([trafo for trafo in node.transformers()]) - lv_trafo_cap += np.sum([trafo.s_max_a for trafo in node.transformers()]) + lv_trafo_cap += np.sum( + [trafo.s_max_a for trafo in node.transformers()]) if not node.lv_load_area.is_aggregated: mv_path_length = district.mv_grid.graph_path_length( @@ -726,7 +794,8 @@ def calculate_mvgd_stats(nw): lv_path_length = lv_dist.lv_grid.graph_path_length( node_source=node, node_target=lv_node) - max_lv_path = max(max_lv_path, lv_path_length) + max_lv_path = max(max_lv_path, + lv_path_length) mvlv_path_length = mv_path_length + max_lv_path elif isinstance(node, CircuitBreakerDing0): @@ -798,7 +867,8 @@ def calculate_mvgd_stats(nw): lv_branches_dict[lv_branches_idx] = { 'grid_id': district.mv_grid.id_db, 'length': br['branch'].length / 1e3, - 'type_name': br['branch'].type.to_frame().columns[0], # why is it different as for MV grids? + 'type_name': br['branch'].type.to_frame().columns[0], + # why is it different as for MV grids? 
'type_kind': br['branch'].kind, } @@ -824,7 +894,8 @@ def calculate_mvgd_stats(nw): pyproj.Proj(init='epsg:4326'), # source coordinate system pyproj.Proj(init='epsg:3035')) # destination coordinate system district_geo = transform(proj, district.geo_data) - other_nodes_dict[district.mv_grid.id_db].update({'Dist_area': district_geo.area}) + other_nodes_dict[district.mv_grid.id_db].update( + {'Dist_area': district_geo.area}) mvgd_stats = pd.DataFrame.from_dict({}, orient='index') ################################### @@ -840,22 +911,29 @@ def calculate_mvgd_stats(nw): ################################### # Aggregated data HV/MV Trafos if not trafos_df.empty: - mvgd_stats = pd.concat([mvgd_stats, trafos_df.groupby('grid_id').count()['s_max_a']], axis=1) - mvgd_stats = pd.concat([mvgd_stats, trafos_df.groupby('grid_id').sum()[['s_max_a']]], axis=1) + mvgd_stats = pd.concat( + [mvgd_stats, trafos_df.groupby('grid_id').count()['s_max_a']], + axis=1) + mvgd_stats = pd.concat( + [mvgd_stats, trafos_df.groupby('grid_id').sum()[['s_max_a']]], + axis=1) mvgd_stats.columns = ['N° of HV/MV Trafos', 'Trafos HV/MV Acc s_max_a'] ################################### # Aggregated data Generators if not generators_df.empty: # MV generation per sub_type - mv_generation = generators_df.groupby(['grid_id', 'sub_type'])['gen_cap'].sum().to_frame().unstack(level=-1) - mv_generation.columns = ['Gen. Cap. of MV ' + _[1] if isinstance(_, tuple) else _ - for _ in mv_generation.columns] + mv_generation = generators_df.groupby(['grid_id', 'sub_type'])[ + 'gen_cap'].sum().to_frame().unstack(level=-1) + mv_generation.columns = [ + 'Gen. Cap. of MV ' + _[1] if isinstance(_, tuple) else _ + for _ in mv_generation.columns] mvgd_stats = pd.concat([mvgd_stats, mv_generation], axis=1) # MV generation at V levels mv_generation = generators_df.groupby( - ['grid_id', 'v_level'])['gen_cap'].sum().to_frame().unstack(level=-1) + ['grid_id', 'v_level'])['gen_cap'].sum().to_frame().unstack( + level=-1) mv_generation.columns = ['Gen. Cap. 
of MV at v_level ' + str(_[1]) if isinstance(_, tuple) else _ for _ in mv_generation.columns] @@ -870,21 +948,31 @@ def calculate_mvgd_stats(nw): # Aggregated data of other nodes if not other_nodes_df.empty: # print(other_nodes_df['CD_count'].to_frame()) - mvgd_stats['N° of Cable Distr'] = other_nodes_df['CD_count'].to_frame().astype(int) - mvgd_stats['N° of LV Stations'] = other_nodes_df['LV_count'].to_frame().astype(int) - mvgd_stats['N° of Circuit Breakers'] = other_nodes_df['CB_count'].to_frame().astype(int) + mvgd_stats['N° of Cable Distr'] = other_nodes_df[ + 'CD_count'].to_frame().astype(int) + mvgd_stats['N° of LV Stations'] = other_nodes_df[ + 'LV_count'].to_frame().astype(int) + mvgd_stats['N° of Circuit Breakers'] = other_nodes_df[ + 'CB_count'].to_frame().astype(int) mvgd_stats['District Area'] = other_nodes_df['Dist_area'].to_frame() - mvgd_stats['N° of MV/LV Trafos'] = other_nodes_df['MVLV_trafo_count'].to_frame().astype(int) - mvgd_stats['Trafos MV/LV Acc s_max_a'] = other_nodes_df['MVLV_trafo_cap'].to_frame() - mvgd_stats['Length of MV max path'] = other_nodes_df['max_mv_path'].to_frame() - mvgd_stats['Length of MVLV max path'] = other_nodes_df['max_mvlv_path'].to_frame() + mvgd_stats['N° of MV/LV Trafos'] = other_nodes_df[ + 'MVLV_trafo_count'].to_frame().astype(int) + mvgd_stats['Trafos MV/LV Acc s_max_a'] = other_nodes_df[ + 'MVLV_trafo_cap'].to_frame() + mvgd_stats['Length of MV max path'] = other_nodes_df[ + 'max_mv_path'].to_frame() + mvgd_stats['Length of MVLV max path'] = other_nodes_df[ + 'max_mvlv_path'].to_frame() mvgd_stats['Impedance Z of path to terminal node (mean value)'] = \ other_nodes_df['mean_impedance'].to_frame() - mvgd_stats['I_max of first segment of path from MV station to terminal node (mean value)'] = \ + mvgd_stats[ + 'I_max of first segment of path from MV station to terminal node (mean value)'] = \ other_nodes_df['mean_thermal_limit'].to_frame() - mvgd_stats['I_max of first segment of path from LV station to terminal node (mean value)'] = \ + mvgd_stats[ + 'I_max of first segment of path from LV station to terminal node (mean value)'] = \ other_nodes_df['mean_thermal_limit_LV'].to_frame() - mvgd_stats['Length of path from MV station to terminal node (mean value)'] = \ + mvgd_stats[ + 'Length of path from MV station to terminal node (mean value)'] = \ other_nodes_df['mean_path_length'].to_frame() mvgd_stats['Number of lines and cables going out from LV stations'] = \ other_nodes_df['number_outgoing_LV'].to_frame() @@ -895,7 +983,8 @@ def calculate_mvgd_stats(nw): # Aggregated data of MV Branches if not branches_df.empty: # km of underground cable - branches_data = branches_df[branches_df['type_kind'] == 'cable'].groupby( + branches_data = \ + branches_df[branches_df['type_kind'] == 'cable'].groupby( ['grid_id'])['length'].sum().to_frame() branches_data.columns = ['Length of MV underground cables'] mvgd_stats = pd.concat([mvgd_stats, branches_data], axis=1) @@ -908,9 +997,11 @@ def calculate_mvgd_stats(nw): # km of different wire types branches_data = branches_df.groupby( - ['grid_id', 'type_name'])['length'].sum().to_frame().unstack(level=-1) - branches_data.columns = ['Length of MV type ' + _[1] if isinstance(_, tuple) else _ - for _ in branches_data.columns] + ['grid_id', 'type_name'])['length'].sum().to_frame().unstack( + level=-1) + branches_data.columns = [ + 'Length of MV type ' + _[1] if isinstance(_, tuple) else _ + for _ in branches_data.columns] mvgd_stats = pd.concat([mvgd_stats, branches_data], axis=1) # branches not in ring @@ 
-927,26 +1018,31 @@ def calculate_mvgd_stats(nw): # Aggregated data of LV Branches if not lv_branches_df.empty: # km of underground cable - lv_branches_data = lv_branches_df[lv_branches_df['type_kind'] == 'cable'].groupby( + lv_branches_data = \ + lv_branches_df[lv_branches_df['type_kind'] == 'cable'].groupby( ['grid_id'])['length'].sum().to_frame() lv_branches_data.columns = ['Length of LV underground cables'] mvgd_stats = pd.concat([mvgd_stats, lv_branches_data], axis=1) # km of overhead lines - lv_branches_data = lv_branches_df[lv_branches_df['type_kind'] == 'line'].groupby( + lv_branches_data = \ + lv_branches_df[lv_branches_df['type_kind'] == 'line'].groupby( ['grid_id'])['length'].sum().to_frame() lv_branches_data.columns = ['Length of LV overhead lines'] mvgd_stats = pd.concat([mvgd_stats, lv_branches_data], axis=1) # km of different wire types lv_branches_data = lv_branches_df.groupby( - ['grid_id', 'type_name'])['length'].sum().to_frame().unstack(level=-1) - lv_branches_data.columns = ['Length of LV type ' + _[1] if isinstance(_, tuple) else _ - for _ in lv_branches_data.columns] + ['grid_id', 'type_name'])['length'].sum().to_frame().unstack( + level=-1) + lv_branches_data.columns = [ + 'Length of LV type ' + _[1] if isinstance(_, tuple) else _ + for _ in lv_branches_data.columns] mvgd_stats = pd.concat([mvgd_stats, lv_branches_data], axis=1) # n° of branches - total_lv_br = lv_branches_df.groupby(['grid_id'])['length'].count().to_frame() + total_lv_br = lv_branches_df.groupby(['grid_id'])[ + 'length'].count().to_frame() total_lv_br.columns = ['N° of LV branches'] mvgd_stats = pd.concat([mvgd_stats, total_lv_br], axis=1) @@ -965,7 +1061,8 @@ def calculate_mvgd_stats(nw): ring_data = ring_df.groupby(['grid_id'])['ring_length'].max().to_frame() ring_data.columns = ['Length of MV Ring max'] mvgd_stats = pd.concat([mvgd_stats, ring_data], axis=1) - ring_data = ring_df.groupby(['grid_id'])['ring_length'].mean().to_frame() + ring_data = ring_df.groupby(['grid_id'])[ + 'ring_length'].mean().to_frame() ring_data.columns = ['Length of MV Ring mean'] mvgd_stats = pd.concat([mvgd_stats, ring_data], axis=1) @@ -975,14 +1072,16 @@ def calculate_mvgd_stats(nw): mvgd_stats = pd.concat([mvgd_stats, ring_data], axis=1) # km of non-ring - non_ring_data = branches_df.groupby(['grid_id'])['length'].sum().to_frame() + non_ring_data = branches_df.groupby(['grid_id'])[ + 'length'].sum().to_frame() non_ring_data.columns = ['Length of MV Rings total'] ring_data = non_ring_data - ring_data ring_data.columns = ['Length of MV Non-Rings total'] mvgd_stats = pd.concat([mvgd_stats, ring_data.round(1).abs()], axis=1) # rings generation capacity - ring_data = ring_df.groupby(['grid_id'])['ring_capacity'].sum().to_frame() + ring_data = ring_df.groupby(['grid_id'])[ + 'ring_capacity'].sum().to_frame() ring_data.columns = ['Gen. Cap. Connected to MV Rings'] mvgd_stats = pd.concat([mvgd_stats, ring_data], axis=1) ################################### @@ -1032,7 +1131,8 @@ def calculate_mvgd_stats(nw): 'lv_generation', 'total_peak_load'].sum() agg_LA_data.columns = ['LA Aggregated Population', - 'LA Aggregated LV Gen. Cap.', 'LA Aggregated LV Peak Load total' + 'LA Aggregated LV Gen. 
Cap.', + 'LA Aggregated LV Peak Load total' ] mvgd_stats = pd.concat([mvgd_stats, agg_LA_data], axis=1) @@ -1154,7 +1254,8 @@ def calculate_lvgd_voltage_current_stats(nw): if not LA.is_aggregated: for lv_district in LA.lv_grid_districts(): # nodes voltage - crit_nodes = get_critical_voltage_at_nodes(lv_district.lv_grid) + crit_nodes = get_critical_voltage_at_nodes( + lv_district.lv_grid) for node in crit_nodes: nodes_idx += 1 nodes_dict[nodes_idx] = { @@ -1169,7 +1270,8 @@ def calculate_lvgd_voltage_current_stats(nw): 'V nominal': lv_district.lv_grid.v_level, } # branches currents - critical_branches, critical_stations = get_critical_line_loading(lv_district.lv_grid) + critical_branches, critical_stations = get_critical_line_loading( + lv_district.lv_grid) for branch in critical_branches: branches_idx += 1 branches_dict[branches_idx] = { @@ -1321,7 +1423,8 @@ def process_stats(mv_districts, calc_mv = True calc_lv = True ####################################################################### - clusters = [mv_districts[x:x + n_of_districts] for x in range(0, len(mv_districts), n_of_districts)] + clusters = [mv_districts[x:x + n_of_districts] for x in + range(0, len(mv_districts), n_of_districts)] mv_stats = [] lv_stats = [] @@ -1377,7 +1480,8 @@ def process_stats(mv_districts, lv_crit_nodes.append(stats[0]) lv_crit_edges.append(stats[1]) ####################################################################### - salida = (mv_stats, lv_stats, mv_crit_nodes, mv_crit_edges, lv_crit_nodes, lv_crit_edges) + salida = (mv_stats, lv_stats, mv_crit_nodes, mv_crit_edges, lv_crit_nodes, + lv_crit_edges) output.put(salida) @@ -1465,7 +1569,8 @@ def parallel_running_stats(districts_list, processes = [] for districts in threats: - args = (districts, n_of_districts, source, mode, critical, nw_name, output_stats) + args = (districts, n_of_districts, source, mode, critical, nw_name, + output_stats) processes.append(mp.Process(target=process_stats, args=args)) ####################################################################### # Run processes @@ -2098,8 +2203,10 @@ def aggregate_loads(la_center, aggr): ####################################################### def export_data_tocsv(path, run_id, metadata_json, - lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, - mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, + lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, + lv_loads, + mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, + mv_loads, lines, mvlv_mapping, csv_sep=','): # make directory with run_id if it doesn't exist os.makedirs(path, exist_ok=True) @@ -2166,7 +2273,8 @@ def export_network_to_oedb(session, table, tabletype, srid): run_id=row['run_id'], id_db=row['id_db'], lv_grid_id_db=row['LV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, )) , axis=1) @@ -2177,7 +2285,8 @@ def export_network_to_oedb(session, table, tabletype, srid): id_db=row['id_db'], la_id=row['la_id'], lv_grid_id_db=str(row['LV_grid_id_db']), - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, type=row['type'], subtype=row['subtype'], v_level=row['v_level'], @@ -2194,7 +2303,8 @@ def export_network_to_oedb(session, table, tabletype, srid): run_id=row['run_id'], id_db=row['id_db'], lv_grid_id_db=row['LV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, + 
geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, consumption=row['consumption'] )) , axis=1) @@ -2205,7 +2315,8 @@ def export_network_to_oedb(session, table, tabletype, srid): run_id=row['run_id'], id_db=row['id_db'], lv_grid_id=row['LV_grid_id'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, population=row['population'], voltage_nom=row['voltage_nom'], )) @@ -2217,7 +2328,8 @@ def export_network_to_oedb(session, table, tabletype, srid): run_id=row['run_id'], id_db=row['id_db'], lv_grid_id_db=row['LV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, )) , axis=1) @@ -2227,7 +2339,8 @@ def export_network_to_oedb(session, table, tabletype, srid): run_id=row['run_id'], id_db=row['id_db'], lv_grid_id_db=row['LV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, voltage_op=row['voltage_op'], S_nom=row['S_nom'], X=row['X'], @@ -2252,7 +2365,8 @@ def export_network_to_oedb(session, table, tabletype, srid): run_id=row['run_id'], id_db=row['id_db'], mv_grid_id_db=row['MV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, )) , axis=1) @@ -2262,7 +2376,8 @@ def export_network_to_oedb(session, table, tabletype, srid): run_id=row['run_id'], id_db=row['id_db'], mv_grid_id_db=row['MV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, status=row['status'], )) , axis=1) @@ -2273,7 +2388,8 @@ def export_network_to_oedb(session, table, tabletype, srid): run_id=row['run_id'], id_db=row['id_db'], mv_grid_id_db=row['MV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, type=row['type'], subtype=row['subtype'], v_level=row['v_level'], @@ -2289,7 +2405,8 @@ def export_network_to_oedb(session, table, tabletype, srid): run_id=row['run_id'], id_db=row['id_db'], mv_grid_id_db=row['MV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, is_aggregated=row['is_aggregated'], consumption=row['consumption'], )) @@ -2301,7 +2418,8 @@ def export_network_to_oedb(session, table, tabletype, srid): run_id=row['run_id'], id_db=row['id_db'], mv_grid_id=row['MV_grid_id'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, population=row['population'], voltage_nom=row['voltage_nom'], )) @@ -2313,7 +2431,8 @@ def export_network_to_oedb(session, table, tabletype, srid): run_id=row['run_id'], id_db=row['id_db'], mv_grid_id_db=row['MV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, )) , axis=1) @@ -2323,7 +2442,8 @@ def export_network_to_oedb(session, table, tabletype, srid): run_id=row['run_id'], id_db=row['id_db'], mv_grid_id_db=row['MV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, + geom="SRID={};{}".format(srid, 
row['geom']) if row[ + 'geom'] else None, voltage_op=row['voltage_op'], S_nom=row['S_nom'], X=row['X'], @@ -2337,8 +2457,10 @@ def export_network_to_oedb(session, table, tabletype, srid): def export_data_to_oedb(session, run_id, metadata_json, srid, - lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, - mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, lines, mvlv_mapping): + lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, + lv_loads, + mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, + mv_loads, lines, mvlv_mapping): # only for testing # engine = create_engine('sqlite:///:memory:') @@ -2434,7 +2556,8 @@ def drop_ding0_db_tables(engine): "- yes\n" + "- no\n" + "- the indexes to drop in the format 0, 2, 3, 5") - confirmation = input("Please type the choice completely as there is no default choice.") + confirmation = input( + "Please type the choice completely as there is no default choice.") if re.fullmatch('[Yy]es', confirmation): for tab in tables: tab().__table__.drop(bind=engine, checkfirst=True) @@ -2463,7 +2586,6 @@ def drop_ding0_db_tables(engine): def grant_access_ding0_db_tables(engine): - tables = [md.EgoGridDing0Line, md.EgoGridDing0LvBranchtee, md.EgoGridDing0LvGenerator, @@ -2495,12 +2617,11 @@ def grant_db_access(engine, table, role): tablename = table.__table__.name schema = table.__table__.schema - grant_str = """BEGIN; GRANT ALL PRIVILEGES ON TABLE {schema}.{table} TO {role} WITH GRANT OPTION; COMMIT;""".format(schema=schema, table=tablename, - role=role) + role=role) engine.execute(grant_str) @@ -2509,6 +2630,7 @@ def grant_db_access(engine, table, role): for tab in tables: grant_db_access(engine, tab, 'oeuser') + ######################################################## if __name__ == "__main__": # nw = init_mv_grid(mv_grid_districts=[3544, 3545]) From 154782028851ed4aa132a79c98ebe311011ba67a Mon Sep 17 00:00:00 2001 From: gplessm Date: Thu, 12 Jul 2018 21:38:01 +0200 Subject: [PATCH 019/215] Make usage of ids and names consistent --- ding0/tools/results.py | 338 +++++++++++++++++++++++------------------ 1 file changed, 194 insertions(+), 144 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index e6028553..5b5bd476 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -1734,7 +1734,7 @@ def export_network(nw, mode=''): mvcb_dict = {} mvcd_idx = 0 mv_cd_dict = {} - mvstations_idx = 0 + # mvstations_idx = 0 mv_stations_dict = {} mvtrafos_idx = 0 hvmv_trafos_dict = {} @@ -1801,8 +1801,9 @@ def aggregate_loads(la_center, aggr): for mv_district in nw.mv_grid_districts(): from shapely.wkt import dumps as wkt_dumps mv_grid_id = mv_district.mv_grid.id_db - mv_grid_id_db = '_'.join( - [str(mv_district.mv_grid.__class__.__name__), 'MV', str(mv_grid_id), str(mv_district.mv_grid.id_db)]) + mv_grid_name = '_'.join( + [str(mv_district.mv_grid.__class__.__name__), 'MV', str(mv_grid_id), + str(mv_district.mv_grid.id_db)]) if mv_info: lv_grid_id = 0 @@ -1811,36 +1812,61 @@ def aggregate_loads(la_center, aggr): # ToDo: geom <- Polygon mvgrid_idx += 1 mv_grid_dict[mvgrid_idx] = { - 'MV_grid_id': mv_district.mv_grid.id_db, - 'id_db': '_'.join([str(mv_district.mv_grid.__class__.__name__), 'MV', str(mv_grid_id), - str(mv_district.mv_grid.id_db)]), - # 'network': mv_district.mv_grid.network, + 'id': mv_grid_id, + 'name': mv_grid_name, 'geom': wkt_dumps(mv_district.geo_data), - 'population': # None, + 'population': sum([_.zensus_sum for _ in - mv_district._lv_load_areas # ding0_grid.grid_district._lv_load_areas + 
mv_district._lv_load_areas if not np.isnan(_.zensus_sum)]), 'voltage_nom': mv_district.mv_grid.v_level, # in kV 'run_id': run_id } - # id_db: Classname_MV/LV_mvgridid/lvgridid_id - # excemptions: class LVStations: LVStationDing0_MV_mvgridid_id(=lvgridid) + # MV station + mv_station = mv_district.mv_grid._station + mv_station_name = '_'.join( + ['MVStationDing0', 'MV', str(mv_station.id_db), + str(mv_station.id_db)]) + mv_stations_dict[0] = { + 'id': mv_district.mv_grid.id_db, + 'name': mv_station_name, + 'geom': mv_station.geo_data, + 'run_id': run_id} + + # Trafos MV + for t in mv_station.transformers(): + mvtrafos_idx += 1 + hvmv_trafos_dict[mvtrafos_idx] = { + 'id': mv_station.id_db, + 'geom': mv_station.geo_data, + 'name': '_'.join( + ['MVTransformerDing0', 'MV', str(mv_station.id_db), + str(mv_station.id_db)]), + 'voltage_op': t.v_level, + 'S_nom': t.s_max_a, + 'X': t.x, + 'R': t.r, + 'run_id': run_id, + } - # MVGrid + # MV grid components for node in mv_district.mv_grid.graph_nodes_sorted(): geom = wkt_dumps(node.geo_data) - # geom = from_shape(Point(node.geo_data), srid=srid) - db_id = node.id_db # LVStation if isinstance(node, LVStationDing0): if not node.lv_load_area.is_aggregated: lvstations_idx += 1 + lv_grid_name = '_'.join( + ['LVGridDing0', 'LV', str(node.id_db), + str(node.id_db)]) lv_stations_dict[lvstations_idx] = { - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), - 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), + 'id': node.id_db, + 'name': '_'.join([node.__class__.__name__, + 'MV', str(mv_grid_id), + str(node.id_db)]), 'geom': geom, 'run_id': run_id, } @@ -1848,10 +1874,10 @@ def aggregate_loads(la_center, aggr): # LV-MV mapping LVMVmapping_idx += 1 mvlv_mapping_dict[LVMVmapping_idx] = { - 'MV_grid_id': mv_grid_id, - 'MV_grid_id_db': mv_grid_id_db, - 'LV_grid_id': node.id_db, - 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), + 'mv_grid_id': mv_grid_id, + 'mv_grid_name': mv_grid_name, + 'lv_grid_id': node.id_db, + 'lv_grid_name': lv_grid_name, 'run_id': run_id, } @@ -1859,9 +1885,11 @@ def aggregate_loads(la_center, aggr): for t in node.transformers(): lvtrafos_idx += 1 mvlv_trafos_dict[lvtrafos_idx] = { - 'id_db': '_'.join([str(t.__class__.__name__), 'LV', str(mv_grid_id), str(node.id_db)]), + 'id': node.id_db, 'geom': geom, - 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), + 'name': '_'.join(['LVTransformerDing0', 'LV', + str(node.id_db), + str(node.id_db)]), 'voltage_op': t.v_level, 'S_nom': t.s_max_a, 'X': t.x, @@ -1869,32 +1897,9 @@ def aggregate_loads(la_center, aggr): 'run_id': run_id, } - # MVStation - elif isinstance(node, MVStationDing0): - mvstations_idx += 1 - mv_stations_dict[mvstations_idx] = { - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), - 'MV_grid_id_db': mv_grid_id_db, - 'geom': geom, - 'run_id': run_id, - } - - # Trafos MV - for t in node.transformers(): - mvtrafos_idx += 1 - hvmv_trafos_dict[mvtrafos_idx] = { - 'id_db': '_'.join([str(t.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), - 'geom': geom, - 'MV_grid_id_db': mv_grid_id_db, - 'voltage_op': t.v_level, - 'S_nom': t.s_max_a, - 'X': t.x, - 'R': t.r, - 'run_id': run_id, - } - # MVGenerator - elif (isinstance(node, GeneratorDing0) or isinstance(node, GeneratorFluctuatingDing0)): + elif (isinstance(node, GeneratorDing0) or isinstance(node, + GeneratorFluctuatingDing0)): if node.subtype == 
None: subtype = 'other' else: @@ -1902,9 +1907,11 @@ def aggregate_loads(la_center, aggr): if isinstance(node, GeneratorFluctuatingDing0): type = node.type mvgen_idx += 1 - mv_gen_dict[mvgen_idx] ={ - 'id_db': node.id_db, - 'MV_grid_id_db': mv_grid_id_db, + mv_gen_dict[mvgen_idx] = { + 'id': node.id_db, + 'name': '_'.join(['GeneratorFluctuatingDing0', 'MV', + str(mv_grid_id), + str(node.id_db)]), 'geom': geom, 'type': type, 'subtype': subtype, @@ -1918,8 +1925,10 @@ def aggregate_loads(la_center, aggr): type = node.type mvgen_idx += 1 mv_gen_dict[mvgen_idx] = { - 'id_db': node.id_db, - 'MV_grid_id_db': mv_grid_id_db, + 'id': node.id_db, + 'name': '_'.join( + ['GeneratorDing0', 'MV', str(mv_grid_id), + str(node.id_db)]), 'geom': geom, 'type': type, 'subtype': subtype, @@ -1932,8 +1941,10 @@ def aggregate_loads(la_center, aggr): elif isinstance(node, MVCableDistributorDing0): mvcd_idx += 1 mv_cd_dict[mvcd_idx] = { - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), - 'MV_grid_id_db': mv_grid_id_db, + 'id': node.id_db, + 'name': '_'.join( + [str(node.__class__.__name__), 'MV', + str(mv_grid_id), str(node.id_db)]), 'geom': geom, 'run_id': run_id, } @@ -1969,50 +1980,52 @@ def aggregate_loads(la_center, aggr): elif aggr_node == 'load': for type in aggr['load']: mvloads_idx += 1 + mv_aggr_load_name = '_'.join( + ['Load_aggregated', str(type), + repr(mv_district.mv_grid), + str(node.lv_load_area.id_db)]) mv_loads_dict[mvloads_idx] = { - 'id_db': '_'.join( - ['AggregatedLoad', 'MV', str(mv_grid_id), str(mvloads_idx)]), - 'MV_grid_id_db': mv_grid_id_db, + # Exception: aggregated loads get a string as id + 'id': mv_aggr_load_name, + 'name': mv_aggr_load_name, 'geom': geom, - 'consumption': json.dumps({type: aggr['load'][type]['nominal']}), + 'consumption': json.dumps( + {type: aggr['load'][type]['nominal']}), 'is_aggregated': True, 'run_id': run_id, } lines_idx += 1 aggr_lines += 1 + edge_name = '_'.join( + ['line_aggr_load_la_', + str(node.lv_load_area.id_db), str(type), + str(node.lv_load_area.id_db)]) lines_dict[lines_idx] = { - # ToDo: Rename edge_name - 'edge_name': '_'.join( - [str(mv_grid_id), 'aggr', str(node.lv_load_area.id_db), str(aggr_lines)]), - # 'edge_name': '_'.join( - # ['line_aggr_load', str(node.lv_load_area), 'vlevel', str(v_level), - # 'subtype', str(subtype)]), # }'.format(v_level=v_level, subtype=subtype), - 'grid_id_db': mv_grid_id_db, - # ToDo: read type_name from aggr_line_type - 'type_name': 'NA2XS2Y 3x1x500 RM/35', # aggr_line_type.name, - 'type_kind': 'cable', # branch['branch'].kind, - # 'type': aggr_line_type, + 'id': edge_name, + 'edge_name': edge_name, + 'grid_name': mv_grid_name, + 'type_name': aggr_line_type.name, + 'type_kind': 'cable', 'length': 1e-3, # in km 'U_n': aggr_line_type.U_n, 'I_max_th': aggr_line_type.I_max_th, 'R': aggr_line_type.R, 'L': aggr_line_type.L, 'C': aggr_line_type.C, - 'node1': '_'.join( - ['AggregatedLoad', 'MV', str(mv_grid_id), str(mvloads_idx)]), - 'node2': '_'.join([ - 'MVStationDing0', 'MV', str(mv_grid_id), str(mv_grid_id)]), + 'node1': mv_aggr_load_name, + 'node2': mv_station_name, 'run_id': run_id, } + # TODO: eventually remove export of DisconnectingPoints from export # DisconnectingPoints elif isinstance(node, CircuitBreakerDing0): mvcb_idx += 1 mvcb_dict[mvcb_idx] = { - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), - 'MV_grid_id': mv_grid_id, - 'MV_grid_id_db': mv_grid_id_db, + 'id': node.id_db, + 'name': '_'.join([str(node.__class__.__name__), 
'MV', + str(mv_grid_id), str(node.id_db)]), 'geom': geom, 'status': node.status, 'run_id': run_id, @@ -2023,14 +2036,20 @@ def aggregate_loads(la_center, aggr): # MVedges for branch in mv_district.mv_grid.graph_edges(): # geom = wkt_dumps(node.geo_data) - geom = from_shape(LineString([branch['adj_nodes'][0].geo_data, branch['adj_nodes'][1].geo_data]), + geom = from_shape(LineString([branch['adj_nodes'][0].geo_data, + branch['adj_nodes'][1].geo_data]), srid=srid) - if not any([isinstance(branch['adj_nodes'][0], LVLoadAreaCentreDing0), - isinstance(branch['adj_nodes'][1], LVLoadAreaCentreDing0)]): + if not any([isinstance(branch['adj_nodes'][0], + LVLoadAreaCentreDing0), + isinstance(branch['adj_nodes'][1], + LVLoadAreaCentreDing0)]): lines_idx += 1 lines_dict[lines_idx] = { - 'edge_name': branch['branch'].id_db, - 'grid_id_db': mv_grid_id_db, + 'id': branch['branch'].id_db, + 'edge_name': '_'.join( + [branch['branch'].__class__.__name__, + str(branch['branch'].id_db)]), + 'grid_name': mv_grid_name, 'type_name': branch['branch'].type['name'], 'type_kind': branch['branch'].kind, 'length': branch['branch'].length / 1e3, @@ -2039,10 +2058,14 @@ def aggregate_loads(la_center, aggr): 'R': branch['branch'].type['R'], 'L': branch['branch'].type['L'], 'C': branch['branch'].type['C'], - 'node1': '_'.join([str(branch['adj_nodes'][0].__class__.__name__), 'MV', str(mv_grid_id), - str(branch['adj_nodes'][0].id_db)]), - 'node2': '_'.join([str(branch['adj_nodes'][1].__class__.__name__), 'MV', str(mv_grid_id), - str(branch['adj_nodes'][1].id_db)]), + 'node1': '_'.join( + [str(branch['adj_nodes'][0].__class__.__name__), + 'MV', str(mv_grid_id), + str(branch['adj_nodes'][0].id_db)]), + 'node2': '_'.join( + [str(branch['adj_nodes'][1].__class__.__name__), + 'MV', str(mv_grid_id), + str(branch['adj_nodes'][1].id_db)]), 'run_id': run_id, } @@ -2051,30 +2074,30 @@ def aggregate_loads(la_center, aggr): for lv_district in LA.lv_grid_districts(): if not lv_district.lv_grid.grid_district.lv_load_area.is_aggregated: + lv_grid_id = lv_district.lv_grid.id_db + lv_grid_name = '_'.join( + [str(lv_district.lv_grid.__class__.__name__), 'LV', + str(lv_district.lv_grid.id_db), + str(lv_district.lv_grid.id_db)]) + lvgrid_idx += 1 lv_grid_dict[lvgrid_idx] = { - 'LV_grid_id': lv_district.lv_grid.id_db, - 'id_db': '_'.join( - [str(lv_district.lv_grid.__class__.__name__), 'LV', str(lv_district.lv_grid.id_db), - str(lv_district.lv_grid.id_db)]), + 'id': lv_district.lv_grid.id_db, + 'name': lv_grid_name, 'geom': wkt_dumps(lv_district.geo_data), 'population': lv_district.population, 'voltage_nom': lv_district.lv_grid.v_level / 1e3, 'run_id': run_id } - lv_grid_id = lv_district.lv_grid.id_db - lv_grid_id_db = '_'.join( - [str(lv_district.lv_grid.__class__.__name__), 'LV', str(lv_district.lv_grid.id_db), - str(lv_district.lv_grid.id_db)]) - # geom = from_shape(Point(lv_district.lv_grid.station().geo_data), srid=srid) # geom = wkt_dumps(lv_district.geo_data)# lv_grid.station() #ding0_lv_grid.grid_district.geo_data for node in lv_district.lv_grid.graph_nodes_sorted(): # geom = wkt_dumps(node.geo_data) # LVGenerator - if (isinstance(node, GeneratorDing0) or isinstance(node, GeneratorFluctuatingDing0)): + if (isinstance(node, GeneratorDing0) or isinstance(node, + GeneratorFluctuatingDing0)): if node.subtype == None: subtype = 'other' else: @@ -2083,9 +2106,13 @@ def aggregate_loads(la_center, aggr): type = node.type lvgen_idx += 1 lv_gen_dict[lvgen_idx] = { - 'id_db': node.id_db, + 'id': node.id_db, 'la_id': LA.id_db, - 'LV_grid_id_db': 
lv_grid_id_db, + 'name': '_'.join( + ['GeneratorFluctuatingDing0', 'LV', + str(lv_grid_id), + str(node.id_db)]), + 'lv_grid_id': lv_grid_id, 'geom': wkt_dumps(node.geo_data), 'type': type, 'subtype': subtype, @@ -2099,9 +2126,13 @@ def aggregate_loads(la_center, aggr): type = node.type lvgen_idx += 1 lv_gen_dict[lvgen_idx] = { - 'id_db': node.id_db, + 'id': node.id_db, + 'name': '_'.join( + ['GeneratorDing0', 'LV', + str(lv_grid_id), + str(node.id_db)]), 'la_id': LA.id_db, - 'LV_grid_id_db': lv_grid_id_db, + 'lv_grid_id': lv_grid_id, 'geom': wkt_dumps(node.geo_data), 'type': type, 'subtype': subtype, @@ -2116,9 +2147,11 @@ def aggregate_loads(la_center, aggr): if not node.grid.grid_district.lv_load_area.is_aggregated: lvcd_idx += 1 lv_cd_dict[lvcd_idx] = { - 'id_db': '_'.join( - [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), - 'LV_grid_id_db': lv_grid_id_db, + 'name': '_'.join( + [str(node.__class__.__name__), 'LV', + str(lv_grid_id), str(node.id_db)]), + 'id': node.id_db, + 'lv_grid_id': lv_grid_id, 'geom': None, # wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? 'run_id': run_id, @@ -2129,75 +2162,92 @@ def aggregate_loads(la_center, aggr): if not node.grid.grid_district.lv_load_area.is_aggregated: lvloads_idx += 1 lv_loads_dict[lvloads_idx] = { - 'id_db': '_'.join( - [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), - 'LV_grid_id_db': lv_grid_id_db, + 'id': node.id_db, + 'name': '_'.join( + [str(node.__class__.__name__), 'LV', + str(lv_grid_id), str(node.id_db)]), + 'lv_grid_id': lv_grid_id, 'geom': None, # wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? 'consumption': json.dumps(node.consumption), 'run_id': run_id, } - else: - type = 'Unknown' - # LVedges for branch in lv_district.lv_grid.graph_edges(): if not branch['branch'].connects_aggregated: - # geom = from_shape( - # LineString([branch['adj_nodes'][0].geo_data, branch['adj_nodes'][1].geo_data]), srid=srid) - if not any([isinstance(branch['adj_nodes'][0], LVLoadAreaCentreDing0), - isinstance(branch['adj_nodes'][1], LVLoadAreaCentreDing0)]): + # geom = from_shape( + # LineString([branch['adj_nodes'][0].geo_data, branch['adj_nodes'][1].geo_data]), srid=srid) + if not any([isinstance(branch['adj_nodes'][0], + LVLoadAreaCentreDing0), + isinstance(branch['adj_nodes'][1], + LVLoadAreaCentreDing0)]): lines_idx += 1 lines_dict[lines_idx] = { - 'edge_name': branch['branch'].id_db, - 'grid_id_db': lv_grid_id_db, - 'type_name': branch['branch'].type.to_frame().columns[0], + 'id': branch['branch'].id_db, + 'edge_name': '_'.join( + [branch.__class__.__name__, + str(branch['branch'].id_db)]), + 'grid_name': lv_grid_name, + 'type_name': branch[ + 'branch'].type.to_frame().columns[0], 'type_kind': branch['branch'].kind, - 'length': branch['branch'].length / 1e3, # length in km - 'U_n': branch['branch'].type['U_n'] / 1e3, # U_n in kV - 'I_max_th': branch['branch'].type['I_max_th'], + 'length': branch['branch'].length / 1e3, + # length in km + 'U_n': branch['branch'].type['U_n'] / 1e3, + # U_n in kV + 'I_max_th': branch['branch'].type[ + 'I_max_th'], 'R': branch['branch'].type['R'], 'L': branch['branch'].type['L'], 'C': branch['branch'].type['C'], 'node1': '_'.join( - [str(branch['adj_nodes'][0].__class__.__name__), 'LV', str(lv_grid_id), + [str(branch['adj_nodes'][ + 0].__class__.__name__), 'LV', + str(lv_grid_id), str(branch['adj_nodes'][0].id_db)]) - if not isinstance(branch['adj_nodes'][0], LVStationDing0) else '_'.join( - 
[str(branch['adj_nodes'][0].__class__.__name__), 'MV', str(mv_grid_id), + if not isinstance(branch['adj_nodes'][0], + LVStationDing0) else '_'.join( + [str(branch['adj_nodes'][ + 0].__class__.__name__), 'MV', + str(mv_grid_id), str(branch['adj_nodes'][0].id_db)]), 'node2': '_'.join( - [str(branch['adj_nodes'][1].__class__.__name__), 'LV', str(lv_grid_id), + [str(branch['adj_nodes'][ + 1].__class__.__name__), 'LV', + str(lv_grid_id), str(branch['adj_nodes'][1].id_db)]) - if not isinstance(branch['adj_nodes'][1], LVStationDing0) else '_'.join( - [str(branch['adj_nodes'][1].__class__.__name__), 'MV', str(mv_grid_id), + if not isinstance(branch['adj_nodes'][1], + LVStationDing0) else '_'.join( + [str(branch['adj_nodes'][ + 1].__class__.__name__), 'MV', + str(mv_grid_id), str(branch['adj_nodes'][1].id_db)]), 'run_id': run_id, } - lv_grid = pd.DataFrame.from_dict(lv_grid_dict, orient='index') - lv_gen = pd.DataFrame.from_dict(lv_gen_dict, orient='index') - lv_cd = pd.DataFrame.from_dict(lv_cd_dict, orient='index') - lv_stations = pd.DataFrame.from_dict(lv_stations_dict, orient='index') - mvlv_trafos = pd.DataFrame.from_dict(mvlv_trafos_dict, orient='index') - lv_loads = pd.DataFrame.from_dict(lv_loads_dict, orient='index') - mv_grid = pd.DataFrame.from_dict(mv_grid_dict, orient='index') - mv_gen = pd.DataFrame.from_dict(mv_gen_dict, orient='index') - mv_cb = pd.DataFrame.from_dict(mvcb_dict, orient='index') - mv_cd = pd.DataFrame.from_dict(mv_cd_dict, orient='index') - mv_stations = pd.DataFrame.from_dict(mv_stations_dict, orient='index') - # mv_areacenter= pd.DataFrame.from_dict(areacenter_dict, orient='index') - hvmv_trafos = pd.DataFrame.from_dict(hvmv_trafos_dict, orient='index') - mv_loads = pd.DataFrame.from_dict(mv_loads_dict, orient='index') - lines = pd.DataFrame.from_dict(lines_dict, orient='index') - mvlv_mapping = pd.DataFrame.from_dict(mvlv_mapping_dict, orient='index') + lv_grid = pd.DataFrame.from_dict(lv_grid_dict, orient='index') + lv_gen = pd.DataFrame.from_dict(lv_gen_dict, orient='index') + lv_cd = pd.DataFrame.from_dict(lv_cd_dict, orient='index') + lv_stations = pd.DataFrame.from_dict(lv_stations_dict, orient='index') + mvlv_trafos = pd.DataFrame.from_dict(mvlv_trafos_dict, orient='index') + lv_loads = pd.DataFrame.from_dict(lv_loads_dict, orient='index') + mv_grid = pd.DataFrame.from_dict(mv_grid_dict, orient='index') + mv_gen = pd.DataFrame.from_dict(mv_gen_dict, orient='index') + mv_cb = pd.DataFrame.from_dict(mvcb_dict, orient='index') + mv_cd = pd.DataFrame.from_dict(mv_cd_dict, orient='index') + mv_stations = pd.DataFrame.from_dict(mv_stations_dict, orient='index') + hvmv_trafos = pd.DataFrame.from_dict(hvmv_trafos_dict, orient='index') + mv_loads = pd.DataFrame.from_dict(mv_loads_dict, orient='index') + lines = pd.DataFrame.from_dict(lines_dict, orient='index') + mvlv_mapping = pd.DataFrame.from_dict(mvlv_mapping_dict, orient='index') lines = lines[sorted(lines.columns.tolist())] - return run_id, metadata_json,\ - lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ - mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ - lines, mvlv_mapping # mv_areacenter, + return run_id, metadata_json, \ + lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ + mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ + lines, mvlv_mapping ####################################################### From 18c34d71cd863a142005c3b089cc38d09dc9cea0 Mon Sep 17 00:00:00 2001 From: gplessm Date: Thu, 12 Jul 2018 21:38:52 +0200 Subject: [PATCH 
020/215] Adapt table descriptions in docs (only partially) --- doc/usage_details.rst | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index b8432462..7e3d788b 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -216,8 +216,8 @@ LV-Stations :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "id", "int","unambiguous id_db of LV-Grid", "n/a" + "lv_grid_name", "str", "unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid#'", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -227,8 +227,8 @@ LV-Transformers :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'TransformerDing0_LV_#mvgridid#_#lvgridid#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "id", "int","unambiguous id_db of LV-Grid", "n/a" + "lv_grid_name", "str", "unambiguous name: 'TransformerDing0_LV_#mvgridid#_#lvgridid#'", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "voltage_op","float","","kV" "S_nom","float","nominal apparent power","kVA" @@ -242,10 +242,10 @@ LV-Grids :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "LV_grid_id", "int","unambiguous number of LV-Grid", "n/a" - "MV_grid_id", "int","unambiguous number of MV-Grid", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "lv_grid_id", "int","unambiguous number of LV-Grid", "n/a" + "mv_grid_id", "int","unambiguous number of MV-Grid", "n/a" + "lv_grid_name", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" + "mv_grid_name", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" MV-Branchtees @@ -280,8 +280,8 @@ MV-Grids :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "MV_grid_id", "int","unambiguous number of LV-Grid", "n/a" + "id", "int","unambiguous number of LV-Grid", "n/a" + "mv_grid_name", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" "geom", "wkt","geometric coordinates", "WGS84 MULTIPOLYGON" "population","int","population in LV-Grid","?" 
"voltage_nom","float","voltage level of grid","kV" From 2869dacb54777cb1fd24efbfa8ee1a9f2e7dba87 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Tue, 17 Jul 2018 14:08:14 +0200 Subject: [PATCH 021/215] Bring the oedb export back to working order, added LineString Geometries to represent line connections on qgis --- ding0/tools/results.py | 103 +++++++++++++++++++++++------------------ 1 file changed, 57 insertions(+), 46 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 5b5bd476..84e87eb3 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -2016,6 +2016,9 @@ def aggregate_loads(la_center, aggr): 'node1': mv_aggr_load_name, 'node2': mv_station_name, 'run_id': run_id, + 'geom': from_shape(LineString([mv_station.geo_data, + mv_station.geo_data]), + srid=srid) } # TODO: eventually remove export of DisconnectingPoints from export @@ -2067,6 +2070,9 @@ def aggregate_loads(la_center, aggr): 'MV', str(mv_grid_id), str(branch['adj_nodes'][1].id_db)]), 'run_id': run_id, + 'geom': from_shape(LineString([branch['adj_nodes'][0].geo_data, + branch['adj_nodes'][1].geo_data]), + srid=srid), } if lv_info: @@ -2176,8 +2182,6 @@ def aggregate_loads(la_center, aggr): # LVedges for branch in lv_district.lv_grid.graph_edges(): if not branch['branch'].connects_aggregated: - # geom = from_shape( - # LineString([branch['adj_nodes'][0].geo_data, branch['adj_nodes'][1].geo_data]), srid=srid) if not any([isinstance(branch['adj_nodes'][0], LVLoadAreaCentreDing0), isinstance(branch['adj_nodes'][1], @@ -2224,6 +2228,7 @@ def aggregate_loads(la_center, aggr): str(mv_grid_id), str(branch['adj_nodes'][1].id_db)]), 'run_id': run_id, + 'geom': None } lv_grid = pd.DataFrame.from_dict(lv_grid_dict, orient='index') @@ -2302,18 +2307,20 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0Line( run_id=row['run_id'], + id_db=row['id'], edge_name=row['edge_name'], - grid_id_db=row['grid_id_db'], + grid_name=row['grid_name'], node1=row['node1'], node2=row['node2'], type_kind=row['type_kind'], type_name=row['type_name'], length=row['length'], - U_n=row['U_n'], - C=row['C'], - L=row['L'], - R=row['R'], - I_max_th=row['I_max_th'], + u_n=row['U_n'], + c=row['C'], + l=row['L'], + r=row['R'], + i_max_th=row['I_max_th'], + geom=row['geom'], )) , axis=1) @@ -2321,8 +2328,8 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0LvBranchtee( run_id=row['run_id'], - id_db=row['id_db'], - lv_grid_id_db=row['LV_grid_id_db'], + id_db=row['id'], + name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, )) @@ -2332,9 +2339,10 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0LvGenerator( run_id=row['run_id'], - id_db=row['id_db'], + id_db=row['id'], la_id=row['la_id'], - lv_grid_id_db=str(row['LV_grid_id_db']), + name=row['name'], + lv_grid_id=str(row['lv_grid_id']), geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, type=row['type'], @@ -2342,7 +2350,8 @@ def export_network_to_oedb(session, table, tabletype, srid): v_level=row['v_level'], nominal_capacity=row['nominal_capacity'], is_aggregated=row['is_aggregated'], - weather_cell_id=row['weather_cell_id'] + weather_cell_id=row['weather_cell_id'] if not(pd.isnull(row[ + 'weather_cell_id'])) else None, )) , axis=1) @@ -2351,8 +2360,9 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda 
row: session.add(md.EgoGridDing0LvLoad( run_id=row['run_id'], - id_db=row['id_db'], - lv_grid_id_db=row['LV_grid_id_db'], + id_db=row['id'], + name=row['name'], + lv_grid_id=row['lv_grid_id'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, consumption=row['consumption'] @@ -2363,8 +2373,8 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0LvGrid( run_id=row['run_id'], - id_db=row['id_db'], - lv_grid_id=row['LV_grid_id'], + id_db=row['id'], + name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, population=row['population'], @@ -2376,8 +2386,8 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0LvStation( run_id=row['run_id'], - id_db=row['id_db'], - lv_grid_id_db=row['LV_grid_id_db'], + id_db=row['id'], + name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, )) @@ -2387,14 +2397,14 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvlvTransformer( run_id=row['run_id'], - id_db=row['id_db'], - lv_grid_id_db=row['LV_grid_id_db'], + id_db=row['id'], + name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, voltage_op=row['voltage_op'], - S_nom=row['S_nom'], - X=row['X'], - R=row['R'], + s_nom=row['S_nom'], + x=row['X'], + r=row['R'], )) , axis=1) @@ -2402,10 +2412,10 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvlvMapping( run_id=row['run_id'], - lv_grid_id=row['LV_grid_id'], - lv_grid_id_db=row['LV_grid_id_db'], - mv_grid_id=row['MV_grid_id'], - mv_grid_id_db=row['MV_grid_id_db'], + lv_grid_id=row['lv_grid_id'], + lv_grid_name=row['lv_grid_name'], + mv_grid_id=row['mv_grid_id'], + mv_grid_name=row['mv_grid_name'], )) , axis=1) @@ -2413,8 +2423,8 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvBranchtee( run_id=row['run_id'], - id_db=row['id_db'], - mv_grid_id_db=row['MV_grid_id_db'], + id_db=row['id'], + name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, )) @@ -2424,8 +2434,8 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvCircuitbreaker( run_id=row['run_id'], - id_db=row['id_db'], - mv_grid_id_db=row['MV_grid_id_db'], + id_db=row['id'], + name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, status=row['status'], @@ -2436,8 +2446,8 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvGenerator( run_id=row['run_id'], - id_db=row['id_db'], - mv_grid_id_db=row['MV_grid_id_db'], + id_db=row['id'], + name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, type=row['type'], @@ -2445,7 +2455,8 @@ def export_network_to_oedb(session, table, tabletype, srid): v_level=row['v_level'], nominal_capacity=row['nominal_capacity'], is_aggregated=row['is_aggregated'], - weather_cell_id=row['weather_cell_id'] + weather_cell_id=row['weather_cell_id'] if not(pd.isnull(row[ + 'weather_cell_id'])) else None, )) , axis=1) @@ -2453,8 +2464,8 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvLoad( run_id=row['run_id'], - id_db=row['id_db'], - 
mv_grid_id_db=row['MV_grid_id_db'], + id_db=row['id'], + name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, is_aggregated=row['is_aggregated'], @@ -2466,8 +2477,8 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvGrid( run_id=row['run_id'], - id_db=row['id_db'], - mv_grid_id=row['MV_grid_id'], + id_db=row['id'], + name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, population=row['population'], @@ -2479,8 +2490,8 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvStation( run_id=row['run_id'], - id_db=row['id_db'], - mv_grid_id_db=row['MV_grid_id_db'], + id_db=row['id'], + name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, )) @@ -2490,14 +2501,14 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0HvmvTransformer( run_id=row['run_id'], - id_db=row['id_db'], - mv_grid_id_db=row['MV_grid_id_db'], + id_db=row['id'], + name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, voltage_op=row['voltage_op'], - S_nom=row['S_nom'], - X=row['X'], - R=row['R'], + s_nom=row['S_nom'], + x=row['X'], + r=row['R'], )) , axis=1) # if not engine.dialect.has_table(engine, 'ego_grid_mv_transformer'): From 5f913f22d54bce1c3267558d23d4b7fe72eee2f3 Mon Sep 17 00:00:00 2001 From: gplessm Date: Thu, 19 Jul 2018 10:50:47 +0200 Subject: [PATCH 022/215] Change name attribute for CSV/OEDB export Makes consistent names in eDisGo --- ding0/tools/results.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 84e87eb3..f8531789 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -1998,7 +1998,7 @@ def aggregate_loads(la_center, aggr): lines_idx += 1 aggr_lines += 1 edge_name = '_'.join( - ['line_aggr_load_la_', + ['line_aggr_load_la', str(node.lv_load_area.id_db), str(type), str(node.lv_load_area.id_db)]) lines_dict[lines_idx] = { From f1d06bd73ed0edd4de54d17f6ef588172c2f2821 Mon Sep 17 00:00:00 2001 From: gplessm Date: Tue, 31 Jul 2018 14:13:22 +0200 Subject: [PATCH 023/215] Adapt ding0 csv/oedb exporter Final changes by gplssm --- ding0/tools/results.py | 36 ++++++++++++++++++++++-------------- 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index f8531789..9f9c8674 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -1680,7 +1680,7 @@ def parallel_running_stats(districts_list, ######################################################## -def export_network(nw, mode=''): +def export_network(nw, mode='', run_id=None): """ Export all nodes and lines of the network nw as DataFrames @@ -1715,7 +1715,8 @@ def export_network(nw, mode=''): lv_info = False ############################## # from datetime import datetime - run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") + if not run_id: + run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") metadata_json = json.dumps(nw.metadata) ############################## ############################# @@ -1980,13 +1981,15 @@ def aggregate_loads(la_center, aggr): elif aggr_node == 'load': for type in aggr['load']: mvloads_idx += 1 + aggr_line_id = 100 * node.lv_load_area.id_db + mvloads_idx + 1 mv_aggr_load_name = '_'.join( ['Load_aggregated', str(type), 
repr(mv_district.mv_grid), - str(node.lv_load_area.id_db)]) + # str(node.lv_load_area.id_db)]) + str(aggr_line_id)]) mv_loads_dict[mvloads_idx] = { # Exception: aggregated loads get a string as id - 'id': mv_aggr_load_name, + 'id': aggr_line_id, #node.lv_load_area.id_db, #mv_aggr_load_name, 'name': mv_aggr_load_name, 'geom': geom, 'consumption': json.dumps( @@ -2000,9 +2003,10 @@ def aggregate_loads(la_center, aggr): edge_name = '_'.join( ['line_aggr_load_la', str(node.lv_load_area.id_db), str(type), - str(node.lv_load_area.id_db)]) + # str(node.lv_load_area.id_db)]) + str(aggr_line_id)]) lines_dict[lines_idx] = { - 'id': edge_name, + 'id': aggr_line_id, #node.lv_load_area.id_db, 'edge_name': edge_name, 'grid_name': mv_grid_name, 'type_name': aggr_line_type.name, @@ -2528,13 +2532,14 @@ def export_data_to_oedb(session, run_id, metadata_json, srid, # get the run_id from model_draft.ego_grid_ding0_versioning # compare the run_id from table to the current run_id - oedb_versioning_query = session.query( - md.EgoGridDing0Versioning.run_id, - md.EgoGridDing0Versioning.description - ).filter(md.EgoGridDing0Versioning.run_id == run_id) - - oedb_versioning = pd.read_sql_query(oedb_versioning_query.statement, - session.bind) + # oedb_versioning_query = session.query( + # md.EgoGridDing0Versioning.run_id, + # md.EgoGridDing0Versioning.description + # ).filter(md.EgoGridDing0Versioning.run_id == run_id) + # + # oedb_versioning = pd.read_sql_query(oedb_versioning_query.statement, + # session.bind) + oedb_versioning = pd.DataFrame() if oedb_versioning.empty: # if the run_id doesn't exist then @@ -2684,13 +2689,16 @@ def grant_db_access(engine, table, role): COMMIT;""".format(schema=schema, table=tablename, role=role) - engine.execute(grant_str) + # engine.execute(grant_str) + engine.execution_options(autocommit=True).execute(grant_str) # engine.echo=True for tab in tables: grant_db_access(engine, tab, 'oeuser') + engine.close() + ######################################################## if __name__ == "__main__": From 35bec461f6f2a9abfac4809af157604d244f928d Mon Sep 17 00:00:00 2001 From: Jonas Huber Date: Tue, 31 Jul 2018 15:00:29 +0200 Subject: [PATCH 024/215] #268 changed column names according to proposal --- ding0/tools/results.py | 50 +++++++++++++++++++++--------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index f8531789..0c2434ac 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -2307,7 +2307,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0Line( run_id=row['run_id'], - id_db=row['id'], + id=row['id'], edge_name=row['edge_name'], grid_name=row['grid_name'], node1=row['node1'], @@ -2315,11 +2315,11 @@ def export_network_to_oedb(session, table, tabletype, srid): type_kind=row['type_kind'], type_name=row['type_name'], length=row['length'], - u_n=row['U_n'], - c=row['C'], - l=row['L'], - r=row['R'], - i_max_th=row['I_max_th'], + u_n=row['u_n'], + c=row['c'], + l=row['l'], + r=row['r'], + i_max_th=row['i_max_th'], geom=row['geom'], )) , axis=1) @@ -2328,7 +2328,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0LvBranchtee( run_id=row['run_id'], - id_db=row['id'], + id=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2339,7 +2339,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: 
session.add(md.EgoGridDing0LvGenerator( run_id=row['run_id'], - id_db=row['id'], + id=row['id'], la_id=row['la_id'], name=row['name'], lv_grid_id=str(row['lv_grid_id']), @@ -2360,7 +2360,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0LvLoad( run_id=row['run_id'], - id_db=row['id'], + id=row['id'], name=row['name'], lv_grid_id=row['lv_grid_id'], geom="SRID={};{}".format(srid, row['geom']) if row[ @@ -2373,7 +2373,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0LvGrid( run_id=row['run_id'], - id_db=row['id'], + id=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2386,7 +2386,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0LvStation( run_id=row['run_id'], - id_db=row['id'], + id=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2397,14 +2397,14 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvlvTransformer( run_id=row['run_id'], - id_db=row['id'], + id=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, voltage_op=row['voltage_op'], - s_nom=row['S_nom'], - x=row['X'], - r=row['R'], + s_nom=row['s_nom'], + x=row['x'], + r=row['r'], )) , axis=1) @@ -2423,7 +2423,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvBranchtee( run_id=row['run_id'], - id_db=row['id'], + id=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2434,7 +2434,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvCircuitbreaker( run_id=row['run_id'], - id_db=row['id'], + id=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2446,7 +2446,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvGenerator( run_id=row['run_id'], - id_db=row['id'], + id=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2464,7 +2464,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvLoad( run_id=row['run_id'], - id_db=row['id'], + id=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2477,7 +2477,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvGrid( run_id=row['run_id'], - id_db=row['id'], + id=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2490,7 +2490,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvStation( run_id=row['run_id'], - id_db=row['id'], + id=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2501,14 +2501,14 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0HvmvTransformer( run_id=row['run_id'], - id_db=row['id'], + id=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, 
voltage_op=row['voltage_op'], - s_nom=row['S_nom'], - x=row['X'], - r=row['R'], + s_nom=row['s_nom'], + x=row['x'], + r=row['r'], )) , axis=1) # if not engine.dialect.has_table(engine, 'ego_grid_mv_transformer'): From 13144a8f6d3d6c5c2b832939be595e5894efdc96 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 31 Jul 2018 16:46:26 +0200 Subject: [PATCH 025/215] #268 eDisGo - changed the description of the tables in the documentation --- doc/usage_details.rst | 58 +++++++++++++++++++++---------------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 7e3d788b..5c6fb5d3 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -152,11 +152,11 @@ Lines "node1","str","id_db of first node","n/a" "node2","str","id_db of second node","n/a" "length","float","length of line","km" - "U_n","float","nominal voltage","kV" - "R","float","","Ohm/km" - "C","float","inductive resistance at 50Hz","uF/km" - "L","float","","mH/km" - "I_max_th","float","","A" + "u_n","float","nominal voltage","kV" + "r","float","","Ohm/km" + "c","float","inductive resistance at 50Hz","uF/km" + "l","float","","mH/km" + "i_max_th","float","","A" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" LV-Branchtees @@ -165,8 +165,8 @@ LV-Branchtees :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "id", "str", "unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" + "lv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "None","geometric coordinates", "n/a" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -176,8 +176,8 @@ LV-Generators :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "id", "str", "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" + "lv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "type","str","type of generation","{solar; biomass}" "subtype","str","subtype of generation: {solar_roof_mounted, unknown; biomass}","n/a" @@ -191,8 +191,8 @@ LV-Grids :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" - "LV_grid_id", "int","unambiguous number of LV-Grid", "n/a" + "id", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" + "lv_grid_id", "int","unambiguous number of LV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 MULTIPOLYGON" "population","int","population in LV-Grid","?" 
"voltage_nom","float","voltage level of grid","kV" @@ -204,8 +204,8 @@ LV-Loads :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "id", "str", "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" + "lv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "None","geometric coordinates", "n/a" "consumption","{''str'': float}","type of load {residential, agricultural, industrial} and corresponding consumption", "n/a" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -231,9 +231,9 @@ LV-Transformers "lv_grid_name", "str", "unambiguous name: 'TransformerDing0_LV_#mvgridid#_#lvgridid#'", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "voltage_op","float","","kV" - "S_nom","float","nominal apparent power","kVA" - "X","float","","Ohm" - "R","float","","Ohm" + "s_nom","float","nominal apparent power","kVA" + "x","float","","Ohm" + "r","float","","Ohm" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" LV-Grids @@ -254,8 +254,8 @@ MV-Branchtees :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "id", "str", "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -265,8 +265,8 @@ MV-Generators :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "id", "str", "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "type","str","type of generation: {solar; biomass}","n/a" "subtype","str","subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas}","n/a" @@ -293,8 +293,8 @@ MV-Loads :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "id", "str", "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POLYGON" "consumption","{''str'': float}","type of load {retail, residential, agricultural, industrial} and corresponding consumption","n/a" "is_aggregated", "boolean", "True if load is aggregated load, else False", "n/a" @@ -306,8 +306,8 @@ MV-Stations :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'MVStationDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "id", "str", "unambiguous name: 'MVStationDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "run_id", "int", "time and date of table 
generation", "yyyyMMddhhmmss" @@ -317,11 +317,11 @@ MV-Transformers :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'TransformerDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "id", "str", "unambiguous name: 'TransformerDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + "mv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "voltage_op","float","","kV" - "S_nom","float","nominal apparent power","kVA" - "X","float","","Ohm" - "R","float","","Ohm" + "s_nom","float","nominal apparent power","kVA" + "x","float","","Ohm" + "r","float","","Ohm" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" \ No newline at end of file From 567277b96b1357e7d6e118d2da773feb2da8e741 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 31 Jul 2018 16:59:59 +0200 Subject: [PATCH 026/215] Revert "#268 eDisGo - changed the description of the tables in the documentation" This reverts commit 13144a8 --- doc/usage_details.rst | 58 +++++++++++++++++++++---------------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 5c6fb5d3..7e3d788b 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -152,11 +152,11 @@ Lines "node1","str","id_db of first node","n/a" "node2","str","id_db of second node","n/a" "length","float","length of line","km" - "u_n","float","nominal voltage","kV" - "r","float","","Ohm/km" - "c","float","inductive resistance at 50Hz","uF/km" - "l","float","","mH/km" - "i_max_th","float","","A" + "U_n","float","nominal voltage","kV" + "R","float","","Ohm/km" + "C","float","inductive resistance at 50Hz","uF/km" + "L","float","","mH/km" + "I_max_th","float","","A" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" LV-Branchtees @@ -165,8 +165,8 @@ LV-Branchtees :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id", "str", "unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "lv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "id_db", "str", "unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" + "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "None","geometric coordinates", "n/a" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -176,8 +176,8 @@ LV-Generators :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id", "str", "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "lv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "id_db", "str", "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" + "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "type","str","type of generation","{solar; biomass}" "subtype","str","subtype of generation: {solar_roof_mounted, unknown; biomass}","n/a" @@ -191,8 +191,8 @@ LV-Grids :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" - "lv_grid_id", "int","unambiguous number of LV-Grid", "n/a" + "id_db", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" + "LV_grid_id", "int","unambiguous number of LV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 MULTIPOLYGON" 
"population","int","population in LV-Grid","?" "voltage_nom","float","voltage level of grid","kV" @@ -204,8 +204,8 @@ LV-Loads :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id", "str", "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "lv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "id_db", "str", "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" + "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "None","geometric coordinates", "n/a" "consumption","{''str'': float}","type of load {residential, agricultural, industrial} and corresponding consumption", "n/a" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -231,9 +231,9 @@ LV-Transformers "lv_grid_name", "str", "unambiguous name: 'TransformerDing0_LV_#mvgridid#_#lvgridid#'", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "voltage_op","float","","kV" - "s_nom","float","nominal apparent power","kVA" - "x","float","","Ohm" - "r","float","","Ohm" + "S_nom","float","nominal apparent power","kVA" + "X","float","","Ohm" + "R","float","","Ohm" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" LV-Grids @@ -254,8 +254,8 @@ MV-Branchtees :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id", "str", "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "id_db", "str", "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -265,8 +265,8 @@ MV-Generators :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id", "str", "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "id_db", "str", "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "type","str","type of generation: {solar; biomass}","n/a" "subtype","str","subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas}","n/a" @@ -293,8 +293,8 @@ MV-Loads :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id", "str", "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "id_db", "str", "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POLYGON" "consumption","{''str'': float}","type of load {retail, residential, agricultural, industrial} and corresponding consumption","n/a" "is_aggregated", "boolean", "True if load is aggregated load, else False", "n/a" @@ -306,8 +306,8 @@ MV-Stations :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id", "str", "unambiguous name: 'MVStationDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "id_db", "str", "unambiguous name: 'MVStationDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 
POINT" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -317,11 +317,11 @@ MV-Transformers :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id", "str", "unambiguous name: 'TransformerDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "mv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "id_db", "str", "unambiguous name: 'TransformerDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + "MV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "voltage_op","float","","kV" - "s_nom","float","nominal apparent power","kVA" - "x","float","","Ohm" - "r","float","","Ohm" + "S_nom","float","nominal apparent power","kVA" + "X","float","","Ohm" + "R","float","","Ohm" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" \ No newline at end of file From 36562ea8b3f084c5d267fa5a36dd5e74b13a6221 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 31 Jul 2018 17:00:59 +0200 Subject: [PATCH 027/215] Revert "#268 changed column names according to proposal" This reverts commit 35bec46 --- ding0/tools/results.py | 50 +++++++++++++++++++++--------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 0c2434ac..f8531789 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -2307,7 +2307,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0Line( run_id=row['run_id'], - id=row['id'], + id_db=row['id'], edge_name=row['edge_name'], grid_name=row['grid_name'], node1=row['node1'], @@ -2315,11 +2315,11 @@ def export_network_to_oedb(session, table, tabletype, srid): type_kind=row['type_kind'], type_name=row['type_name'], length=row['length'], - u_n=row['u_n'], - c=row['c'], - l=row['l'], - r=row['r'], - i_max_th=row['i_max_th'], + u_n=row['U_n'], + c=row['C'], + l=row['L'], + r=row['R'], + i_max_th=row['I_max_th'], geom=row['geom'], )) , axis=1) @@ -2328,7 +2328,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0LvBranchtee( run_id=row['run_id'], - id=row['id'], + id_db=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2339,7 +2339,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0LvGenerator( run_id=row['run_id'], - id=row['id'], + id_db=row['id'], la_id=row['la_id'], name=row['name'], lv_grid_id=str(row['lv_grid_id']), @@ -2360,7 +2360,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0LvLoad( run_id=row['run_id'], - id=row['id'], + id_db=row['id'], name=row['name'], lv_grid_id=row['lv_grid_id'], geom="SRID={};{}".format(srid, row['geom']) if row[ @@ -2373,7 +2373,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0LvGrid( run_id=row['run_id'], - id=row['id'], + id_db=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2386,7 +2386,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0LvStation( run_id=row['run_id'], - id=row['id'], + id_db=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2397,14 +2397,14 @@ def export_network_to_oedb(session, table, tabletype, srid): 
table.apply(lambda row: session.add(md.EgoGridDing0MvlvTransformer( run_id=row['run_id'], - id=row['id'], + id_db=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, voltage_op=row['voltage_op'], - s_nom=row['s_nom'], - x=row['x'], - r=row['r'], + s_nom=row['S_nom'], + x=row['X'], + r=row['R'], )) , axis=1) @@ -2423,7 +2423,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvBranchtee( run_id=row['run_id'], - id=row['id'], + id_db=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2434,7 +2434,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvCircuitbreaker( run_id=row['run_id'], - id=row['id'], + id_db=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2446,7 +2446,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvGenerator( run_id=row['run_id'], - id=row['id'], + id_db=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2464,7 +2464,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvLoad( run_id=row['run_id'], - id=row['id'], + id_db=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2477,7 +2477,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvGrid( run_id=row['run_id'], - id=row['id'], + id_db=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2490,7 +2490,7 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0MvStation( run_id=row['run_id'], - id=row['id'], + id_db=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, @@ -2501,14 +2501,14 @@ def export_network_to_oedb(session, table, tabletype, srid): table.apply(lambda row: session.add(md.EgoGridDing0HvmvTransformer( run_id=row['run_id'], - id=row['id'], + id_db=row['id'], name=row['name'], geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, voltage_op=row['voltage_op'], - s_nom=row['s_nom'], - x=row['x'], - r=row['r'], + s_nom=row['S_nom'], + x=row['X'], + r=row['R'], )) , axis=1) # if not engine.dialect.has_table(engine, 'ego_grid_mv_transformer'): From 615a9a0ad5c36d91dcbdc06b34db1b3aa75d319f Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 31 Jul 2018 17:26:36 +0200 Subject: [PATCH 028/215] #268 changed column names according to proposal (id_db) --- ding0/tools/results.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index f8531789..94292c66 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -2315,11 +2315,11 @@ def export_network_to_oedb(session, table, tabletype, srid): type_kind=row['type_kind'], type_name=row['type_name'], length=row['length'], - u_n=row['U_n'], - c=row['C'], - l=row['L'], - r=row['R'], - i_max_th=row['I_max_th'], + u_n=row['u_n'], + c=row['c'], + l=row['l'], + r=row['r'], + i_max_th=row['i_max_th'], geom=row['geom'], )) , axis=1) @@ -2402,9 +2402,9 @@ def export_network_to_oedb(session, table, tabletype, srid): 
geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, voltage_op=row['voltage_op'], - s_nom=row['S_nom'], - x=row['X'], - r=row['R'], + s_nom=row['s_nom'], + x=row['x'], + r=row['r'], )) , axis=1) @@ -2506,9 +2506,9 @@ def export_network_to_oedb(session, table, tabletype, srid): geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, voltage_op=row['voltage_op'], - s_nom=row['S_nom'], - x=row['X'], - r=row['R'], + s_nom=row['s_nom'], + x=row['x'], + r=row['r'], )) , axis=1) # if not engine.dialect.has_table(engine, 'ego_grid_mv_transformer'): From f0da0f0a52b6c476bc42937258b0cc5a8dc5553a Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 31 Jul 2018 17:41:58 +0200 Subject: [PATCH 029/215] jh-RLI/ding0#268 changed column names in the documentation (id_db) --- doc/usage_details.rst | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 7e3d788b..02579a0c 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -152,11 +152,11 @@ Lines "node1","str","id_db of first node","n/a" "node2","str","id_db of second node","n/a" "length","float","length of line","km" - "U_n","float","nominal voltage","kV" - "R","float","","Ohm/km" - "C","float","inductive resistance at 50Hz","uF/km" - "L","float","","mH/km" - "I_max_th","float","","A" + "u_n","float","nominal voltage","kV" + "r","float","","Ohm/km" + "c","float","inductive resistance at 50Hz","uF/km" + "l","float","","mH/km" + "i_max_th","float","","A" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" LV-Branchtees @@ -166,7 +166,7 @@ LV-Branchtees :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "lv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "None","geometric coordinates", "n/a" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -177,7 +177,7 @@ LV-Generators :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "lv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "type","str","type of generation","{solar; biomass}" "subtype","str","subtype of generation: {solar_roof_mounted, unknown; biomass}","n/a" @@ -192,7 +192,7 @@ LV-Grids :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" - "LV_grid_id", "int","unambiguous number of LV-Grid", "n/a" + "lv_grid_id", "int","unambiguous number of LV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 MULTIPOLYGON" "population","int","population in LV-Grid","?" 
"voltage_nom","float","voltage level of grid","kV" @@ -205,7 +205,7 @@ LV-Loads :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "lv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "None","geometric coordinates", "n/a" "consumption","{''str'': float}","type of load {residential, agricultural, industrial} and corresponding consumption", "n/a" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -231,9 +231,9 @@ LV-Transformers "lv_grid_name", "str", "unambiguous name: 'TransformerDing0_LV_#mvgridid#_#lvgridid#'", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "voltage_op","float","","kV" - "S_nom","float","nominal apparent power","kVA" - "X","float","","Ohm" - "R","float","","Ohm" + "s_nom","float","nominal apparent power","kVA" + "x","float","","Ohm" + "r","float","","Ohm" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" LV-Grids @@ -255,7 +255,7 @@ MV-Branchtees :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -294,7 +294,7 @@ MV-Loads :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POLYGON" "consumption","{''str'': float}","type of load {retail, residential, agricultural, industrial} and corresponding consumption","n/a" "is_aggregated", "boolean", "True if load is aggregated load, else False", "n/a" @@ -307,7 +307,7 @@ MV-Stations :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'MVStationDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -318,10 +318,10 @@ MV-Transformers :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'TransformerDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "mv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "voltage_op","float","","kV" - "S_nom","float","nominal apparent power","kVA" - "X","float","","Ohm" - "R","float","","Ohm" + "s_nom","float","nominal apparent power","kVA" + "x","float","","Ohm" + "r","float","","Ohm" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" \ No newline at end of file From 37b980ea3758c7e0cac15671a0a769bf7e83bbfb Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 31 Jul 2018 17:41:58 +0200 Subject: [PATCH 030/215] #269 changed column names in the documentation (id_db) --- doc/usage_details.rst | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 7e3d788b..02579a0c 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -152,11 +152,11 @@ Lines "node1","str","id_db of first 
node","n/a" "node2","str","id_db of second node","n/a" "length","float","length of line","km" - "U_n","float","nominal voltage","kV" - "R","float","","Ohm/km" - "C","float","inductive resistance at 50Hz","uF/km" - "L","float","","mH/km" - "I_max_th","float","","A" + "u_n","float","nominal voltage","kV" + "r","float","","Ohm/km" + "c","float","inductive resistance at 50Hz","uF/km" + "l","float","","mH/km" + "i_max_th","float","","A" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" LV-Branchtees @@ -166,7 +166,7 @@ LV-Branchtees :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "lv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "None","geometric coordinates", "n/a" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -177,7 +177,7 @@ LV-Generators :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "lv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "type","str","type of generation","{solar; biomass}" "subtype","str","subtype of generation: {solar_roof_mounted, unknown; biomass}","n/a" @@ -192,7 +192,7 @@ LV-Grids :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" - "LV_grid_id", "int","unambiguous number of LV-Grid", "n/a" + "lv_grid_id", "int","unambiguous number of LV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 MULTIPOLYGON" "population","int","population in LV-Grid","?" "voltage_nom","float","voltage level of grid","kV" @@ -205,7 +205,7 @@ LV-Loads :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "lv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "None","geometric coordinates", "n/a" "consumption","{''str'': float}","type of load {residential, agricultural, industrial} and corresponding consumption", "n/a" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -231,9 +231,9 @@ LV-Transformers "lv_grid_name", "str", "unambiguous name: 'TransformerDing0_LV_#mvgridid#_#lvgridid#'", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "voltage_op","float","","kV" - "S_nom","float","nominal apparent power","kVA" - "X","float","","Ohm" - "R","float","","Ohm" + "s_nom","float","nominal apparent power","kVA" + "x","float","","Ohm" + "r","float","","Ohm" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" LV-Grids @@ -255,7 +255,7 @@ MV-Branchtees :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -294,7 +294,7 @@ MV-Loads :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POLYGON" 
"consumption","{''str'': float}","type of load {retail, residential, agricultural, industrial} and corresponding consumption","n/a" "is_aggregated", "boolean", "True if load is aggregated load, else False", "n/a" @@ -307,7 +307,7 @@ MV-Stations :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'MVStationDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" @@ -318,10 +318,10 @@ MV-Transformers :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'TransformerDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "mv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "voltage_op","float","","kV" - "S_nom","float","nominal apparent power","kVA" - "X","float","","Ohm" - "R","float","","Ohm" + "s_nom","float","nominal apparent power","kVA" + "x","float","","Ohm" + "r","float","","Ohm" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" \ No newline at end of file From 0f09591dceea8e78a86a5c87e0558d8cc819ee6c Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 31 Jul 2018 18:14:35 +0200 Subject: [PATCH 031/215] #269 Updated description table (id_db) --- doc/usage_details.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 02579a0c..fabf5260 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -266,7 +266,7 @@ MV-Generators :widths: 15, 10, 10, 30 "id_db", "str", "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "type","str","type of generation: {solar; biomass}","n/a" "subtype","str","subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas}","n/a" From aaecc72e5ceba3c1729550db0d8a1d596179b0e7 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Mon, 13 Aug 2018 18:46:38 +0200 Subject: [PATCH 032/215] fixed grant_db_access --- ding0/tools/results.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 9f9c8674..e5b3e3a0 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -2684,8 +2684,7 @@ def grant_db_access(engine, table, role): schema = table.__table__.schema grant_str = """BEGIN; - GRANT ALL PRIVILEGES ON TABLE {schema}.{table} - TO {role} WITH GRANT OPTION; + ALTER TABLE {schema}.{table} OWNER to {role}; COMMIT;""".format(schema=schema, table=tablename, role=role) From ab18b2ef10cfe15ad7e967e89c28aca66f8f6c08 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 14 Aug 2018 11:18:10 +0200 Subject: [PATCH 033/215] #269 Updated description changed grid_id_db to id_db --- doc/usage_details.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index fabf5260..781114b4 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -146,7 +146,7 @@ Lines :widths: 15, 10, 10, 30 "edge_name", "str", "unambiguous name of edge", "n/a" - "grid_id_db", "int","unambiguous id_db of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge", "n/a" + "id_db", "int","unambiguous 
id_db of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge", "n/a" "type_kind","str","","n/a" "type_name","str","","n/a" "node1","str","id_db of first node","n/a" From 781ae01deb411185f6bfe578da45ee73cd726b50 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 14 Aug 2018 17:07:02 +0200 Subject: [PATCH 034/215] #269 Updated description fixed description up to MV-Generators --- doc/usage_details.rst | 145 +++++++++++++++++++++++------------------- 1 file changed, 80 insertions(+), 65 deletions(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 781114b4..900ef697 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -145,19 +145,22 @@ Lines :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "edge_name", "str", "unambiguous name of edge", "n/a" - "id_db", "int","unambiguous id_db of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge", "n/a" - "type_kind","str","","n/a" - "type_name","str","","n/a" - "node1","str","id_db of first node","n/a" - "node2","str","id_db of second node","n/a" - "length","float","length of line","km" - "u_n","float","nominal voltage","kV" - "r","float","","Ohm/km" - "c","float","inductive resistance at 50Hz","uF/km" - "l","float","","mH/km" - "i_max_th","float","","A" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge", "n/a" + "edge_name", "str", "unambiguous name of edge", "n/a" + "grid_name", "str", "unambiguous name of grid", "n/a" + "node1","str","id_db of first node","n/a" + "node2","str","id_db of second node","n/a" + "type_kind","str","","n/a" + "type_name","str","","n/a" + "length","float","length of line","km" + "u_n","float","nominal voltage","kV" + "c","float","inductive resistance at 50Hz","uF/km" + "l","float","","mH/km" + "r","float","","Ohm/km" + "i_max_th","float","","A" + "geom", "None","geometric coordinates", "n/a" + LV-Branchtees ----------- @@ -165,10 +168,11 @@ LV-Branchtees :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "lv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" - "geom", "None","geometric coordinates", "n/a" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of LV-Grid", "n/a" + "geom", "None","geometric coordinates", "WGS 84, POINT" + "name", "str", "unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" + LV-Generators ----------- @@ -176,14 +180,18 @@ LV-Generators :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "lv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POINT" - "type","str","type of generation","{solar; biomass}" - "subtype","str","subtype of generation: {solar_roof_mounted, unknown; biomass}","n/a" - "v_level","int","voltage level of generator","" - "nominal_capacity","float","nominal capacity","" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of 
LV-Grid", "n/a" + "la_id", "int", "", "" + "name", "str", "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" + "lv_grid_id", "int","unambiguous id_db of LV-Grid", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, POINT" + "type","str","type of generation","{solar; biomass}" + "subtype","str","subtype of generation: {solar_roof_mounted, unknown; biomass}","n/a" + "v_level","int","voltage level of generator","" + "nominal_capacity","float","nominal capacity","" + "is_aggregated", "boolean", "", "" + "weather_cell_id", "int", "", "" LV-Grids ----------- @@ -191,12 +199,12 @@ LV-Grids :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" - "lv_grid_id", "int","unambiguous number of LV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 MULTIPOLYGON" - "population","int","population in LV-Grid","?" - "voltage_nom","float","voltage level of grid","kV" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int", "unambiguous number of LV-Grid", "n/a" + "name", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, MULTIPOLYGON" + "population","int","population in LV-Grid","?" + "voltage_nom","float","voltage level of grid","kV" LV-Loads ----------- @@ -204,11 +212,13 @@ LV-Loads :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "lv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" - "geom", "None","geometric coordinates", "n/a" - "consumption","{''str'': float}","type of load {residential, agricultural, industrial} and corresponding consumption", "n/a" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int", "unambiguous number of LV-Grid", "n/a" + "name", "str", "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" + "lv_grid_id", "int","unambiguous id_db of LV-Grid", "n/a" + "geom", "None", "geometric coordinates", "WGS84, POINT" + "consumption","{''str'': float}","type of load {residential, agricultural, industrial} and corresponding consumption", "n/a" + LV-Stations ----------- @@ -216,10 +226,11 @@ LV-Stations :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id", "int","unambiguous id_db of LV-Grid", "n/a" - "lv_grid_name", "str", "unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid#'", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POINT" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int", "unambiguous number of LV-Grid", "n/a" + "geom", "wkt", "geometric coordinates", "WGS84, POINT" + "name", "str", "unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid#'", "n/a" + LV-Transformers ----------- @@ -227,14 +238,15 @@ LV-Transformers :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id", "int","unambiguous id_db of LV-Grid", "n/a" - "lv_grid_name", "str", "unambiguous name: 'TransformerDing0_LV_#mvgridid#_#lvgridid#'", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POINT" - "voltage_op","float","","kV" - "s_nom","float","nominal apparent power","kVA" - 
"x","float","","Ohm" - "r","float","","Ohm" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of LV-Grid", "n/a" + "geom", "wkt","geometric coordinates", "WGS84 POINT" + "name", "str", "unambiguous name: 'TransformerDing0_LV_#mvgridid#_#lvgridid#'", "n/a" + "voltage_op","float","","kV" + "s_nom","float","nominal apparent power","kVA" + "x","float","","Ohm" + "r","float","","Ohm" + LV-Grids ----------- @@ -242,11 +254,12 @@ LV-Grids :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "lv_grid_id", "int","unambiguous number of LV-Grid", "n/a" - "mv_grid_id", "int","unambiguous number of MV-Grid", "n/a" - "lv_grid_name", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" - "mv_grid_name", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "lv_grid_id", "int","unambiguous number of LV-Grid", "n/a" + "lv_grid_name", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" + "mv_grid_id", "int","unambiguous number of MV-Grid", "n/a" + "mv_grid_name", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + MV-Branchtees ----------- @@ -254,10 +267,10 @@ MV-Branchtees :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POINT" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "name", "str", "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, POINT" MV-Generators ----------- @@ -265,14 +278,16 @@ MV-Generators :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POINT" - "type","str","type of generation: {solar; biomass}","n/a" - "subtype","str","subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas}","n/a" - "v_level","int","voltage level of generator","" - "nominal_capacity","float","nominal capacity","" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int", "unambiguous number of MV-Grid", "n/a" + "id_db", "str", "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "geom", "wkt", "geometric coordinates", "WGS84, POINT" + "type", "str", "type of generation: {solar; biomass}", "n/a" + "subtype", "str", "subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas}", "n/a" + "v_level", "int", "voltage level of generator", "" + "nominal_capacity", "float", "nominal capacity", "" + MV-Grids ----------- From a067ee976ecdc24e1873fa55a0fdbb2488639b91 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 14 Aug 2018 17:16:00 +0200 Subject: [PATCH 035/215] 
#269 Updated description, fixed description up to MV-Stations --- doc/usage_details.rst | 39 ++++++++++++++++++++++----------------- 1 file changed, 22 insertions(+), 17 deletions(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 900ef697..ce2c7fba 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -281,12 +281,14 @@ MV-Generators "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" "id_db", "int", "unambiguous number of MV-Grid", "n/a" - "id_db", "str", "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "name", "str", "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" "geom", "wkt", "geometric coordinates", "WGS84, POINT" "type", "str", "type of generation: {solar; biomass}", "n/a" "subtype", "str", "subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas}", "n/a" "v_level", "int", "voltage level of generator", "" "nominal_capacity", "float", "nominal capacity", "" + "weather_cell_id", "int", "", "" + "is_aggregated", "boolean", "", "" MV-Grids @@ -295,12 +297,13 @@ MV-Grids :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id", "int","unambiguous number of LV-Grid", "n/a" - "mv_grid_name", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 MULTIPOLYGON" - "population","int","population in LV-Grid","?" - "voltage_nom","float","voltage level of grid","kV" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of LV-Grid", "n/a" + "name", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, MULTIPOLYGON" + "population","int","population in LV-Grid","?" 
+ "voltage_nom","float","voltage level of grid","kV" + MV-Loads ----------- @@ -308,12 +311,13 @@ MV-Loads :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POLYGON" - "consumption","{''str'': float}","type of load {retail, residential, agricultural, industrial} and corresponding consumption","n/a" - "is_aggregated", "boolean", "True if load is aggregated load, else False", "n/a" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "name", "str", "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, POLYGON" + "consumption","{''str'': float}","type of load {retail, residential, agricultural, industrial} and corresponding consumption","n/a" + "is_aggregated", "boolean", "True if load is aggregated load, else False", "n/a" + MV-Stations ----------- @@ -321,10 +325,11 @@ MV-Stations :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'MVStationDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "mv_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POINT" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "name", "str", "unambiguous name: 'MVStationDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, POINT" + MV-Transformers ----------- From 3a88e91325229cd03b0ed1415c57de5ef517f190 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 14 Aug 2018 17:21:58 +0200 Subject: [PATCH 036/215] #269 Updated description, fixed all --- doc/usage_details.rst | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index ce2c7fba..9a7bb05e 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -312,7 +312,7 @@ MV-Loads :widths: 15, 10, 10, 30 "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "id_db", "int","unambiguous number of MV-Grid", "n/a" "name", "str", "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" "geom", "wkt","geometric coordinates", "WGS84, POLYGON" "consumption","{''str'': float}","type of load {retail, residential, agricultural, industrial} and corresponding consumption","n/a" @@ -326,7 +326,7 @@ MV-Stations :widths: 15, 10, 10, 30 "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "id_db", "int","unambiguous number of MV-Grid", "n/a" "name", "str", "unambiguous name: 'MVStationDing0_MV_#mvgridid#_#mvgridid#'", "n/a" "geom", "wkt","geometric coordinates", "WGS84, POINT" @@ -337,11 +337,11 @@ MV-Transformers :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'TransformerDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "mv_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POINT" - "voltage_op","float","","kV" - 
"s_nom","float","nominal apparent power","kVA" - "x","float","","Ohm" - "r","float","","Ohm" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" \ No newline at end of file + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of LV-Grid", "n/a" + "name", "str", "unambiguous name: 'TransformerDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, POINT" + "voltage_op","float","","kV" + "s_nom","float","nominal apparent power","kVA" + "x","float","","Ohm" + "r","float","","Ohm" From 9e0fa1264c7eca3a4373f2b2e5e4e8b002183ecc Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 15 Aug 2018 11:39:22 +0200 Subject: [PATCH 037/215] #269 Updated description, minor changes --- doc/usage_details.rst | 197 +++++++++++++++++++++--------------------- 1 file changed, 98 insertions(+), 99 deletions(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 9a7bb05e..967220ae 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -145,21 +145,21 @@ Lines :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int","unambiguous number of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge", "n/a" - "edge_name", "str", "unambiguous name of edge", "n/a" - "grid_name", "str", "unambiguous name of grid", "n/a" - "node1","str","id_db of first node","n/a" - "node2","str","id_db of second node","n/a" - "type_kind","str","","n/a" - "type_name","str","","n/a" - "length","float","length of line","km" - "u_n","float","nominal voltage","kV" - "c","float","inductive resistance at 50Hz","uF/km" - "l","float","","mH/km" - "r","float","","Ohm/km" - "i_max_th","float","","A" - "geom", "None","geometric coordinates", "n/a" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge", "n/a" + "edge_name", "str", "unambiguous name of edge", "n/a" + "grid_name", "str", "unambiguous name of grid", "n/a" + "node1","str","id_db of first node","n/a" + "node2","str","id_db of second node","n/a" + "type_kind","str","","n/a" + "type_name","str","","n/a" + "length","float","length of line","km" + "u_n","float","nominal voltage","kV" + "c","float","inductive resistance at 50Hz","uF/km" + "l","float","","mH/km" + "r","float","","Ohm/km" + "i_max_th","float","","A" + "geom", "None","geometric coordinates", "n/a" LV-Branchtees @@ -168,10 +168,10 @@ LV-Branchtees :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int","unambiguous number of LV-Grid", "n/a" - "geom", "None","geometric coordinates", "WGS 84, POINT" - "name", "str", "unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of LV-Grid", "n/a" + "geom", "None","geometric coordinates", "WGS 84, POINT" + "name", "str", "unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" LV-Generators @@ -180,18 +180,18 @@ LV-Generators :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int","unambiguous number of LV-Grid", "n/a" - "la_id", "int", "", "" - "name", "str", 
"unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "lv_grid_id", "int","unambiguous id_db of LV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84, POINT" - "type","str","type of generation","{solar; biomass}" - "subtype","str","subtype of generation: {solar_roof_mounted, unknown; biomass}","n/a" - "v_level","int","voltage level of generator","" - "nominal_capacity","float","nominal capacity","" - "is_aggregated", "boolean", "", "" - "weather_cell_id", "int", "", "" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of LV-Grid", "n/a" + "la_id", "int", "", "" + "name", "str", "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" + "lv_grid_id", "int","unambiguous id_db of LV-Grid", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, POINT" + "type","str","type of generation","{solar; biomass}" + "subtype","str","subtype of generation: {solar_roof_mounted, unknown; biomass}","n/a" + "v_level","int","voltage level of generator","" + "nominal_capacity","float","nominal capacity","" + "is_aggregated", "boolean", "True if load is aggregated load, else False", "n/a" + "weather_cell_id", "int", "unambiguous number of the corresponding weather cell", "n/a" LV-Grids ----------- @@ -199,12 +199,12 @@ LV-Grids :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int", "unambiguous number of LV-Grid", "n/a" - "name", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" - "geom", "wkt","geometric coordinates", "WGS84, MULTIPOLYGON" - "population","int","population in LV-Grid","?" - "voltage_nom","float","voltage level of grid","kV" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int", "unambiguous number of LV-Grid", "n/a" + "name", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, MULTIPOLYGON" + "population","int","population in LV-Grid","?" 
+ "voltage_nom","float","voltage level of grid","kV" LV-Loads ----------- @@ -212,12 +212,12 @@ LV-Loads :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int", "unambiguous number of LV-Grid", "n/a" - "name", "str", "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "lv_grid_id", "int","unambiguous id_db of LV-Grid", "n/a" - "geom", "None", "geometric coordinates", "WGS84, POINT" - "consumption","{''str'': float}","type of load {residential, agricultural, industrial} and corresponding consumption", "n/a" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int", "unambiguous number of LV-Grid", "n/a" + "name", "str", "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" + "lv_grid_id", "int","unambiguous id_db of LV-Grid", "n/a" + "geom", "None", "geometric coordinates", "WGS84, POINT" + "consumption","{''str'': float}","type of load {residential, agricultural, industrial} and corresponding consumption", "n/a" LV-Stations @@ -226,10 +226,10 @@ LV-Stations :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int", "unambiguous number of LV-Grid", "n/a" - "geom", "wkt", "geometric coordinates", "WGS84, POINT" - "name", "str", "unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid#'", "n/a" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int", "unambiguous number of LV-Grid", "n/a" + "geom", "wkt", "geometric coordinates", "WGS84, POINT" + "name", "str", "unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid#'", "n/a" LV-Transformers @@ -238,14 +238,14 @@ LV-Transformers :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int","unambiguous number of LV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POINT" - "name", "str", "unambiguous name: 'TransformerDing0_LV_#mvgridid#_#lvgridid#'", "n/a" - "voltage_op","float","","kV" - "s_nom","float","nominal apparent power","kVA" - "x","float","","Ohm" - "r","float","","Ohm" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of LV-Grid", "n/a" + "name", "str", "unambiguous name: 'TransformerDing0_LV_#mvgridid#_#lvgridid#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84 POINT" + "voltage_op","float","","kV" + "s_nom","float","nominal apparent power","kVA" + "x","float","","Ohm" + "r","float","","Ohm" LV-Grids @@ -254,11 +254,11 @@ LV-Grids :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "lv_grid_id", "int","unambiguous number of LV-Grid", "n/a" - "lv_grid_name", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" - "mv_grid_id", "int","unambiguous number of MV-Grid", "n/a" - "mv_grid_name", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "lv_grid_id", "int","unambiguous number of LV-Grid", "n/a" + "lv_grid_name", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" + "mv_grid_id", "int","unambiguous number of MV-Grid", "n/a" + "mv_grid_name", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" 
MV-Branchtees @@ -267,10 +267,10 @@ MV-Branchtees :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int","unambiguous id_db of MV-Grid", "n/a" - "name", "str", "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "geom", "wkt","geometric coordinates", "WGS84, POINT" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "name", "str", "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, POINT" MV-Generators ----------- @@ -278,17 +278,16 @@ MV-Generators :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int", "unambiguous number of MV-Grid", "n/a" - "name", "str", "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "geom", "wkt", "geometric coordinates", "WGS84, POINT" - "type", "str", "type of generation: {solar; biomass}", "n/a" - "subtype", "str", "subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas}", "n/a" - "v_level", "int", "voltage level of generator", "" - "nominal_capacity", "float", "nominal capacity", "" - "weather_cell_id", "int", "", "" - "is_aggregated", "boolean", "", "" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int", "unambiguous number of MV-Grid", "n/a" + "name", "str", "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "geom", "wkt", "geometric coordinates", "WGS84, POINT" + "type", "str", "type of generation: {solar; biomass}", "n/a" + "subtype", "str", "subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas}", "n/a" + "v_level", "int", "voltage level of generator", "" + "nominal_capacity", "float", "nominal capacity", "" + "is_aggregated", "boolean", "True if load is aggregated load, else False", "n/a" + "weather_cell_id", "int", "unambiguous number of the corresponding weather cell", "n/a" MV-Grids @@ -297,12 +296,12 @@ MV-Grids :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int","unambiguous number of LV-Grid", "n/a" - "name", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "geom", "wkt","geometric coordinates", "WGS84, MULTIPOLYGON" - "population","int","population in LV-Grid","?" - "voltage_nom","float","voltage level of grid","kV" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of LV-Grid", "n/a" + "name", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, MULTIPOLYGON" + "population","int","population in LV-Grid","?" 
+ "voltage_nom","float","voltage level of grid","kV" MV-Loads @@ -311,12 +310,12 @@ MV-Loads :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int","unambiguous number of MV-Grid", "n/a" - "name", "str", "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "geom", "wkt","geometric coordinates", "WGS84, POLYGON" - "consumption","{''str'': float}","type of load {retail, residential, agricultural, industrial} and corresponding consumption","n/a" - "is_aggregated", "boolean", "True if load is aggregated load, else False", "n/a" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of MV-Grid", "n/a" + "name", "str", "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, POLYGON" + "consumption","{''str'': float}","type of load {retail, residential, agricultural, industrial} and corresponding consumption","n/a" + "is_aggregated", "boolean", "True if load is aggregated load, else False", "n/a" MV-Stations @@ -325,10 +324,10 @@ MV-Stations :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int","unambiguous number of MV-Grid", "n/a" - "name", "str", "unambiguous name: 'MVStationDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "geom", "wkt","geometric coordinates", "WGS84, POINT" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of MV-Grid", "n/a" + "name", "str", "unambiguous name: 'MVStationDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, POINT" MV-Transformers @@ -337,11 +336,11 @@ MV-Transformers :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int","unambiguous number of LV-Grid", "n/a" - "name", "str", "unambiguous name: 'TransformerDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "geom", "wkt","geometric coordinates", "WGS84, POINT" - "voltage_op","float","","kV" - "s_nom","float","nominal apparent power","kVA" - "x","float","","Ohm" - "r","float","","Ohm" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of LV-Grid", "n/a" + "name", "str", "unambiguous name: 'TransformerDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, POINT" + "voltage_op","float","","kV" + "s_nom","float","nominal apparent power","kVA" + "x","float","","Ohm" + "r","float","","Ohm" From d97c0c9baaca2a40717bb81c4f620416bd3ded54 Mon Sep 17 00:00:00 2001 From: nesnoj Date: Mon, 20 Aug 2018 16:53:58 +0200 Subject: [PATCH 038/215] fix DB export of MVGD which have no genos of GeneratorFlucuatingDing0 The column 'weather_cell_id' was therefore not created which leads to an error during export. 
It is added now prior to export and set to np.nan --- ding0/tools/results.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index e5b3e3a0..363090bd 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -1899,8 +1899,7 @@ def aggregate_loads(la_center, aggr): } # MVGenerator - elif (isinstance(node, GeneratorDing0) or isinstance(node, - GeneratorFluctuatingDing0)): + elif isinstance(node, (GeneratorDing0, GeneratorFluctuatingDing0)): if node.subtype == None: subtype = 'other' else: @@ -1920,7 +1919,7 @@ def aggregate_loads(la_center, aggr): 'nominal_capacity': node.capacity, 'run_id': run_id, 'is_aggregated': False, - 'weather_cell_id': node.weather_cell_id, + 'weather_cell_id': node.weather_cell_id } else: type = node.type @@ -1936,7 +1935,9 @@ def aggregate_loads(la_center, aggr): 'v_level': node.v_level, 'nominal_capacity': node.capacity, 'run_id': run_id, - 'is_aggregated': False} + 'is_aggregated': False, + 'weather_cell_id': np.nan + } # MVBranchTees elif isinstance(node, MVCableDistributorDing0): @@ -2106,8 +2107,7 @@ def aggregate_loads(la_center, aggr): # geom = wkt_dumps(node.geo_data) # LVGenerator - if (isinstance(node, GeneratorDing0) or isinstance(node, - GeneratorFluctuatingDing0)): + if isinstance(node, (GeneratorDing0, GeneratorFluctuatingDing0)): if node.subtype == None: subtype = 'other' else: @@ -2150,6 +2150,7 @@ def aggregate_loads(la_center, aggr): 'nominal_capacity': node.capacity, 'run_id': run_id, 'is_aggregated': node.lv_load_area.is_aggregated, + 'weather_cell_id': np.nan } # LVcd From a0c0b6812649e4ee06cc1cdd3125a7fea1f1fc36 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 23 Aug 2018 15:57:20 +0200 Subject: [PATCH 039/215] jh-RLI/ding0#270 initial commit --- doc/usage_details.rst | 4 +- ..._ding0_hvmv_transformer_metadata_v1.3.json | 51 ++++++++++++++++ .../ego_grid_ding0_line_metadata_v1.3.json | 59 +++++++++++++++++++ ...grid_ding0_lv_branchtee_metadata_v1.3.json | 48 +++++++++++++++ ...grid_ding0_lv_generator_metadata_v1.3.json | 56 ++++++++++++++++++ .../ego_grid_ding0_lv_grid_metadata_v1.3.json | 48 +++++++++++++++ .../ego_grid_ding0_lv_load_metadata_v1.3.json | 50 ++++++++++++++++ ...o_grid_ding0_lv_station_metadata_v1.3.json | 48 +++++++++++++++ ...grid_ding0_mv_branchtee_metadata_v1.3.json | 48 +++++++++++++++ ...ding0_mv_circuitbreaker_metadata_v1.3.json | 49 +++++++++++++++ ...grid_ding0_mv_generator_metadata_v1.3.json | 54 +++++++++++++++++ .../ego_grid_ding0_mv_grid_metadata_v1.3.json | 50 ++++++++++++++++ .../ego_grid_ding0_mv_load_metadata_v1.3.json | 49 +++++++++++++++ ...o_grid_ding0_mv_station_metadata_v1.3.json | 48 +++++++++++++++ ...grid_ding0_mvlv_mapping_metadata_v1.3.json | 49 +++++++++++++++ ..._ding0_mvlv_transformer_metadata_v1.3.json | 52 ++++++++++++++++ ...o_grid_ding0_versioning_metadata_v1.3.json | 46 +++++++++++++++ 17 files changed, 807 insertions(+), 2 deletions(-) create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json create mode 
100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 967220ae..6b1d438c 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -297,10 +297,10 @@ MV-Grids :widths: 15, 10, 10, 30 "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int","unambiguous number of LV-Grid", "n/a" + "id_db", "int","unambiguous number of MV-Grid", "n/a" "name", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" "geom", "wkt","geometric coordinates", "WGS84, MULTIPOLYGON" - "population","int","population in LV-Grid","?" + "population","int","population in MV-Grid","?" "voltage_nom","float","voltage level of grid","kV" diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json new file mode 100644 index 00000000..216c37c3 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json @@ -0,0 +1,51 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_hvmv_transformer", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": 
"integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","discription": "FIXME","unit": "string"}, + {"name": "voltage_op","discription": "FIXME","unit": "float"}, + {"name": "s_nom","discription": "nominal apparent power in kVA","unit": "float"}, + {"name": "x","discription": "in Ohm","unit": "float"}, + {"name": "r","discription": "in Ohm","unit": "float"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json new file mode 100644 index 00000000..9c084040 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json @@ -0,0 +1,59 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_line", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","discription": "unambiguous number of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge","unit": "integer"}, + {"name": "edge_name","discription": "unambiguous name of edge","unit": "string"}, + {"name": "grid_name","discription": "unambiguous name of grid","unit": "string"}, + {"name": "node1","discription": "id_db of first node","unit": 
"string"}, + {"name": "node2","discription": "id_db of second node","unit": "string"}, + {"name": "type_kind","discription": "n/a","unit": "string"}, + {"name": "type_name","discription": "n/a","unit": "string"}, + {"name": "length","discription": "length of line in km","unit": "float"}, + {"name": "u_n","discription": "nominal voltage in kV","unit": "float"}, + {"name": "c","discription": "inductive resistance at 50Hz in uF/km","unit": "float"}, + {"name": "l","discription": "in mH/km","unit": "float"}, + {"name": "r","discription": "in Ohm/km","unit": "float"}, + {"name": "i_max_th","discription": "in A","unit": "float"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 LINESTRING"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json new file mode 100644 index 00000000..1221a056 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json @@ -0,0 +1,48 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_lv_branchtee", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, + 
{"name": "name","discription": "unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber","unit": "string "} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json new file mode 100644 index 00000000..63b76c77 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json @@ -0,0 +1,56 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_lv_generator", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "la_id","discription": "FIXME","unit": "integer"}, + {"name": "name","discription": "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "lv_grid_id","discription": "unambiguous id_db of LV-Grid","unit": "integer"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84, POINT"}, + {"name": "type","discription": "type of generation","{solar; biomass}","unit": "string"}, + {"name": "subtype","discription": "subtype of generation: {solar_roof_mounted, unknown; biomass}","unit": "string"}, + {"name": "v_level","discription": "voltage level of generator","unit": 
"integer"}, + {"name": "nominal_capacity","discription": "nominal capacity","unit": "float"}, + {"name": "is_aggregated","discription": "True if load is aggregated load, else False","unit": "boolean"}; + {"name": "weather_cell_id","discription": "unambiguous number of the corresponding weather cell","unit": "integer"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json new file mode 100644 index 00000000..99745483 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json @@ -0,0 +1,48 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_lv_grid", "format": "PostgreSQL", "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "name","discription": "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#","unit": "string"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 MULTIPOLYGON"}, + {"name": "population","discription": "population in LV-Grid","unit": "integer"}, + {"name": "voltage_nom","discription": "voltage level of grid in kV","unit": "float "} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + 
"_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json new file mode 100644 index 00000000..cd18da5b --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json @@ -0,0 +1,50 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_lv_load", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "name","discription": "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "lv_grid_id","discription": "unambiguous id_db of LV-Grid","unit": "integer"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "consumption","discription": "type of load {residential, agricultural, industrial} and corresponding consumption","unit": "string "} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": 
"Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json new file mode 100644 index 00000000..3d443767 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json @@ -0,0 +1,48 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "", + "name": "", + "version": "", + "url": "", + "instruction": "", + "copyright": ""}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date": "", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_lv_station", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, + {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","discription": "FIXME","unit": "string"}, +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json new file mode 100644 index 00000000..287f7e3e --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json @@ -0,0 +1,48 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": 
"none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mv_branchtee", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","discription": "geometric coordinates","unit": "integer"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","discription": "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"} ] } ] +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json new file mode 100644 index 00000000..b8207abb --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json @@ -0,0 +1,49 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + 
"version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mv_circuitbreaker", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","discription": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","discription": "FIXME","unit": "string"}, + {"name": "status","discription": "FIXME","unit": "string "} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json new file mode 100644 index 00000000..da3fd741 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json @@ -0,0 +1,54 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mv_generator", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique 
numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","discription": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "name","discription": "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "type","discription": "type of generation: {solar; biomass}","unit": "string"}, + {"name": "subtype","discription": "subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas}","unit": "string"}, + {"name": "v_level","discription": "voltage level of generator","unit": "integer"}, + {"name": "nominal_capacity","discription": "nominal capacity","unit": "float"}, + {"name": "weather_cell_id","discription": "unambiguous number of the corresponding weather cell","unit": "integer"}, + {"name": "is_aggregated","discription": "True if load is aggregated load, else False","unit": "boolean"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json new file mode 100644 index 00000000..d4380d41 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json @@ -0,0 +1,50 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mv_grid", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": 
"unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","discription": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 MULTIPOLYGON"}, + {"name": "name","discription": "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'","unit": "string"}, + {"name": "population","discription": "population in MV-Grid","unit": "integer"}, + {"name": "voltage_nom","discription": "voltage level of grid in kV","unit": "float" } ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json new file mode 100644 index 00000000..70e4c713 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json @@ -0,0 +1,49 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": +{"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mv_load", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "name","discription": "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 GEOMETRY"}, + {"name": "is_aggregated","discription": "True if load is aggregated load, else False","unit": "boolean"}, 
+ {"name": "consumption","discription": "type of load {retail, residential, agricultural, industrial} and corresponding consumption","unit": "string" } ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json new file mode 100644 index 00000000..4575dd57 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json @@ -0,0 +1,48 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mv_station", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "geom","discription": "geometric coordinates","unit": "wkt"}, + {"name": "name","discription": "unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid#","unit": "string"}, + {"name": "","discription": "","unit": ""} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + 
"_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json new file mode 100644 index 00000000..bf7c51a2 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json @@ -0,0 +1,49 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mvlv_mapping", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, + {"name": "lv_grid_id","discription": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "lv_grid_name","discription": "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'","unit": "string"}, + {"name": "mv_grid_id","discription": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "mv_grid_name","discription": "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'","unit": "string"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json new file mode 100644 index 00000000..4d0251b7 --- /dev/null +++ 
b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json @@ -0,0 +1,52 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger"], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mvlv_transformer", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": "unambiguous unique number","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, + {"name": "id_db","description": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","description": "FIXME","unit": "string"}, + {"name": "voltage_op","description": "in kV","unit": "float"}, + {"name": "s_nom","description": "nominal apparent power in kVA","unit": "float"}, + {"name": "x","description": "in Ohm","unit": "float"}, + {"name": "r","description": "in Ohm","unit": "float"}] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json new file mode 100644 index 00000000..5ec0fc54 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json @@ -0,0 +1,46 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger"], +"spatial": + {"location":
"none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_versioning", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "description","discription": "FIXME","unit": "string"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": "http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } \ No newline at end of file From d4db6463c87a59d8872d7676e2738049d978b93f Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 23 Aug 2018 16:04:02 +0200 Subject: [PATCH 040/215] jh-RLI/ding0#270 minor changes --- .../ego_grid_ding0_hvmv_transformer_metadata_v1.3.json | 2 +- .../ego_grid_ding0_line_metadata_v1.3.json | 2 +- .../ego_grid_ding0_lv_branchtee_metadata_v1.3.json | 2 +- .../ego_grid_ding0_lv_generator_metadata_v1.3.json | 2 +- .../ego_grid_ding0_lv_grid_metadata_v1.3.json | 2 +- .../ego_grid_ding0_lv_load_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mv_branchtee_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mv_generator_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mv_grid_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mv_load_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mv_station_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mvlv_mapping_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mvlv_transformer_metadata_v1.3.json | 2 +- .../ego_grid_ding0_versioning_metadata_v1.3.json | 2 +- 15 files changed, 15 insertions(+), 15 deletions(-) diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json index 216c37c3..3756f4b7 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json +++ 
b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date":"", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_hvmv_transformer", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json index 9c084040..27d57335 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_line", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json index 1221a056..a6d8132a 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_lv_branchtee", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json index 63b76c77..7106dbfe 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_lv_generator", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json index 99745483..afc52dc4 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_lv_grid", "format": "PostgreSQL", "fields": [ {"name": "id","discription": "unambiguous 
unique numer","unit": "integer"}, diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json index cd18da5b..81da365f 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_lv_load", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json index 287f7e3e..42dda963 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_branchtee", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json index b8207abb..2a7a62b6 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_circuitbreaker", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json index da3fd741..80386b4e 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_generator", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json index d4380d41..d8088ea0 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": 
"Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_grid", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json index 70e4c713..20e14088 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_load", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json index 4575dd57..557921e7 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_station", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json index bf7c51a2..5db5285a 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mvlv_mapping", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json index 4d0251b7..bb078fb0 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mvlv_transformer", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json index 
5ec0fc54..b04a3c3e 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_versioning", "format": "PostgreSQL", From 136a5c3b55a8ca2452d7534c35e867f5fb9090a7 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 23 Aug 2018 15:57:20 +0200 Subject: [PATCH 041/215] #270 initial commit --- doc/usage_details.rst | 4 +- ..._ding0_hvmv_transformer_metadata_v1.3.json | 51 ++++++++++++++++ .../ego_grid_ding0_line_metadata_v1.3.json | 59 +++++++++++++++++++ ...grid_ding0_lv_branchtee_metadata_v1.3.json | 48 +++++++++++++++ ...grid_ding0_lv_generator_metadata_v1.3.json | 56 ++++++++++++++++++ .../ego_grid_ding0_lv_grid_metadata_v1.3.json | 48 +++++++++++++++ .../ego_grid_ding0_lv_load_metadata_v1.3.json | 50 ++++++++++++++++ ...o_grid_ding0_lv_station_metadata_v1.3.json | 48 +++++++++++++++ ...grid_ding0_mv_branchtee_metadata_v1.3.json | 48 +++++++++++++++ ...ding0_mv_circuitbreaker_metadata_v1.3.json | 49 +++++++++++++++ ...grid_ding0_mv_generator_metadata_v1.3.json | 54 +++++++++++++++++ .../ego_grid_ding0_mv_grid_metadata_v1.3.json | 50 ++++++++++++++++ .../ego_grid_ding0_mv_load_metadata_v1.3.json | 49 +++++++++++++++ ...o_grid_ding0_mv_station_metadata_v1.3.json | 48 +++++++++++++++ ...grid_ding0_mvlv_mapping_metadata_v1.3.json | 49 +++++++++++++++ ..._ding0_mvlv_transformer_metadata_v1.3.json | 52 ++++++++++++++++ ...o_grid_ding0_versioning_metadata_v1.3.json | 46 +++++++++++++++ 17 files changed, 807 insertions(+), 2 deletions(-) create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json create mode 100644 ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 967220ae..6b1d438c 100644 --- 
a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -297,10 +297,10 @@ MV-Grids :widths: 15, 10, 10, 30 "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" - "id_db", "int","unambiguous number of LV-Grid", "n/a" + "id_db", "int","unambiguous number of MV-Grid", "n/a" "name", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" "geom", "wkt","geometric coordinates", "WGS84, MULTIPOLYGON" - "population","int","population in LV-Grid","?" + "population","int","population in MV-Grid","?" "voltage_nom","float","voltage level of grid","kV" diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json new file mode 100644 index 00000000..216c37c3 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json @@ -0,0 +1,51 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_hvmv_transformer", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","discription": "FIXME","unit": "string"}, + {"name": "voltage_op","discription": "FIXME","unit": "float"}, + {"name": "s_nom","discription": "nominal apparent power in kVA","unit": "float"}, + {"name": "x","discription": "in Ohm","unit": "float"}, + {"name": "r","discription": "in Ohm","unit": "float"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 
(JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json new file mode 100644 index 00000000..9c084040 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json @@ -0,0 +1,59 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_line", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","discription": "unambiguous number of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge","unit": "integer"}, + {"name": "edge_name","discription": "unambiguous name of edge","unit": "string"}, + {"name": "grid_name","discription": "unambiguous name of grid","unit": "string"}, + {"name": "node1","discription": "id_db of first node","unit": "string"}, + {"name": "node2","discription": "id_db of second node","unit": "string"}, + {"name": "type_kind","discription": "n/a","unit": "string"}, + {"name": "type_name","discription": "n/a","unit": "string"}, + {"name": "length","discription": "length of line in km","unit": "float"}, + {"name": "u_n","discription": "nominal voltage in kV","unit": "float"}, + {"name": "c","discription": "inductive resistance at 50Hz in uF/km","unit": "float"}, + {"name": "l","discription": "in mH/km","unit": "float"}, + {"name": "r","discription": "in Ohm/km","unit": "float"}, + {"name": "i_max_th","discription": "in A","unit": "float"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 LINESTRING"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": 
"https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json new file mode 100644 index 00000000..1221a056 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json @@ -0,0 +1,48 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_lv_branchtee", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "name","discription": "unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber","unit": "string "} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json new file mode 100644 
index 00000000..63b76c77 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json @@ -0,0 +1,56 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger"], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_lv_generator", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": "unambiguous unique number","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "la_id","description": "FIXME","unit": "integer"}, + {"name": "name","description": "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "lv_grid_id","description": "unambiguous id_db of LV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84, POINT"}, + {"name": "type","description": "type of generation: {solar; biomass}","unit": "string"}, + {"name": "subtype","description": "subtype of generation: {solar_roof_mounted, unknown; biomass}","unit": "string"}, + {"name": "v_level","description": "voltage level of generator","unit": "integer"}, + {"name": "nominal_capacity","description": "nominal capacity","unit": "float"}, + {"name": "is_aggregated","description": "True if load is aggregated load, else False","unit": "boolean"}, + {"name": "weather_cell_id","description": "unambiguous number of the corresponding weather cell","unit": "integer"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git
a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json new file mode 100644 index 00000000..99745483 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json @@ -0,0 +1,48 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_lv_grid", "format": "PostgreSQL", "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "name","discription": "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#","unit": "string"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 MULTIPOLYGON"}, + {"name": "population","discription": "population in LV-Grid","unit": "integer"}, + {"name": "voltage_nom","discription": "voltage level of grid in kV","unit": "float "} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json new file mode 100644 index 00000000..cd18da5b --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json @@ -0,0 +1,50 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and 
low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_lv_load", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "name","discription": "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "lv_grid_id","discription": "unambiguous id_db of LV-Grid","unit": "integer"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "consumption","discription": "type of load {residential, agricultural, industrial} and corresponding consumption","unit": "string "} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json new file mode 100644 index 00000000..3d443767 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json @@ -0,0 +1,48 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": 
"https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "", + "name": "", + "version": "", + "url": "", + "instruction": "", + "copyright": ""}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date": "", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_lv_station", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, + {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","discription": "FIXME","unit": "string"}, +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json new file mode 100644 index 00000000..287f7e3e --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json @@ -0,0 +1,48 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", 
"email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mv_branchtee", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","discription": "geometric coordinates","unit": "integer"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","discription": "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"} ] } ] +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json new file mode 100644 index 00000000..b8207abb --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json @@ -0,0 +1,49 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mv_circuitbreaker", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","discription": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","discription": "FIXME","unit": "string"}, + {"name": 
"status","discription": "FIXME","unit": "string "} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json new file mode 100644 index 00000000..da3fd741 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json @@ -0,0 +1,54 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mv_generator", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","discription": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "name","discription": "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "type","discription": "type of generation: {solar; biomass}","unit": "string"}, + {"name": "subtype","discription": "subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas}","unit": "string"}, + {"name": "v_level","discription": "voltage level of generator","unit": "integer"}, + {"name": "nominal_capacity","discription": "nominal capacity","unit": "float"}, + {"name": "weather_cell_id","discription": "unambiguous number of the corresponding weather cell","unit": "integer"}, + {"name": 
"is_aggregated","discription": "True if load is aggregated load, else False","unit": "boolean"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json new file mode 100644 index 00000000..d4380d41 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json @@ -0,0 +1,50 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mv_grid", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","discription": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 MULTIPOLYGON"}, + {"name": "name","discription": "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'","unit": "string"}, + {"name": "population","discription": "population in MV-Grid","unit": "integer"}, + {"name": "voltage_nom","discription": "voltage level of grid in kV","unit": "float" } ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " 
http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json new file mode 100644 index 00000000..70e4c713 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json @@ -0,0 +1,49 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": +{"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mv_load", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "name","discription": "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 GEOMETRY"}, + {"name": "is_aggregated","discription": "True if load is aggregated load, else False","unit": "boolean"}, + {"name": "consumption","discription": "type of load {retail, residential, agricultural, industrial} and corresponding consumption","unit": "string" } ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json 
b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json new file mode 100644 index 00000000..4575dd57 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json @@ -0,0 +1,48 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mv_station", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "geom","discription": "geometric coordinates","unit": "wkt"}, + {"name": "name","discription": "unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid#","unit": "string"}, + {"name": "","discription": "","unit": ""} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json new file mode 100644 index 00000000..bf7c51a2 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json @@ -0,0 +1,49 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", 
+ "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mvlv_mapping", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, + {"name": "lv_grid_id","discription": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "lv_grid_name","discription": "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'","unit": "string"}, + {"name": "mv_grid_id","discription": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "mv_grid_name","discription": "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'","unit": "string"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json new file mode 100644 index 00000000..4d0251b7 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json @@ -0,0 +1,52 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": 
"","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_mvlv_transformer", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, + {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","discription": "FIXME","unit": "string"}, + {"name": "voltage_op","discription": "in kV","unit": "float"}, + {"name": "s_nom","discription": "nominal apparent power in kVA","unit": "float"}, + {"name": "x","discription": "in Ohm","unitfloat"}, + {"name": "r","discription": "in Ohm","unit": "float"}] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json new file mode 100644 index 00000000..5ec0fc54 --- /dev/null +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json @@ -0,0 +1,46 @@ +{"title": "DING0 - Result data", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""}, + {"name": "","description": "","url": "","license": "","copyright": ""} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "© Reiner Lemoine 
Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "", "email": "", "date", "comment": ""} ], +"resources": [ + {"name": "model_draft.ego_grid_ding0_versioning", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "description","discription": "FIXME","unit": "string"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": "http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use 'none'"} } } \ No newline at end of file From 17ecc79d37da62c91296bf7ec5c573bd54d13e82 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 23 Aug 2018 16:04:02 +0200 Subject: [PATCH 042/215] #270 minor changes --- .../ego_grid_ding0_hvmv_transformer_metadata_v1.3.json | 2 +- .../ego_grid_ding0_line_metadata_v1.3.json | 2 +- .../ego_grid_ding0_lv_branchtee_metadata_v1.3.json | 2 +- .../ego_grid_ding0_lv_generator_metadata_v1.3.json | 2 +- .../ego_grid_ding0_lv_grid_metadata_v1.3.json | 2 +- .../ego_grid_ding0_lv_load_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mv_branchtee_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mv_generator_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mv_grid_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mv_load_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mv_station_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mvlv_mapping_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mvlv_transformer_metadata_v1.3.json | 2 +- .../ego_grid_ding0_versioning_metadata_v1.3.json | 2 +- 15 files changed, 15 insertions(+), 15 deletions(-) diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json index 216c37c3..3756f4b7 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date":"", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_hvmv_transformer", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json index 9c084040..27d57335 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": 
"jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_line", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json index 1221a056..a6d8132a 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_lv_branchtee", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json index 63b76c77..7106dbfe 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_lv_generator", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json index 99745483..afc52dc4 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_lv_grid", "format": "PostgreSQL", "fields": [ {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json index cd18da5b..81da365f 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_lv_load", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json 
b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json index 287f7e3e..42dda963 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_branchtee", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json index b8207abb..2a7a62b6 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_circuitbreaker", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json index da3fd741..80386b4e 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_generator", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json index d4380d41..d8088ea0 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_grid", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json index 70e4c713..20e14088 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": 
"", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_load", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json index 4575dd57..557921e7 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_station", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json index bf7c51a2..5db5285a 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mvlv_mapping", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json index 4d0251b7..bb078fb0 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mvlv_transformer", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json index 5ec0fc54..b04a3c3e 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json @@ -25,7 +25,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date", "comment": ""} ], + {"name": "", "email": "", "date": "", "comment": ""} ], "resources": [ {"name": "model_draft.ego_grid_ding0_versioning", "format": "PostgreSQL", From a2f0447c0af0c2db1a2f53a325cabb2733c6f45d Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 23 Aug 2018 17:26:37 +0200 Subject: [PATCH 043/215] #270 minor changes --- .../ego_grid_ding0_mv_station_metadata_v1.3.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json index 557921e7..64a145e4 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json @@ -45,4 +45,5 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } \ No newline at end of file + "_none": "If not applicable use 'none'"} } } + From a2fb5c1121096efa82e5aff981e99aa74ac51236 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 23 Aug 2018 17:27:45 +0200 Subject: [PATCH 044/215] #270 minor changes --- .../ego_grid_ding0_mv_station_metadata_v1.3.json | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json index 64a145e4..557921e7 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json @@ -45,5 +45,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } - + "_none": "If not applicable use 'none'"} } } \ No newline at end of file From 896ffa78898025fff24772838027ee3d2106013a Mon Sep 17 00:00:00 2001 From: boltbeard Date: Thu, 6 Sep 2018 14:10:53 +0200 Subject: [PATCH 045/215] restructuring of export functions (taken out of results.py), first draft, untested --- ding0/tools/db_export.py | 674 ++++++++++++++++++++++++ ding0/tools/export.py | 611 +++++++++++++++++++++ ding0/tools/file_export.py | 53 ++ ding0/tools/results.py | 1021 ------------------------------------ 4 files changed, 1338 insertions(+), 1021 deletions(-) create mode 100644 ding0/tools/db_export.py create mode 100644 ding0/tools/export.py create mode 100644 ding0/tools/file_export.py diff --git a/ding0/tools/db_export.py b/ding0/tools/db_export.py new file mode 100644 index 00000000..8afd0210 --- /dev/null +++ b/ding0/tools/db_export.py @@ -0,0 +1,674 @@ +"""This file is part of DINGO, the DIstribution Network GeneratOr. +DINGO is a tool to generate synthetic medium and low voltage power +distribution grids based on open data. 
+ +It is developed in the project open_eGo: https://openegoproject.wordpress.com + +DING0 lives at github: https://github.com/openego/ding0/ +The documentation is available on RTD: http://ding0.readthedocs.io""" + +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" +__author__ = "nesnoj, gplssm" + +import numpy as np +import pandas as pd + +import re + +from sqlalchemy import create_engine +from egoio.db_tables import model_draft as md + +from sqlalchemy import ARRAY, BigInteger, Boolean, CheckConstraint, Column, Date, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, Integer, JSON, Numeric, SmallInteger, String, Table, Text, UniqueConstraint, text +from geoalchemy2.types import Geometry, Raster +from sqlalchemy.orm import relationship +from sqlalchemy.dialects.postgresql.hstore import HSTORE +from sqlalchemy.dialects.postgresql.base import OID +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.dialects.postgresql import ARRAY, DOUBLE_PRECISION, INTEGER, NUMERIC, TEXT, BIGINT, TIMESTAMP, VARCHAR + + +Base = declarative_base() +metadata = Base.metadata + + +def export_network_to_oedb(session, schema, table, tabletype, srid): + dataset = [] + engine = create_engine("sqlite:///myexample.db") + print("Exporting table type : {}".format(tabletype)) + if tabletype == 'line': + table.apply(lambda row: + session.add(schema.EgoGridDing0Line( + run_id=row['run_id'], + id_db=row['id'], + edge_name=row['edge_name'], + grid_name=row['grid_name'], + node1=row['node1'], + node2=row['node2'], + type_kind=row['type_kind'], + type_name=row['type_name'], + length=row['length'], + u_n=row['U_n'], + c=row['C'], + l=row['L'], + r=row['R'], + i_max_th=row['I_max_th'], + geom=row['geom'], + )) + , axis=1) + + elif tabletype == 'lv_cd': + table.apply(lambda row: + session.add(schema.EgoGridDing0LvBranchtee( + run_id=row['run_id'], + id_db=row['id'], + name=row['name'], + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, + )) + , axis=1) + + elif tabletype == 'lv_gen': + table.apply(lambda row: + session.add(schema.EgoGridDing0LvGenerator( + run_id=row['run_id'], + id_db=row['id'], + la_id=row['la_id'], + name=row['name'], + lv_grid_id=str(row['lv_grid_id']), + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, + type=row['type'], + subtype=row['subtype'], + v_level=row['v_level'], + nominal_capacity=row['nominal_capacity'], + is_aggregated=row['is_aggregated'], + weather_cell_id=row['weather_cell_id'] if not(pd.isnull(row[ + 'weather_cell_id'])) else None, + + )) + , axis=1) + + elif tabletype == 'lv_load': + table.apply(lambda row: + session.add(schema.EgoGridDing0LvLoad( + run_id=row['run_id'], + id_db=row['id'], + name=row['name'], + lv_grid_id=row['lv_grid_id'], + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, + consumption=row['consumption'] + )) + , axis=1) + + elif tabletype == 'lv_grid': + table.apply(lambda row: + session.add(schema.EgoGridDing0LvGrid( + run_id=row['run_id'], + id_db=row['id'], + name=row['name'], + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, + population=row['population'], + voltage_nom=row['voltage_nom'], + )) + , axis=1) + + elif tabletype == 'lv_station': + table.apply(lambda row: + session.add(schema.EgoGridDing0LvStation( + run_id=row['run_id'], + id_db=row['id'], + name=row['name'], + geom="SRID={};{}".format(srid, 
row['geom']) if row[ + 'geom'] else None, + )) + , axis=1) + + elif tabletype == 'mvlv_trafo': + table.apply(lambda row: + session.add(schema.EgoGridDing0MvlvTransformer( + run_id=row['run_id'], + id_db=row['id'], + name=row['name'], + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, + voltage_op=row['voltage_op'], + s_nom=row['S_nom'], + x=row['X'], + r=row['R'], + )) + , axis=1) + + elif tabletype == 'mvlv_mapping': + table.apply(lambda row: + session.add(schema.EgoGridDing0MvlvMapping( + run_id=row['run_id'], + lv_grid_id=row['lv_grid_id'], + lv_grid_name=row['lv_grid_name'], + mv_grid_id=row['mv_grid_id'], + mv_grid_name=row['mv_grid_name'], + )) + , axis=1) + + elif tabletype == 'mv_cd': + table.apply(lambda row: + session.add(schema.EgoGridDing0MvBranchtee( + run_id=row['run_id'], + id_db=row['id'], + name=row['name'], + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, + )) + , axis=1) + + elif tabletype == 'mv_cb': + table.apply(lambda row: + session.add(schema.EgoGridDing0MvCircuitbreaker( + run_id=row['run_id'], + id_db=row['id'], + name=row['name'], + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, + status=row['status'], + )) + , axis=1) + + elif tabletype == 'mv_gen': + table.apply(lambda row: + session.add(schema.EgoGridDing0MvGenerator( + run_id=row['run_id'], + id_db=row['id'], + name=row['name'], + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, + type=row['type'], + subtype=row['subtype'], + v_level=row['v_level'], + nominal_capacity=row['nominal_capacity'], + is_aggregated=row['is_aggregated'], + weather_cell_id=row['weather_cell_id'] if not(pd.isnull(row[ + 'weather_cell_id'])) else None, + )) + , axis=1) + + elif tabletype == 'mv_load': + table.apply(lambda row: + session.add(schema.EgoGridDing0MvLoad( + run_id=row['run_id'], + id_db=row['id'], + name=row['name'], + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, + is_aggregated=row['is_aggregated'], + consumption=row['consumption'], + )) + , axis=1) + + elif tabletype == 'mv_grid': + table.apply(lambda row: + session.add(schema.EgoGridDing0MvGrid( + run_id=row['run_id'], + id_db=row['id'], + name=row['name'], + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, + population=row['population'], + voltage_nom=row['voltage_nom'], + )) + , axis=1) + + elif tabletype == 'mv_station': + table.apply(lambda row: + session.add(schema.EgoGridDing0MvStation( + run_id=row['run_id'], + id_db=row['id'], + name=row['name'], + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, + )) + , axis=1) + + elif tabletype == 'hvmv_trafo': + table.apply(lambda row: + session.add(schema.EgoGridDing0HvmvTransformer( + run_id=row['run_id'], + id_db=row['id'], + name=row['name'], + geom="SRID={};{}".format(srid, row['geom']) if row[ + 'geom'] else None, + voltage_op=row['voltage_op'], + s_nom=row['S_nom'], + x=row['X'], + r=row['R'], + )) + , axis=1) + # if not engine.dialect.has_table(engine, 'ego_grid_mv_transformer'): + # print('helloworld') + + session.commit() + + +def export_data_to_db(session, schema, run_id, metadata_json, srid, + lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, + lv_loads, + mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, + mv_loads, lines, mvlv_mapping): + # only for testing + # engine = create_engine('sqlite:///:memory:') + + # get the run_id from model_draft.ego_grid_ding0_versioning + # compare the run_id from table to the current run_id + + # 
oedb_versioning_query = session.query( + # schema.EgoGridDing0Versioning.run_id, + # schema.EgoGridDing0Versioning.description + # ).filter(schema.EgoGridDing0Versioning.run_id == run_id) + # + # oedb_versioning = pd.read_sql_query(oedb_versioning_query.statement, + # session.bind) + oedb_versioning = pd.DataFrame() + + if oedb_versioning.empty: + # if the run_id doesn't exist then + # create entry into ego_grid_ding0_versioning: + metadata_df = pd.DataFrame({'run_id': run_id, + 'description': metadata_json}, + index=[0]) + metadata_df.apply(lambda row: + session.add(schema.EgoGridDing0Versioning( + run_id=row['run_id'], + description=row['description'], + )) + , axis=1) + session.commit() + + export_network_to_oedb(session, lv_grid, 'lv_grid', srid) + export_network_to_oedb(session, lv_gen, 'lv_gen', srid) + export_network_to_oedb(session, lv_cd, 'lv_cd', srid) + export_network_to_oedb(session, lv_stations, 'lv_station', srid) + export_network_to_oedb(session, mvlv_trafos, 'mvlv_trafo', srid) + export_network_to_oedb(session, lv_loads, 'lv_load', srid) + export_network_to_oedb(session, mv_grid, 'mv_grid', srid) + export_network_to_oedb(session, mv_gen, 'mv_gen', srid) + export_network_to_oedb(session, mv_cb, 'mv_cb', srid) + export_network_to_oedb(session, mv_cd, 'mv_cd', srid) + export_network_to_oedb(session, mv_stations, 'mv_station', srid) + export_network_to_oedb(session, hvmv_trafos, 'hvmv_trafo', srid) + export_network_to_oedb(session, mv_loads, 'mv_load', srid) + export_network_to_oedb(session, lines, 'line', srid) + export_network_to_oedb(session, mvlv_mapping, 'mvlv_mapping', srid) + else: + raise KeyError("run_id already present! No tables are input!") + + +def create_ding0_db_tables(engine, schema,): + tables = [schema.EgoGridDing0Versioning, + schema.EgoGridDing0Line, + schema.EgoGridDing0LvBranchtee, + schema.EgoGridDing0LvGenerator, + schema.EgoGridDing0LvLoad, + schema.EgoGridDing0LvGrid, + schema.EgoGridDing0LvStation, + schema.EgoGridDing0MvlvTransformer, + schema.EgoGridDing0MvlvMapping, + schema.EgoGridDing0MvBranchtee, + schema.EgoGridDing0MvCircuitbreaker, + schema.EgoGridDing0MvGenerator, + schema.EgoGridDing0MvLoad, + schema.EgoGridDing0MvGrid, + schema.EgoGridDing0MvStation, + schema.EgoGridDing0HvmvTransformer] + + for tab in tables: + tab().__table__.create(bind=engine, checkfirst=True) + + +def drop_ding0_db_tables(engine, schema): + tables = [schema.EgoGridDing0Line, + schema.EgoGridDing0LvBranchtee, + schema.EgoGridDing0LvGenerator, + schema.EgoGridDing0LvLoad, + schema.EgoGridDing0LvGrid, + schema.EgoGridDing0LvStation, + schema.EgoGridDing0MvlvTransformer, + schema.EgoGridDing0MvlvMapping, + schema.EgoGridDing0MvBranchtee, + schema.EgoGridDing0MvCircuitbreaker, + schema.EgoGridDing0MvGenerator, + schema.EgoGridDing0MvLoad, + schema.EgoGridDing0MvGrid, + schema.EgoGridDing0MvStation, + schema.EgoGridDing0HvmvTransformer, + schema.EgoGridDing0Versioning] + + print("Please confirm that you would like to drop the following tables:") + for n, tab in enumerate(tables): + print("{: 3d}. 
{}".format(n, tab)) + + print("Please confirm with either of the choices below:\n" + + "- yes\n" + + "- no\n" + + "- the indexes to drop in the format 0, 2, 3, 5") + confirmation = input( + "Please type the choice completely as there is no default choice.") + if re.fullmatch('[Yy]es', confirmation): + for tab in tables: + tab().__table__.drop(bind=engine, checkfirst=True) + elif re.fullmatch('[Nn]o', confirmation): + print("Cancelled dropping of tables") + else: + try: + indlist = confirmation.split(',') + indlist = list(map(int, indlist)) + print("Please confirm deletion of the following tables:") + tablist = np.array(tables)[indlist].tolist() + for n, tab in enumerate(tablist): + print("{: 3d}. {}".format(n, tab)) + con2 = input("Please confirm with either of the choices below:\n" + + "- yes\n" + + "- no") + if re.fullmatch('[Yy]es', con2): + for tab in tablist: + tab().__table__.drop(bind=engine, checkfirst=True) + elif re.fullmatch('[Nn]o', con2): + print("Cancelled dropping of tables") + else: + print("The input is unclear, no action taken") + except ValueError: + print("Confirmation unclear, no action taken") + + +def db_tables_change_owner(engine, schema): + tables = [schema.EgoGridDing0Line, + schema.EgoGridDing0LvBranchtee, + schema.EgoGridDing0LvGenerator, + schema.EgoGridDing0LvLoad, + schema.EgoGridDing0LvGrid, + schema.EgoGridDing0LvStation, + schema.EgoGridDing0MvlvTransformer, + schema.EgoGridDing0MvlvMapping, + schema.EgoGridDing0MvBranchtee, + schema.EgoGridDing0MvCircuitbreaker, + schema.EgoGridDing0MvGenerator, + schema.EgoGridDing0MvLoad, + schema.EgoGridDing0MvGrid, + schema.EgoGridDing0MvStation, + schema.EgoGridDing0HvmvTransformer, + schema.EgoGridDing0Versioning] + + + def change_owner(engine, table, role): + r"""Gives access to database users/ groups + Parameters + ---------- + session : sqlalchemy session object + A valid connection to a database + table : sqlalchmy Table class definition + The database table + role : str + database role that access is granted to + """ + tablename = table.__table__.name + schema = table.__table__.schema + + grant_str = """ALTER TABLE {schema}.{table} + OWNER TO {role};""".format(schema=schema, table=tablename, + role=role) + + # engine.execute(grant_str) + engine.execution_options(autocommit=True).execute(grant_str) + + # engine.echo=True + + for tab in tables: + change_owner(engine, tab, 'oeuser') + + engine.close() + + +class EgoGridDing0Versioning(Base): + __tablename__ = 'ego_grid_ding0_versioning' + __table_args__ = {'schema': 'model_draft'} + + id = Column(Integer, primary_key=True) + run_id = Column(BigInteger, unique=True, nullable=False) + description = Column(String(3000)) + + +class EgoGridDing0MvStation(Base): + __tablename__ = 'ego_grid_ding0_mv_station' + __table_args__ = {'schema': 'model_draft'} + + id = Column(BigInteger, primary_key=True) + run_id = Column(BigInteger, + ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), + nullable=False) + id_db = Column(BigInteger) + geom = Column(Geometry('POINT', 4326)) + name = Column(String(100)) + + +class EgoGridDing0HvmvTransformer(Base): + __tablename__ = 'ego_grid_ding0_hvmv_transformer' + __table_args__ = {'schema': 'model_draft'} + + id = Column(Integer, primary_key=True) + run_id = Column(BigInteger, + ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), + nullable=False) + id_db = Column(BigInteger) + geom = Column(Geometry('POINT', 4326)) + name = Column(String(100)) + voltage_op = Column(Float(10)) + s_nom = Column(Float(10)) + x = 
Column(Float(10)) + r = Column(Float(10)) + + +class EgoGridDing0Line(Base): + __tablename__ = 'ego_grid_ding0_line' + __table_args__ = {'schema': 'model_draft'} + + id = Column(Integer, primary_key=True) + run_id = Column(BigInteger, + ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), + nullable=False) + id_db = Column(BigInteger) + edge_name = Column(String(100)) + grid_name = Column(String(100)) + node1 = Column(String(100)) + node2 = Column(String(100)) + type_kind = Column(String(20)) + type_name = Column(String(30)) + length = Column(Float(10)) + u_n = Column(Float(10)) + c = Column(Float(10)) + l = Column(Float(10)) + r = Column(Float(10)) + i_max_th = Column(Float(10)) + geom = Column(Geometry('LINESTRING', 4326)) + + +class EgoGridDing0LvBranchtee(Base): + __tablename__ = 'ego_grid_ding0_lv_branchtee' + __table_args__ = {'schema': 'model_draft'} + + id = Column(Integer, primary_key=True) + run_id = Column(BigInteger, + ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), + nullable=False) + id_db = Column(BigInteger) + geom = Column(Geometry('POINT', 4326)) + name = Column(String(100)) + + +class EgoGridDing0LvGenerator(Base): + __tablename__ = 'ego_grid_ding0_lv_generator' + __table_args__ = {'schema': 'model_draft'} + + id = Column(Integer, primary_key=True) + run_id = Column(BigInteger, + ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), + nullable=False) + id_db = Column(BigInteger) + la_id = Column(BigInteger) + name = Column(String(100)) + lv_grid_id = Column(BigInteger) + geom = Column(Geometry('POINT', 4326)) + type = Column(String(22)) + subtype = Column(String(22)) + v_level = Column(Integer) + nominal_capacity = Column(Float(10)) + weather_cell_id = Column(BigInteger) + is_aggregated = Column(Boolean) + + +class EgoGridDing0LvGrid(Base): + __tablename__ = 'ego_grid_ding0_lv_grid' + __table_args__ = {'schema': 'model_draft'} + + id = Column(Integer, primary_key=True) + run_id = Column(BigInteger, + ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), + nullable=False) + id_db = Column(BigInteger) + name = Column(String(100)) + geom = Column(Geometry('MULTIPOLYGON', 4326)) #Todo: check if right srid? 
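The "Todo: check if right srid?" above refers to the SRID hard-coded into the Geometry columns; elsewhere in this patch the export code tags geometries with the srid read from nw.config['geo']['srid'] before writing them. The following is a minimal sketch, not part of the patch, of the two tagging patterns the export functions use, assuming shapely and geoalchemy2 are installed:

    from shapely.wkt import loads as wkt_loads
    from geoalchemy2.shape import from_shape

    def tag_wkt_with_srid(wkt_str, srid=4326):
        # EWKT string, as built in export_network_to_oedb(): "SRID=4326;MULTIPOLYGON(...)"
        return "SRID={};{}".format(srid, wkt_str)

    def wkt_to_wkb_element(wkt_str, srid=4326):
        # geoalchemy2 WKBElement, as built for line geometries via from_shape()
        return from_shape(wkt_loads(wkt_str), srid=srid)
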
+ population = Column(BigInteger) + voltage_nom = Column(Float(10)) #Todo: Check Datatypes + + +class EgoGridDing0LvLoad(Base): + __tablename__ = 'ego_grid_ding0_lv_load' + __table_args__ = {'schema': 'model_draft'} + + id = Column(Integer, primary_key=True) + run_id = Column(BigInteger, + ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), + nullable=False) + id_db = Column(BigInteger) + name = Column(String(100)) + lv_grid_id = Column(Integer) + geom = Column(Geometry('POINT', 4326)) + consumption = Column(String(100)) + + +class EgoGridDing0MvBranchtee(Base): + __tablename__ = 'ego_grid_ding0_mv_branchtee' + __table_args__ = {'schema': 'model_draft'} + + id = Column(Integer, primary_key=True) + run_id = Column(BigInteger, + ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), + nullable=False) + id_db = Column(BigInteger) + geom = Column(Geometry('POINT', 4326)) + name = Column(String(100)) + +class EgoGridDing0MvCircuitbreaker(Base): + __tablename__ = 'ego_grid_ding0_mv_circuitbreaker' + __table_args__ = {'schema': 'model_draft'} + + id = Column(Integer, primary_key=True) + run_id = Column(BigInteger, + ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), + nullable=False) + id_db = Column(BigInteger) + geom = Column(Geometry('POINT', 4326)) + name = Column(String(100)) + status = Column(String(10)) + +class EgoGridDing0MvGenerator(Base): + __tablename__ = 'ego_grid_ding0_mv_generator' + __table_args__ = {'schema': 'model_draft'} + + id = Column(Integer, primary_key=True) + run_id = Column(BigInteger, + ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), + nullable=False) + id_db = Column(BigInteger) + name = Column(String(100)) + geom = Column(Geometry('POINT', 4326)) + type = Column(String(22)) + subtype = Column(String(22)) + v_level = Column(Integer) + nominal_capacity = Column(Float(10)) + weather_cell_id = Column(BigInteger) + is_aggregated = Column(Boolean) + + +class EgoGridDing0MvGrid(Base): + __tablename__ = 'ego_grid_ding0_mv_grid' + __table_args__ = {'schema': 'model_draft'} + + id = Column(Integer, primary_key=True) + run_id = Column(BigInteger, + ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), + nullable=False) + id_db = Column(BigInteger) + geom = Column(Geometry('MULTIPOLYGON', 4326)) #Todo: check if right srid? 
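The declarative classes in this file are what create_ding0_db_tables() and drop_ding0_db_tables() above iterate over. A minimal sketch of what each loop iteration does, shown for a subset of the classes and assuming a placeholder PostgreSQL/PostGIS connection string (the Geometry columns require PostGIS):

    from sqlalchemy import create_engine

    # placeholder DSN; a PostGIS-enabled database is assumed
    engine = create_engine('postgresql+psycopg2://user:password@localhost:5432/oedb')

    # create_ding0_db_tables(): the versioning table goes first because the
    # other tables carry a foreign key on its run_id column
    for cls in (EgoGridDing0Versioning, EgoGridDing0MvStation, EgoGridDing0MvGrid):
        cls().__table__.create(bind=engine, checkfirst=True)

    # drop_ding0_db_tables(): reversed order, so the versioning table goes last
    for cls in (EgoGridDing0MvGrid, EgoGridDing0MvStation, EgoGridDing0Versioning):
        cls().__table__.drop(bind=engine, checkfirst=True)
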
+ name = Column(String(100)) + population = Column(BigInteger) + voltage_nom = Column(Float(10)) #Todo: Check Datatypes + + +class EgoGridDing0MvLoad(Base): + __tablename__ = 'ego_grid_ding0_mv_load' + __table_args__ = {'schema': 'model_draft'} + + id = Column(Integer, primary_key=True) + run_id = Column(BigInteger, + ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), + nullable=False) + id_db = Column(BigInteger) + name = Column(String(100)) + geom = Column(Geometry('GEOMETRY', 4326)) + is_aggregated = Column(Boolean) + consumption = Column(String(100)) + + +class EgoGridDing0MvlvMapping(Base): + __tablename__ = 'ego_grid_ding0_mvlv_mapping' + __table_args__ = {'schema': 'model_draft'} + + id = Column(Integer, primary_key=True) + run_id = Column(BigInteger, + ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), + nullable=False) + lv_grid_id = Column(BigInteger) + lv_grid_name = Column(String(100)) + mv_grid_id = Column(BigInteger) + mv_grid_name = Column(String(100)) + + +class EgoGridDing0LvStation(Base): + __tablename__ = 'ego_grid_ding0_lv_station' + __table_args__ = {'schema': 'model_draft'} + + id = Column(Integer, primary_key=True) + run_id = Column(BigInteger, + ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), + nullable=False) + id_db = Column(BigInteger) + geom = Column(Geometry('POINT', 4326)) + name = Column(String(100)) + + +class EgoGridDing0MvlvTransformer(Base): + __tablename__ = 'ego_grid_ding0_mvlv_transformer' + __table_args__ = {'schema': 'model_draft'} + + id = Column(Integer, primary_key=True) + run_id = Column(BigInteger, + ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), + nullable=False) + id_db = Column(BigInteger) + geom = Column(Geometry('POINT', 4326)) + name = Column(String(100)) + voltage_op = Column(Float(10)) + s_nom = Column(Float(10)) + x = Column(Float(10)) + r = Column(Float(10)) diff --git a/ding0/tools/export.py b/ding0/tools/export.py new file mode 100644 index 00000000..41056fbe --- /dev/null +++ b/ding0/tools/export.py @@ -0,0 +1,611 @@ +"""This file is part of DINGO, the DIstribution Network GeneratOr. +DINGO is a tool to generate synthetic medium and low voltage power +distribution grids based on open data. 
+ +It is developed in the project open_eGo: https://openegoproject.wordpress.com + +DING0 lives at github: https://github.com/openego/ding0/ +The documentation is available on RTD: http://ding0.readthedocs.io""" + +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" +__author__ = "nesnoj, gplssm" + +import numpy as np +import pandas as pd + +import json + + +from ding0.core import NetworkDing0 +from ding0.core import GeneratorDing0, GeneratorFluctuatingDing0 +from ding0.core import LVCableDistributorDing0, MVCableDistributorDing0 +from ding0.core import MVStationDing0, LVStationDing0 +from ding0.core import CircuitBreakerDing0 +from ding0.core.network.loads import LVLoadDing0, MVLoadDing0 +from ding0.core import LVLoadAreaCentreDing0 + +from geoalchemy2.shape import from_shape +from shapely.geometry import Point, MultiPoint, MultiLineString, LineString +from shapely.geometry import shape, mapping + + +def export_network(nw, mode='', run_id=None): + """ + Export all nodes and lines of the network nw as DataFrames + + Parameters + ---------- + nw: :any:`list` of NetworkDing0 + The MV grid(s) to be studied + mode: str + If 'MV' export only medium voltage nodes and lines + If 'LV' export only low voltage nodes and lines + else, exports MV and LV nodes and lines + + Returns + ------- + pandas.DataFrame + nodes_df : Dataframe containing nodes and its attributes + pandas.DataFrame + lines_df : Dataframe containing lines and its attributes + """ + + # close circuit breakers + nw.control_circuit_breakers(mode='close') + # srid + srid = str(int(nw.config['geo']['srid'])) + ############################## + # check what to do + lv_info = True + mv_info = True + if mode == 'LV': + mv_info = False + if mode == 'MV': + lv_info = False + ############################## + # from datetime import datetime + if not run_id: + run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") + metadata_json = json.dumps(nw.metadata) + ############################## + ############################# + # go through the grid collecting info + lvgrid_idx = 0 + lv_grid_dict = {} + lvloads_idx = 0 + lv_loads_dict = {} + mvgrid_idx = 0 + mv_grid_dict = {} + mvloads_idx = 0 + mv_loads_dict = {} + mvgen_idx = 0 + mv_gen_dict = {} + mvcb_idx = 0 + mvcb_dict = {} + mvcd_idx = 0 + mv_cd_dict = {} + # mvstations_idx = 0 + mv_stations_dict = {} + mvtrafos_idx = 0 + hvmv_trafos_dict = {} + lvgen_idx = 0 + lv_gen_dict = {} + lvcd_idx = 0 + lv_cd_dict = {} + lvstations_idx = 0 + lv_stations_dict = {} + lvtrafos_idx = 0 + mvlv_trafos_dict = {} + areacenter_idx = 0 + areacenter_dict = {} + lines_idx = 0 + lines_dict = {} + LVMVmapping_idx = 0 + mvlv_mapping_dict = {} + + def aggregate_loads(la_center, aggr): + """Aggregate consumption in load area per sector + Parameters + ---------- + la_center: LVLoadAreaCentreDing0 + Load area center object from Ding0 + Returns + ------- + """ + for s in ['retail', 'industrial', 'agricultural', 'residential']: + if s not in aggr['load']: + aggr['load'][s] = {} + + for t in ['nominal', 'peak']: + if t not in aggr['load'][s]: + aggr['load'][s][t] = 0 + + aggr['load']['retail']['nominal'] += sum( + [_.sector_consumption_retail + for _ in la_center.lv_load_area._lv_grid_districts]) + aggr['load']['industrial']['nominal'] += sum( + [_.sector_consumption_industrial + for _ in la_center.lv_load_area._lv_grid_districts]) + aggr['load']['agricultural']['nominal'] += 
sum( + [_.sector_consumption_agricultural + for _ in la_center.lv_load_area._lv_grid_districts]) + aggr['load']['residential']['nominal'] += sum( + [_.sector_consumption_residential + for _ in la_center.lv_load_area._lv_grid_districts]) + + aggr['load']['retail']['peak'] += sum( + [_.peak_load_retail + for _ in la_center.lv_load_area._lv_grid_districts]) + aggr['load']['industrial']['peak'] += sum( + [_.peak_load_industrial + for _ in la_center.lv_load_area._lv_grid_districts]) + aggr['load']['agricultural']['peak'] += sum( + [_.peak_load_agricultural + for _ in la_center.lv_load_area._lv_grid_districts]) + aggr['load']['residential']['peak'] += sum( + [_.peak_load_residential + for _ in la_center.lv_load_area._lv_grid_districts]) + + return aggr + + for mv_district in nw.mv_grid_districts(): + from shapely.wkt import dumps as wkt_dumps + mv_grid_id = mv_district.mv_grid.id_db + mv_grid_name = '_'.join( + [str(mv_district.mv_grid.__class__.__name__), 'MV', str(mv_grid_id), + str(mv_district.mv_grid.id_db)]) + + if mv_info: + lv_grid_id = 0 + + # MV-grid + # ToDo: geom <- Polygon + mvgrid_idx += 1 + mv_grid_dict[mvgrid_idx] = { + 'id': mv_grid_id, + 'name': mv_grid_name, + 'geom': wkt_dumps(mv_district.geo_data), + 'population': + sum([_.zensus_sum + for _ in + mv_district._lv_load_areas + if not np.isnan(_.zensus_sum)]), + 'voltage_nom': mv_district.mv_grid.v_level, # in kV + 'run_id': run_id + } + + # MV station + mv_station = mv_district.mv_grid._station + mv_station_name = '_'.join( + ['MVStationDing0', 'MV', str(mv_station.id_db), + str(mv_station.id_db)]) + mv_stations_dict[0] = { + 'id': mv_district.mv_grid.id_db, + 'name': mv_station_name, + 'geom': mv_station.geo_data, + 'run_id': run_id} + + # Trafos MV + for t in mv_station.transformers(): + mvtrafos_idx += 1 + hvmv_trafos_dict[mvtrafos_idx] = { + 'id': mv_station.id_db, + 'geom': mv_station.geo_data, + 'name': '_'.join( + ['MVTransformerDing0', 'MV', str(mv_station.id_db), + str(mv_station.id_db)]), + 'voltage_op': t.v_level, + 'S_nom': t.s_max_a, + 'X': t.x, + 'R': t.r, + 'run_id': run_id, + } + + # MV grid components + for node in mv_district.mv_grid.graph_nodes_sorted(): + geom = wkt_dumps(node.geo_data) + + # LVStation + if isinstance(node, LVStationDing0): + if not node.lv_load_area.is_aggregated: + lvstations_idx += 1 + lv_grid_name = '_'.join( + ['LVGridDing0', 'LV', str(node.id_db), + str(node.id_db)]) + lv_stations_dict[lvstations_idx] = { + 'id': node.id_db, + 'name': '_'.join([node.__class__.__name__, + 'MV', str(mv_grid_id), + str(node.id_db)]), + 'geom': geom, + 'run_id': run_id, + } + + # LV-MV mapping + LVMVmapping_idx += 1 + mvlv_mapping_dict[LVMVmapping_idx] = { + 'mv_grid_id': mv_grid_id, + 'mv_grid_name': mv_grid_name, + 'lv_grid_id': node.id_db, + 'lv_grid_name': lv_grid_name, + 'run_id': run_id, + } + + # Trafos LV + for t in node.transformers(): + lvtrafos_idx += 1 + mvlv_trafos_dict[lvtrafos_idx] = { + 'id': node.id_db, + 'geom': geom, + 'name': '_'.join(['LVTransformerDing0', 'LV', + str(node.id_db), + str(node.id_db)]), + 'voltage_op': t.v_level, + 'S_nom': t.s_max_a, + 'X': t.x, + 'R': t.r, + 'run_id': run_id, + } + + # MVGenerator + elif isinstance(node, (GeneratorDing0, GeneratorFluctuatingDing0)): + if node.subtype == None: + subtype = 'other' + else: + subtype = node.subtype + if isinstance(node, GeneratorFluctuatingDing0): + type = node.type + mvgen_idx += 1 + mv_gen_dict[mvgen_idx] = { + 'id': node.id_db, + 'name': '_'.join(['GeneratorFluctuatingDing0', 'MV', + str(mv_grid_id), + 
str(node.id_db)]), + 'geom': geom, + 'type': type, + 'subtype': subtype, + 'v_level': node.v_level, + 'nominal_capacity': node.capacity, + 'run_id': run_id, + 'is_aggregated': False, + 'weather_cell_id': node.weather_cell_id + } + else: + type = node.type + mvgen_idx += 1 + mv_gen_dict[mvgen_idx] = { + 'id': node.id_db, + 'name': '_'.join( + ['GeneratorDing0', 'MV', str(mv_grid_id), + str(node.id_db)]), + 'geom': geom, + 'type': type, + 'subtype': subtype, + 'v_level': node.v_level, + 'nominal_capacity': node.capacity, + 'run_id': run_id, + 'is_aggregated': False, + 'weather_cell_id': np.nan + } + + # MVBranchTees + elif isinstance(node, MVCableDistributorDing0): + mvcd_idx += 1 + mv_cd_dict[mvcd_idx] = { + 'id': node.id_db, + 'name': '_'.join( + [str(node.__class__.__name__), 'MV', + str(mv_grid_id), str(node.id_db)]), + 'geom': geom, + 'run_id': run_id, + } + + # LoadAreaCentre + elif isinstance(node, LVLoadAreaCentreDing0): + + # type = 'Load area center of aggregated load area' + + areacenter_idx += 1 + aggr_lines = 0 + + aggr = {'generation': {}, 'load': {}, 'aggregates': []} + + # Determine aggregated load in MV grid + # -> Implement once loads in Ding0 MV grids exist + + # Determine aggregated load in LV grid + aggr = aggregate_loads(node, aggr) + + # Collect metadata of aggregated load areas + aggr['aggregates'] = { + 'population': node.lv_load_area.zensus_sum, + 'geom': node.lv_load_area.geo_area} + aggr_line_type = nw._static_data['MV_cables'].iloc[ + nw._static_data['MV_cables']['I_max_th'].idxmax()] + geom = wkt_dumps(node.geo_data) + + for aggr_node in aggr: + if aggr_node == 'generation': + pass + + elif aggr_node == 'load': + for type in aggr['load']: + mvloads_idx += 1 + aggr_line_id = 100 * node.lv_load_area.id_db + mvloads_idx + 1 + mv_aggr_load_name = '_'.join( + ['Load_aggregated', str(type), + repr(mv_district.mv_grid), + # str(node.lv_load_area.id_db)]) + str(aggr_line_id)]) + mv_loads_dict[mvloads_idx] = { + # Exception: aggregated loads get a string as id + 'id': aggr_line_id, #node.lv_load_area.id_db, #mv_aggr_load_name, + 'name': mv_aggr_load_name, + 'geom': geom, + 'consumption': json.dumps( + {type: aggr['load'][type]['nominal']}), + 'is_aggregated': True, + 'run_id': run_id, + } + + lines_idx += 1 + aggr_lines += 1 + edge_name = '_'.join( + ['line_aggr_load_la', + str(node.lv_load_area.id_db), str(type), + # str(node.lv_load_area.id_db)]) + str(aggr_line_id)]) + lines_dict[lines_idx] = { + 'id': aggr_line_id, #node.lv_load_area.id_db, + 'edge_name': edge_name, + 'grid_name': mv_grid_name, + 'type_name': aggr_line_type.name, + 'type_kind': 'cable', + 'length': 1e-3, # in km + 'U_n': aggr_line_type.U_n, + 'I_max_th': aggr_line_type.I_max_th, + 'R': aggr_line_type.R, + 'L': aggr_line_type.L, + 'C': aggr_line_type.C, + 'node1': mv_aggr_load_name, + 'node2': mv_station_name, + 'run_id': run_id, + 'geom': from_shape(LineString([mv_station.geo_data, + mv_station.geo_data]), + srid=srid) + } + + # TODO: eventually remove export of DisconnectingPoints from export + # DisconnectingPoints + elif isinstance(node, CircuitBreakerDing0): + mvcb_idx += 1 + mvcb_dict[mvcb_idx] = { + 'id': node.id_db, + 'name': '_'.join([str(node.__class__.__name__), 'MV', + str(mv_grid_id), str(node.id_db)]), + 'geom': geom, + 'status': node.status, + 'run_id': run_id, + } + else: + type = 'Unknown' + + # MVedges + for branch in mv_district.mv_grid.graph_edges(): + # geom = wkt_dumps(node.geo_data) + geom = from_shape(LineString([branch['adj_nodes'][0].geo_data, + 
branch['adj_nodes'][1].geo_data]), + srid=srid) + if not any([isinstance(branch['adj_nodes'][0], + LVLoadAreaCentreDing0), + isinstance(branch['adj_nodes'][1], + LVLoadAreaCentreDing0)]): + lines_idx += 1 + lines_dict[lines_idx] = { + 'id': branch['branch'].id_db, + 'edge_name': '_'.join( + [branch['branch'].__class__.__name__, + str(branch['branch'].id_db)]), + 'grid_name': mv_grid_name, + 'type_name': branch['branch'].type['name'], + 'type_kind': branch['branch'].kind, + 'length': branch['branch'].length / 1e3, + 'U_n': branch['branch'].type['U_n'], + 'I_max_th': branch['branch'].type['I_max_th'], + 'R': branch['branch'].type['R'], + 'L': branch['branch'].type['L'], + 'C': branch['branch'].type['C'], + 'node1': '_'.join( + [str(branch['adj_nodes'][0].__class__.__name__), + 'MV', str(mv_grid_id), + str(branch['adj_nodes'][0].id_db)]), + 'node2': '_'.join( + [str(branch['adj_nodes'][1].__class__.__name__), + 'MV', str(mv_grid_id), + str(branch['adj_nodes'][1].id_db)]), + 'run_id': run_id, + 'geom': from_shape(LineString([branch['adj_nodes'][0].geo_data, + branch['adj_nodes'][1].geo_data]), + srid=srid), + } + + if lv_info: + for LA in mv_district.lv_load_areas(): + for lv_district in LA.lv_grid_districts(): + + if not lv_district.lv_grid.grid_district.lv_load_area.is_aggregated: + lv_grid_id = lv_district.lv_grid.id_db + lv_grid_name = '_'.join( + [str(lv_district.lv_grid.__class__.__name__), 'LV', + str(lv_district.lv_grid.id_db), + str(lv_district.lv_grid.id_db)]) + + lvgrid_idx += 1 + lv_grid_dict[lvgrid_idx] = { + 'id': lv_district.lv_grid.id_db, + 'name': lv_grid_name, + 'geom': wkt_dumps(lv_district.geo_data), + 'population': lv_district.population, + 'voltage_nom': lv_district.lv_grid.v_level / 1e3, + 'run_id': run_id + } + + # geom = from_shape(Point(lv_district.lv_grid.station().geo_data), srid=srid) + # geom = wkt_dumps(lv_district.geo_data)# lv_grid.station() #ding0_lv_grid.grid_district.geo_data + for node in lv_district.lv_grid.graph_nodes_sorted(): + # geom = wkt_dumps(node.geo_data) + + # LVGenerator + if isinstance(node, (GeneratorDing0, GeneratorFluctuatingDing0)): + if node.subtype == None: + subtype = 'other' + else: + subtype = node.subtype + if isinstance(node, GeneratorFluctuatingDing0): + type = node.type + lvgen_idx += 1 + lv_gen_dict[lvgen_idx] = { + 'id': node.id_db, + 'la_id': LA.id_db, + 'name': '_'.join( + ['GeneratorFluctuatingDing0', 'LV', + str(lv_grid_id), + str(node.id_db)]), + 'lv_grid_id': lv_grid_id, + 'geom': wkt_dumps(node.geo_data), + 'type': type, + 'subtype': subtype, + 'v_level': node.v_level, + 'nominal_capacity': node.capacity, + 'run_id': run_id, + 'is_aggregated': node.lv_load_area.is_aggregated, + 'weather_cell_id': node.weather_cell_id, + } + else: + type = node.type + lvgen_idx += 1 + lv_gen_dict[lvgen_idx] = { + 'id': node.id_db, + 'name': '_'.join( + ['GeneratorDing0', 'LV', + str(lv_grid_id), + str(node.id_db)]), + 'la_id': LA.id_db, + 'lv_grid_id': lv_grid_id, + 'geom': wkt_dumps(node.geo_data), + 'type': type, + 'subtype': subtype, + 'v_level': node.v_level, + 'nominal_capacity': node.capacity, + 'run_id': run_id, + 'is_aggregated': node.lv_load_area.is_aggregated, + 'weather_cell_id': np.nan + } + + # LVcd + elif isinstance(node, LVCableDistributorDing0): + if not node.grid.grid_district.lv_load_area.is_aggregated: + lvcd_idx += 1 + lv_cd_dict[lvcd_idx] = { + 'name': '_'.join( + [str(node.__class__.__name__), 'LV', + str(lv_grid_id), str(node.id_db)]), + 'id': node.id_db, + 'lv_grid_id': lv_grid_id, + 'geom': None, + # 
wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? + 'run_id': run_id, + } + + # LVload + elif isinstance(node, LVLoadDing0): + if not node.grid.grid_district.lv_load_area.is_aggregated: + lvloads_idx += 1 + lv_loads_dict[lvloads_idx] = { + 'id': node.id_db, + 'name': '_'.join( + [str(node.__class__.__name__), 'LV', + str(lv_grid_id), str(node.id_db)]), + 'lv_grid_id': lv_grid_id, + 'geom': None, + # wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? + 'consumption': json.dumps(node.consumption), + 'run_id': run_id, + } + + # LVedges + for branch in lv_district.lv_grid.graph_edges(): + if not branch['branch'].connects_aggregated: + if not any([isinstance(branch['adj_nodes'][0], + LVLoadAreaCentreDing0), + isinstance(branch['adj_nodes'][1], + LVLoadAreaCentreDing0)]): + lines_idx += 1 + lines_dict[lines_idx] = { + 'id': branch['branch'].id_db, + 'edge_name': '_'.join( + [branch.__class__.__name__, + str(branch['branch'].id_db)]), + 'grid_name': lv_grid_name, + 'type_name': branch[ + 'branch'].type.to_frame().columns[0], + 'type_kind': branch['branch'].kind, + 'length': branch['branch'].length / 1e3, + # length in km + 'U_n': branch['branch'].type['U_n'] / 1e3, + # U_n in kV + 'I_max_th': branch['branch'].type[ + 'I_max_th'], + 'R': branch['branch'].type['R'], + 'L': branch['branch'].type['L'], + 'C': branch['branch'].type['C'], + 'node1': '_'.join( + [str(branch['adj_nodes'][ + 0].__class__.__name__), 'LV', + str(lv_grid_id), + str(branch['adj_nodes'][0].id_db)]) + if not isinstance(branch['adj_nodes'][0], + LVStationDing0) else '_'.join( + [str(branch['adj_nodes'][ + 0].__class__.__name__), 'MV', + str(mv_grid_id), + str(branch['adj_nodes'][0].id_db)]), + 'node2': '_'.join( + [str(branch['adj_nodes'][ + 1].__class__.__name__), 'LV', + str(lv_grid_id), + str(branch['adj_nodes'][1].id_db)]) + if not isinstance(branch['adj_nodes'][1], + LVStationDing0) else '_'.join( + [str(branch['adj_nodes'][ + 1].__class__.__name__), 'MV', + str(mv_grid_id), + str(branch['adj_nodes'][1].id_db)]), + 'run_id': run_id, + 'geom': None + } + + lv_grid = pd.DataFrame.from_dict(lv_grid_dict, orient='index') + lv_gen = pd.DataFrame.from_dict(lv_gen_dict, orient='index') + lv_cd = pd.DataFrame.from_dict(lv_cd_dict, orient='index') + lv_stations = pd.DataFrame.from_dict(lv_stations_dict, orient='index') + mvlv_trafos = pd.DataFrame.from_dict(mvlv_trafos_dict, orient='index') + lv_loads = pd.DataFrame.from_dict(lv_loads_dict, orient='index') + mv_grid = pd.DataFrame.from_dict(mv_grid_dict, orient='index') + mv_gen = pd.DataFrame.from_dict(mv_gen_dict, orient='index') + mv_cb = pd.DataFrame.from_dict(mvcb_dict, orient='index') + mv_cd = pd.DataFrame.from_dict(mv_cd_dict, orient='index') + mv_stations = pd.DataFrame.from_dict(mv_stations_dict, orient='index') + hvmv_trafos = pd.DataFrame.from_dict(hvmv_trafos_dict, orient='index') + mv_loads = pd.DataFrame.from_dict(mv_loads_dict, orient='index') + lines = pd.DataFrame.from_dict(lines_dict, orient='index') + mvlv_mapping = pd.DataFrame.from_dict(mvlv_mapping_dict, orient='index') + + lines = lines[sorted(lines.columns.tolist())] + + return run_id, metadata_json, \ + lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ + mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ + lines, mvlv_mapping \ No newline at end of file diff --git a/ding0/tools/file_export.py b/ding0/tools/file_export.py new file mode 100644 index 00000000..7461d665 --- /dev/null +++ b/ding0/tools/file_export.py @@ 
-0,0 +1,53 @@ +"""This file is part of DINGO, the DIstribution Network GeneratOr. +DINGO is a tool to generate synthetic medium and low voltage power +distribution grids based on open data. + +It is developed in the project open_eGo: https://openegoproject.wordpress.com + +DING0 lives at github: https://github.com/openego/ding0/ +The documentation is available on RTD: http://ding0.readthedocs.io""" + +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" +__author__ = "nesnoj, gplssm" + +import os + +import json + + +def export_data_tocsv(path, run_id, metadata_json, + lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, + lv_loads, + mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, + mv_loads, + lines, mvlv_mapping, csv_sep=','): + # make directory with run_id if it doesn't exist + os.makedirs(path, exist_ok=True) + + # put a text file with the metadata + metadata = json.loads(metadata_json) + with open(os.path.join(path, 'metadata.json'), 'w') as metafile: + json.dump(metadata, metafile) + + # Exports data to csv + def export_network_tocsv(path, table, tablename): + return table.to_csv(os.path.join(path, tablename + '.csv'), sep=csv_sep) + + export_network_tocsv(path, lv_grid, 'lv_grid') + export_network_tocsv(path, lv_gen, 'lv_generator') + export_network_tocsv(path, lv_cd, 'lv_branchtee') + export_network_tocsv(path, lv_stations, 'lv_station') + export_network_tocsv(path, mvlv_trafos, 'mvlv_transformer') + export_network_tocsv(path, lv_loads, 'lv_load') + export_network_tocsv(path, mv_grid, 'mv_grid') + export_network_tocsv(path, mv_gen, 'mv_generator') + export_network_tocsv(path, mv_cd, 'mv_branchtee') + export_network_tocsv(path, mv_stations, 'mv_station') + export_network_tocsv(path, hvmv_trafos, 'hvmv_transformer') + export_network_tocsv(path, mv_cb, 'mv_circuitbreaker') + export_network_tocsv(path, mv_loads, 'mv_load') + export_network_tocsv(path, lines, 'line') + export_network_tocsv(path, mvlv_mapping, 'mvlv_mapping') + # export_network_tocsv(path, areacenter, 'areacenter') \ No newline at end of file diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 363090bd..87f99cdb 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -1679,1027 +1679,6 @@ def parallel_running_stats(districts_list, return mv_stats, lv_stats, mv_crit_nodes, mv_crit_edges, lv_crit_nodes, lv_crit_edges -######################################################## -def export_network(nw, mode='', run_id=None): - """ - Export all nodes and lines of the network nw as DataFrames - - Parameters - ---------- - nw: :any:`list` of NetworkDing0 - The MV grid(s) to be studied - mode: str - If 'MV' export only medium voltage nodes and lines - If 'LV' export only low voltage nodes and lines - else, exports MV and LV nodes and lines - - Returns - ------- - pandas.DataFrame - nodes_df : Dataframe containing nodes and its attributes - pandas.DataFrame - lines_df : Dataframe containing lines and its attributes - """ - - # close circuit breakers - nw.control_circuit_breakers(mode='close') - # srid - srid = str(int(nw.config['geo']['srid'])) - ############################## - # check what to do - lv_info = True - mv_info = True - if mode == 'LV': - mv_info = False - if mode == 'MV': - lv_info = False - ############################## - # from datetime import datetime - if not run_id: - run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") - 
metadata_json = json.dumps(nw.metadata) - ############################## - ############################# - # go through the grid collecting info - lvgrid_idx = 0 - lv_grid_dict = {} - lvloads_idx = 0 - lv_loads_dict = {} - mvgrid_idx = 0 - mv_grid_dict = {} - mvloads_idx = 0 - mv_loads_dict = {} - mvgen_idx = 0 - mv_gen_dict = {} - mvcb_idx = 0 - mvcb_dict = {} - mvcd_idx = 0 - mv_cd_dict = {} - # mvstations_idx = 0 - mv_stations_dict = {} - mvtrafos_idx = 0 - hvmv_trafos_dict = {} - lvgen_idx = 0 - lv_gen_dict = {} - lvcd_idx = 0 - lv_cd_dict = {} - lvstations_idx = 0 - lv_stations_dict = {} - lvtrafos_idx = 0 - mvlv_trafos_dict = {} - areacenter_idx = 0 - areacenter_dict = {} - lines_idx = 0 - lines_dict = {} - LVMVmapping_idx = 0 - mvlv_mapping_dict = {} - - def aggregate_loads(la_center, aggr): - """Aggregate consumption in load area per sector - Parameters - ---------- - la_center: LVLoadAreaCentreDing0 - Load area center object from Ding0 - Returns - ------- - """ - for s in ['retail', 'industrial', 'agricultural', 'residential']: - if s not in aggr['load']: - aggr['load'][s] = {} - - for t in ['nominal', 'peak']: - if t not in aggr['load'][s]: - aggr['load'][s][t] = 0 - - aggr['load']['retail']['nominal'] += sum( - [_.sector_consumption_retail - for _ in la_center.lv_load_area._lv_grid_districts]) - aggr['load']['industrial']['nominal'] += sum( - [_.sector_consumption_industrial - for _ in la_center.lv_load_area._lv_grid_districts]) - aggr['load']['agricultural']['nominal'] += sum( - [_.sector_consumption_agricultural - for _ in la_center.lv_load_area._lv_grid_districts]) - aggr['load']['residential']['nominal'] += sum( - [_.sector_consumption_residential - for _ in la_center.lv_load_area._lv_grid_districts]) - - aggr['load']['retail']['peak'] += sum( - [_.peak_load_retail - for _ in la_center.lv_load_area._lv_grid_districts]) - aggr['load']['industrial']['peak'] += sum( - [_.peak_load_industrial - for _ in la_center.lv_load_area._lv_grid_districts]) - aggr['load']['agricultural']['peak'] += sum( - [_.peak_load_agricultural - for _ in la_center.lv_load_area._lv_grid_districts]) - aggr['load']['residential']['peak'] += sum( - [_.peak_load_residential - for _ in la_center.lv_load_area._lv_grid_districts]) - - return aggr - - for mv_district in nw.mv_grid_districts(): - from shapely.wkt import dumps as wkt_dumps - mv_grid_id = mv_district.mv_grid.id_db - mv_grid_name = '_'.join( - [str(mv_district.mv_grid.__class__.__name__), 'MV', str(mv_grid_id), - str(mv_district.mv_grid.id_db)]) - - if mv_info: - lv_grid_id = 0 - - # MV-grid - # ToDo: geom <- Polygon - mvgrid_idx += 1 - mv_grid_dict[mvgrid_idx] = { - 'id': mv_grid_id, - 'name': mv_grid_name, - 'geom': wkt_dumps(mv_district.geo_data), - 'population': - sum([_.zensus_sum - for _ in - mv_district._lv_load_areas - if not np.isnan(_.zensus_sum)]), - 'voltage_nom': mv_district.mv_grid.v_level, # in kV - 'run_id': run_id - } - - # MV station - mv_station = mv_district.mv_grid._station - mv_station_name = '_'.join( - ['MVStationDing0', 'MV', str(mv_station.id_db), - str(mv_station.id_db)]) - mv_stations_dict[0] = { - 'id': mv_district.mv_grid.id_db, - 'name': mv_station_name, - 'geom': mv_station.geo_data, - 'run_id': run_id} - - # Trafos MV - for t in mv_station.transformers(): - mvtrafos_idx += 1 - hvmv_trafos_dict[mvtrafos_idx] = { - 'id': mv_station.id_db, - 'geom': mv_station.geo_data, - 'name': '_'.join( - ['MVTransformerDing0', 'MV', str(mv_station.id_db), - str(mv_station.id_db)]), - 'voltage_op': t.v_level, - 'S_nom': t.s_max_a, - 
'X': t.x, - 'R': t.r, - 'run_id': run_id, - } - - # MV grid components - for node in mv_district.mv_grid.graph_nodes_sorted(): - geom = wkt_dumps(node.geo_data) - - # LVStation - if isinstance(node, LVStationDing0): - if not node.lv_load_area.is_aggregated: - lvstations_idx += 1 - lv_grid_name = '_'.join( - ['LVGridDing0', 'LV', str(node.id_db), - str(node.id_db)]) - lv_stations_dict[lvstations_idx] = { - 'id': node.id_db, - 'name': '_'.join([node.__class__.__name__, - 'MV', str(mv_grid_id), - str(node.id_db)]), - 'geom': geom, - 'run_id': run_id, - } - - # LV-MV mapping - LVMVmapping_idx += 1 - mvlv_mapping_dict[LVMVmapping_idx] = { - 'mv_grid_id': mv_grid_id, - 'mv_grid_name': mv_grid_name, - 'lv_grid_id': node.id_db, - 'lv_grid_name': lv_grid_name, - 'run_id': run_id, - } - - # Trafos LV - for t in node.transformers(): - lvtrafos_idx += 1 - mvlv_trafos_dict[lvtrafos_idx] = { - 'id': node.id_db, - 'geom': geom, - 'name': '_'.join(['LVTransformerDing0', 'LV', - str(node.id_db), - str(node.id_db)]), - 'voltage_op': t.v_level, - 'S_nom': t.s_max_a, - 'X': t.x, - 'R': t.r, - 'run_id': run_id, - } - - # MVGenerator - elif isinstance(node, (GeneratorDing0, GeneratorFluctuatingDing0)): - if node.subtype == None: - subtype = 'other' - else: - subtype = node.subtype - if isinstance(node, GeneratorFluctuatingDing0): - type = node.type - mvgen_idx += 1 - mv_gen_dict[mvgen_idx] = { - 'id': node.id_db, - 'name': '_'.join(['GeneratorFluctuatingDing0', 'MV', - str(mv_grid_id), - str(node.id_db)]), - 'geom': geom, - 'type': type, - 'subtype': subtype, - 'v_level': node.v_level, - 'nominal_capacity': node.capacity, - 'run_id': run_id, - 'is_aggregated': False, - 'weather_cell_id': node.weather_cell_id - } - else: - type = node.type - mvgen_idx += 1 - mv_gen_dict[mvgen_idx] = { - 'id': node.id_db, - 'name': '_'.join( - ['GeneratorDing0', 'MV', str(mv_grid_id), - str(node.id_db)]), - 'geom': geom, - 'type': type, - 'subtype': subtype, - 'v_level': node.v_level, - 'nominal_capacity': node.capacity, - 'run_id': run_id, - 'is_aggregated': False, - 'weather_cell_id': np.nan - } - - # MVBranchTees - elif isinstance(node, MVCableDistributorDing0): - mvcd_idx += 1 - mv_cd_dict[mvcd_idx] = { - 'id': node.id_db, - 'name': '_'.join( - [str(node.__class__.__name__), 'MV', - str(mv_grid_id), str(node.id_db)]), - 'geom': geom, - 'run_id': run_id, - } - - # LoadAreaCentre - elif isinstance(node, LVLoadAreaCentreDing0): - - # type = 'Load area center of aggregated load area' - - areacenter_idx += 1 - aggr_lines = 0 - - aggr = {'generation': {}, 'load': {}, 'aggregates': []} - - # Determine aggregated load in MV grid - # -> Implement once loads in Ding0 MV grids exist - - # Determine aggregated load in LV grid - aggr = aggregate_loads(node, aggr) - - # Collect metadata of aggregated load areas - aggr['aggregates'] = { - 'population': node.lv_load_area.zensus_sum, - 'geom': node.lv_load_area.geo_area} - aggr_line_type = nw._static_data['MV_cables'].iloc[ - nw._static_data['MV_cables']['I_max_th'].idxmax()] - geom = wkt_dumps(node.geo_data) - - for aggr_node in aggr: - if aggr_node == 'generation': - pass - - elif aggr_node == 'load': - for type in aggr['load']: - mvloads_idx += 1 - aggr_line_id = 100 * node.lv_load_area.id_db + mvloads_idx + 1 - mv_aggr_load_name = '_'.join( - ['Load_aggregated', str(type), - repr(mv_district.mv_grid), - # str(node.lv_load_area.id_db)]) - str(aggr_line_id)]) - mv_loads_dict[mvloads_idx] = { - # Exception: aggregated loads get a string as id - 'id': aggr_line_id, #node.lv_load_area.id_db, 
#mv_aggr_load_name, - 'name': mv_aggr_load_name, - 'geom': geom, - 'consumption': json.dumps( - {type: aggr['load'][type]['nominal']}), - 'is_aggregated': True, - 'run_id': run_id, - } - - lines_idx += 1 - aggr_lines += 1 - edge_name = '_'.join( - ['line_aggr_load_la', - str(node.lv_load_area.id_db), str(type), - # str(node.lv_load_area.id_db)]) - str(aggr_line_id)]) - lines_dict[lines_idx] = { - 'id': aggr_line_id, #node.lv_load_area.id_db, - 'edge_name': edge_name, - 'grid_name': mv_grid_name, - 'type_name': aggr_line_type.name, - 'type_kind': 'cable', - 'length': 1e-3, # in km - 'U_n': aggr_line_type.U_n, - 'I_max_th': aggr_line_type.I_max_th, - 'R': aggr_line_type.R, - 'L': aggr_line_type.L, - 'C': aggr_line_type.C, - 'node1': mv_aggr_load_name, - 'node2': mv_station_name, - 'run_id': run_id, - 'geom': from_shape(LineString([mv_station.geo_data, - mv_station.geo_data]), - srid=srid) - } - - # TODO: eventually remove export of DisconnectingPoints from export - # DisconnectingPoints - elif isinstance(node, CircuitBreakerDing0): - mvcb_idx += 1 - mvcb_dict[mvcb_idx] = { - 'id': node.id_db, - 'name': '_'.join([str(node.__class__.__name__), 'MV', - str(mv_grid_id), str(node.id_db)]), - 'geom': geom, - 'status': node.status, - 'run_id': run_id, - } - else: - type = 'Unknown' - - # MVedges - for branch in mv_district.mv_grid.graph_edges(): - # geom = wkt_dumps(node.geo_data) - geom = from_shape(LineString([branch['adj_nodes'][0].geo_data, - branch['adj_nodes'][1].geo_data]), - srid=srid) - if not any([isinstance(branch['adj_nodes'][0], - LVLoadAreaCentreDing0), - isinstance(branch['adj_nodes'][1], - LVLoadAreaCentreDing0)]): - lines_idx += 1 - lines_dict[lines_idx] = { - 'id': branch['branch'].id_db, - 'edge_name': '_'.join( - [branch['branch'].__class__.__name__, - str(branch['branch'].id_db)]), - 'grid_name': mv_grid_name, - 'type_name': branch['branch'].type['name'], - 'type_kind': branch['branch'].kind, - 'length': branch['branch'].length / 1e3, - 'U_n': branch['branch'].type['U_n'], - 'I_max_th': branch['branch'].type['I_max_th'], - 'R': branch['branch'].type['R'], - 'L': branch['branch'].type['L'], - 'C': branch['branch'].type['C'], - 'node1': '_'.join( - [str(branch['adj_nodes'][0].__class__.__name__), - 'MV', str(mv_grid_id), - str(branch['adj_nodes'][0].id_db)]), - 'node2': '_'.join( - [str(branch['adj_nodes'][1].__class__.__name__), - 'MV', str(mv_grid_id), - str(branch['adj_nodes'][1].id_db)]), - 'run_id': run_id, - 'geom': from_shape(LineString([branch['adj_nodes'][0].geo_data, - branch['adj_nodes'][1].geo_data]), - srid=srid), - } - - if lv_info: - for LA in mv_district.lv_load_areas(): - for lv_district in LA.lv_grid_districts(): - - if not lv_district.lv_grid.grid_district.lv_load_area.is_aggregated: - lv_grid_id = lv_district.lv_grid.id_db - lv_grid_name = '_'.join( - [str(lv_district.lv_grid.__class__.__name__), 'LV', - str(lv_district.lv_grid.id_db), - str(lv_district.lv_grid.id_db)]) - - lvgrid_idx += 1 - lv_grid_dict[lvgrid_idx] = { - 'id': lv_district.lv_grid.id_db, - 'name': lv_grid_name, - 'geom': wkt_dumps(lv_district.geo_data), - 'population': lv_district.population, - 'voltage_nom': lv_district.lv_grid.v_level / 1e3, - 'run_id': run_id - } - - # geom = from_shape(Point(lv_district.lv_grid.station().geo_data), srid=srid) - # geom = wkt_dumps(lv_district.geo_data)# lv_grid.station() #ding0_lv_grid.grid_district.geo_data - for node in lv_district.lv_grid.graph_nodes_sorted(): - # geom = wkt_dumps(node.geo_data) - - # LVGenerator - if isinstance(node, (GeneratorDing0, 
GeneratorFluctuatingDing0)): - if node.subtype == None: - subtype = 'other' - else: - subtype = node.subtype - if isinstance(node, GeneratorFluctuatingDing0): - type = node.type - lvgen_idx += 1 - lv_gen_dict[lvgen_idx] = { - 'id': node.id_db, - 'la_id': LA.id_db, - 'name': '_'.join( - ['GeneratorFluctuatingDing0', 'LV', - str(lv_grid_id), - str(node.id_db)]), - 'lv_grid_id': lv_grid_id, - 'geom': wkt_dumps(node.geo_data), - 'type': type, - 'subtype': subtype, - 'v_level': node.v_level, - 'nominal_capacity': node.capacity, - 'run_id': run_id, - 'is_aggregated': node.lv_load_area.is_aggregated, - 'weather_cell_id': node.weather_cell_id, - } - else: - type = node.type - lvgen_idx += 1 - lv_gen_dict[lvgen_idx] = { - 'id': node.id_db, - 'name': '_'.join( - ['GeneratorDing0', 'LV', - str(lv_grid_id), - str(node.id_db)]), - 'la_id': LA.id_db, - 'lv_grid_id': lv_grid_id, - 'geom': wkt_dumps(node.geo_data), - 'type': type, - 'subtype': subtype, - 'v_level': node.v_level, - 'nominal_capacity': node.capacity, - 'run_id': run_id, - 'is_aggregated': node.lv_load_area.is_aggregated, - 'weather_cell_id': np.nan - } - - # LVcd - elif isinstance(node, LVCableDistributorDing0): - if not node.grid.grid_district.lv_load_area.is_aggregated: - lvcd_idx += 1 - lv_cd_dict[lvcd_idx] = { - 'name': '_'.join( - [str(node.__class__.__name__), 'LV', - str(lv_grid_id), str(node.id_db)]), - 'id': node.id_db, - 'lv_grid_id': lv_grid_id, - 'geom': None, - # wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? - 'run_id': run_id, - } - - # LVload - elif isinstance(node, LVLoadDing0): - if not node.grid.grid_district.lv_load_area.is_aggregated: - lvloads_idx += 1 - lv_loads_dict[lvloads_idx] = { - 'id': node.id_db, - 'name': '_'.join( - [str(node.__class__.__name__), 'LV', - str(lv_grid_id), str(node.id_db)]), - 'lv_grid_id': lv_grid_id, - 'geom': None, - # wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? 
- 'consumption': json.dumps(node.consumption), - 'run_id': run_id, - } - - # LVedges - for branch in lv_district.lv_grid.graph_edges(): - if not branch['branch'].connects_aggregated: - if not any([isinstance(branch['adj_nodes'][0], - LVLoadAreaCentreDing0), - isinstance(branch['adj_nodes'][1], - LVLoadAreaCentreDing0)]): - lines_idx += 1 - lines_dict[lines_idx] = { - 'id': branch['branch'].id_db, - 'edge_name': '_'.join( - [branch.__class__.__name__, - str(branch['branch'].id_db)]), - 'grid_name': lv_grid_name, - 'type_name': branch[ - 'branch'].type.to_frame().columns[0], - 'type_kind': branch['branch'].kind, - 'length': branch['branch'].length / 1e3, - # length in km - 'U_n': branch['branch'].type['U_n'] / 1e3, - # U_n in kV - 'I_max_th': branch['branch'].type[ - 'I_max_th'], - 'R': branch['branch'].type['R'], - 'L': branch['branch'].type['L'], - 'C': branch['branch'].type['C'], - 'node1': '_'.join( - [str(branch['adj_nodes'][ - 0].__class__.__name__), 'LV', - str(lv_grid_id), - str(branch['adj_nodes'][0].id_db)]) - if not isinstance(branch['adj_nodes'][0], - LVStationDing0) else '_'.join( - [str(branch['adj_nodes'][ - 0].__class__.__name__), 'MV', - str(mv_grid_id), - str(branch['adj_nodes'][0].id_db)]), - 'node2': '_'.join( - [str(branch['adj_nodes'][ - 1].__class__.__name__), 'LV', - str(lv_grid_id), - str(branch['adj_nodes'][1].id_db)]) - if not isinstance(branch['adj_nodes'][1], - LVStationDing0) else '_'.join( - [str(branch['adj_nodes'][ - 1].__class__.__name__), 'MV', - str(mv_grid_id), - str(branch['adj_nodes'][1].id_db)]), - 'run_id': run_id, - 'geom': None - } - - lv_grid = pd.DataFrame.from_dict(lv_grid_dict, orient='index') - lv_gen = pd.DataFrame.from_dict(lv_gen_dict, orient='index') - lv_cd = pd.DataFrame.from_dict(lv_cd_dict, orient='index') - lv_stations = pd.DataFrame.from_dict(lv_stations_dict, orient='index') - mvlv_trafos = pd.DataFrame.from_dict(mvlv_trafos_dict, orient='index') - lv_loads = pd.DataFrame.from_dict(lv_loads_dict, orient='index') - mv_grid = pd.DataFrame.from_dict(mv_grid_dict, orient='index') - mv_gen = pd.DataFrame.from_dict(mv_gen_dict, orient='index') - mv_cb = pd.DataFrame.from_dict(mvcb_dict, orient='index') - mv_cd = pd.DataFrame.from_dict(mv_cd_dict, orient='index') - mv_stations = pd.DataFrame.from_dict(mv_stations_dict, orient='index') - hvmv_trafos = pd.DataFrame.from_dict(hvmv_trafos_dict, orient='index') - mv_loads = pd.DataFrame.from_dict(mv_loads_dict, orient='index') - lines = pd.DataFrame.from_dict(lines_dict, orient='index') - mvlv_mapping = pd.DataFrame.from_dict(mvlv_mapping_dict, orient='index') - - lines = lines[sorted(lines.columns.tolist())] - - return run_id, metadata_json, \ - lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ - mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ - lines, mvlv_mapping - - -####################################################### - -def export_data_tocsv(path, run_id, metadata_json, - lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, - lv_loads, - mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, - mv_loads, - lines, mvlv_mapping, csv_sep=','): - # make directory with run_id if it doesn't exist - os.makedirs(path, exist_ok=True) - - # put a text file with the metadata - metadata = json.loads(metadata_json) - with open(os.path.join(path, 'metadata.json'), 'w') as metafile: - json.dump(metadata, metafile) - - # Exports data to csv - def export_network_tocsv(path, table, tablename): - return table.to_csv(os.path.join(path, tablename + '.csv'), sep=csv_sep) - - 
export_network_tocsv(path, lv_grid, 'lv_grid') - export_network_tocsv(path, lv_gen, 'lv_generator') - export_network_tocsv(path, lv_cd, 'lv_branchtee') - export_network_tocsv(path, lv_stations, 'lv_station') - export_network_tocsv(path, mvlv_trafos, 'mvlv_transformer') - export_network_tocsv(path, lv_loads, 'lv_load') - export_network_tocsv(path, mv_grid, 'mv_grid') - export_network_tocsv(path, mv_gen, 'mv_generator') - export_network_tocsv(path, mv_cd, 'mv_branchtee') - export_network_tocsv(path, mv_stations, 'mv_station') - export_network_tocsv(path, hvmv_trafos, 'hvmv_transformer') - export_network_tocsv(path, mv_cb, 'mv_circuitbreaker') - export_network_tocsv(path, mv_loads, 'mv_load') - export_network_tocsv(path, lines, 'line') - export_network_tocsv(path, mvlv_mapping, 'mvlv_mapping') - # export_network_tocsv(path, areacenter, 'areacenter') - - -######################################################## - -from sqlalchemy import create_engine -from egoio.db_tables import model_draft as md - - -def export_network_to_oedb(session, table, tabletype, srid): - dataset = [] - engine = create_engine("sqlite:///myexample.db") - print("Exporting table type : {}".format(tabletype)) - if tabletype == 'line': - table.apply(lambda row: - session.add(md.EgoGridDing0Line( - run_id=row['run_id'], - id_db=row['id'], - edge_name=row['edge_name'], - grid_name=row['grid_name'], - node1=row['node1'], - node2=row['node2'], - type_kind=row['type_kind'], - type_name=row['type_name'], - length=row['length'], - u_n=row['U_n'], - c=row['C'], - l=row['L'], - r=row['R'], - i_max_th=row['I_max_th'], - geom=row['geom'], - )) - , axis=1) - - elif tabletype == 'lv_cd': - table.apply(lambda row: - session.add(md.EgoGridDing0LvBranchtee( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - )) - , axis=1) - - elif tabletype == 'lv_gen': - table.apply(lambda row: - session.add(md.EgoGridDing0LvGenerator( - run_id=row['run_id'], - id_db=row['id'], - la_id=row['la_id'], - name=row['name'], - lv_grid_id=str(row['lv_grid_id']), - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - type=row['type'], - subtype=row['subtype'], - v_level=row['v_level'], - nominal_capacity=row['nominal_capacity'], - is_aggregated=row['is_aggregated'], - weather_cell_id=row['weather_cell_id'] if not(pd.isnull(row[ - 'weather_cell_id'])) else None, - - )) - , axis=1) - - elif tabletype == 'lv_load': - table.apply(lambda row: - session.add(md.EgoGridDing0LvLoad( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - lv_grid_id=row['lv_grid_id'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - consumption=row['consumption'] - )) - , axis=1) - - elif tabletype == 'lv_grid': - table.apply(lambda row: - session.add(md.EgoGridDing0LvGrid( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - population=row['population'], - voltage_nom=row['voltage_nom'], - )) - , axis=1) - - elif tabletype == 'lv_station': - table.apply(lambda row: - session.add(md.EgoGridDing0LvStation( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - )) - , axis=1) - - elif tabletype == 'mvlv_trafo': - table.apply(lambda row: - session.add(md.EgoGridDing0MvlvTransformer( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, 
row['geom']) if row[ - 'geom'] else None, - voltage_op=row['voltage_op'], - s_nom=row['S_nom'], - x=row['X'], - r=row['R'], - )) - , axis=1) - - elif tabletype == 'mvlv_mapping': - table.apply(lambda row: - session.add(md.EgoGridDing0MvlvMapping( - run_id=row['run_id'], - lv_grid_id=row['lv_grid_id'], - lv_grid_name=row['lv_grid_name'], - mv_grid_id=row['mv_grid_id'], - mv_grid_name=row['mv_grid_name'], - )) - , axis=1) - - elif tabletype == 'mv_cd': - table.apply(lambda row: - session.add(md.EgoGridDing0MvBranchtee( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - )) - , axis=1) - - elif tabletype == 'mv_cb': - table.apply(lambda row: - session.add(md.EgoGridDing0MvCircuitbreaker( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - status=row['status'], - )) - , axis=1) - - elif tabletype == 'mv_gen': - table.apply(lambda row: - session.add(md.EgoGridDing0MvGenerator( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - type=row['type'], - subtype=row['subtype'], - v_level=row['v_level'], - nominal_capacity=row['nominal_capacity'], - is_aggregated=row['is_aggregated'], - weather_cell_id=row['weather_cell_id'] if not(pd.isnull(row[ - 'weather_cell_id'])) else None, - )) - , axis=1) - - elif tabletype == 'mv_load': - table.apply(lambda row: - session.add(md.EgoGridDing0MvLoad( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - is_aggregated=row['is_aggregated'], - consumption=row['consumption'], - )) - , axis=1) - - elif tabletype == 'mv_grid': - table.apply(lambda row: - session.add(md.EgoGridDing0MvGrid( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - population=row['population'], - voltage_nom=row['voltage_nom'], - )) - , axis=1) - - elif tabletype == 'mv_station': - table.apply(lambda row: - session.add(md.EgoGridDing0MvStation( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - )) - , axis=1) - - elif tabletype == 'hvmv_trafo': - table.apply(lambda row: - session.add(md.EgoGridDing0HvmvTransformer( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - voltage_op=row['voltage_op'], - s_nom=row['S_nom'], - x=row['X'], - r=row['R'], - )) - , axis=1) - # if not engine.dialect.has_table(engine, 'ego_grid_mv_transformer'): - # print('helloworld') - - session.commit() - - -def export_data_to_oedb(session, run_id, metadata_json, srid, - lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, - lv_loads, - mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, - mv_loads, lines, mvlv_mapping): - # only for testing - # engine = create_engine('sqlite:///:memory:') - - # get the run_id from model_draft.ego_grid_ding0_versioning - # compare the run_id from table to the current run_id - - # oedb_versioning_query = session.query( - # md.EgoGridDing0Versioning.run_id, - # md.EgoGridDing0Versioning.description - # ).filter(md.EgoGridDing0Versioning.run_id == run_id) - # - # oedb_versioning = pd.read_sql_query(oedb_versioning_query.statement, - # session.bind) - oedb_versioning = pd.DataFrame() - 
- if oedb_versioning.empty: - # if the run_id doesn't exist then - # create entry into ego_grid_ding0_versioning: - metadata_df = pd.DataFrame({'run_id': run_id, - 'description': metadata_json}, - index=[0]) - metadata_df.apply(lambda row: - session.add(md.EgoGridDing0Versioning( - run_id=row['run_id'], - description=row['description'], - )) - , axis=1) - session.commit() - - export_network_to_oedb(session, lv_grid, 'lv_grid', srid) - export_network_to_oedb(session, lv_gen, 'lv_gen', srid) - export_network_to_oedb(session, lv_cd, 'lv_cd', srid) - export_network_to_oedb(session, lv_stations, 'lv_station', srid) - export_network_to_oedb(session, mvlv_trafos, 'mvlv_trafo', srid) - export_network_to_oedb(session, lv_loads, 'lv_load', srid) - export_network_to_oedb(session, mv_grid, 'mv_grid', srid) - export_network_to_oedb(session, mv_gen, 'mv_gen', srid) - export_network_to_oedb(session, mv_cb, 'mv_cb', srid) - export_network_to_oedb(session, mv_cd, 'mv_cd', srid) - export_network_to_oedb(session, mv_stations, 'mv_station', srid) - export_network_to_oedb(session, hvmv_trafos, 'hvmv_trafo', srid) - export_network_to_oedb(session, mv_loads, 'mv_load', srid) - export_network_to_oedb(session, lines, 'line', srid) - export_network_to_oedb(session, mvlv_mapping, 'mvlv_mapping', srid) - else: - raise KeyError("run_id already present! No tables are input!") - - -def create_ding0_db_tables(engine): - tables = [md.EgoGridDing0Versioning, - md.EgoGridDing0Line, - md.EgoGridDing0LvBranchtee, - md.EgoGridDing0LvGenerator, - md.EgoGridDing0LvLoad, - md.EgoGridDing0LvGrid, - md.EgoGridDing0LvStation, - md.EgoGridDing0MvlvTransformer, - md.EgoGridDing0MvlvMapping, - md.EgoGridDing0MvBranchtee, - md.EgoGridDing0MvCircuitbreaker, - md.EgoGridDing0MvGenerator, - md.EgoGridDing0MvLoad, - md.EgoGridDing0MvGrid, - md.EgoGridDing0MvStation, - md.EgoGridDing0HvmvTransformer] - - for tab in tables: - tab().__table__.create(bind=engine, checkfirst=True) - - -def drop_ding0_db_tables(engine): - tables = [md.EgoGridDing0Line, - md.EgoGridDing0LvBranchtee, - md.EgoGridDing0LvGenerator, - md.EgoGridDing0LvLoad, - md.EgoGridDing0LvGrid, - md.EgoGridDing0LvStation, - md.EgoGridDing0MvlvTransformer, - md.EgoGridDing0MvlvMapping, - md.EgoGridDing0MvBranchtee, - md.EgoGridDing0MvCircuitbreaker, - md.EgoGridDing0MvGenerator, - md.EgoGridDing0MvLoad, - md.EgoGridDing0MvGrid, - md.EgoGridDing0MvStation, - md.EgoGridDing0HvmvTransformer, - md.EgoGridDing0Versioning] - - print("Please confirm that you would like to drop the following tables:") - for n, tab in enumerate(tables): - print("{: 3d}. {}".format(n, tab)) - - print("Please confirm with either of the choices below:\n" + - "- yes\n" + - "- no\n" + - "- the indexes to drop in the format 0, 2, 3, 5") - confirmation = input( - "Please type the choice completely as there is no default choice.") - if re.fullmatch('[Yy]es', confirmation): - for tab in tables: - tab().__table__.drop(bind=engine, checkfirst=True) - elif re.fullmatch('[Nn]o', confirmation): - print("Cancelled dropping of tables") - else: - try: - indlist = confirmation.split(',') - indlist = list(map(int, indlist)) - print("Please confirm deletion of the following tables:") - tablist = np.array(tables)[indlist].tolist() - for n, tab in enumerate(tablist): - print("{: 3d}. 
{}".format(n, tab)) - con2 = input("Please confirm with either of the choices below:\n" + - "- yes\n" + - "- no") - if re.fullmatch('[Yy]es', con2): - for tab in tablist: - tab().__table__.drop(bind=engine, checkfirst=True) - elif re.fullmatch('[Nn]o', con2): - print("Cancelled dropping of tables") - else: - print("The input is unclear, no action taken") - except ValueError: - print("Confirmation unclear, no action taken") - - -def grant_access_ding0_db_tables(engine): - tables = [md.EgoGridDing0Line, - md.EgoGridDing0LvBranchtee, - md.EgoGridDing0LvGenerator, - md.EgoGridDing0LvLoad, - md.EgoGridDing0LvGrid, - md.EgoGridDing0LvStation, - md.EgoGridDing0MvlvTransformer, - md.EgoGridDing0MvlvMapping, - md.EgoGridDing0MvBranchtee, - md.EgoGridDing0MvCircuitbreaker, - md.EgoGridDing0MvGenerator, - md.EgoGridDing0MvLoad, - md.EgoGridDing0MvGrid, - md.EgoGridDing0MvStation, - md.EgoGridDing0HvmvTransformer, - md.EgoGridDing0Versioning] - - def grant_db_access(engine, table, role): - r"""Gives access to database users/ groups - Parameters - ---------- - session : sqlalchemy session object - A valid connection to a database - table : sqlalchmy Table class definition - The database table - role : str - database role that access is granted to - """ - tablename = table.__table__.name - schema = table.__table__.schema - - grant_str = """BEGIN; - ALTER TABLE {schema}.{table} OWNER to {role}; - COMMIT;""".format(schema=schema, table=tablename, - role=role) - - # engine.execute(grant_str) - engine.execution_options(autocommit=True).execute(grant_str) - - # engine.echo=True - - for tab in tables: - grant_db_access(engine, tab, 'oeuser') - - engine.close() - - ######################################################## if __name__ == "__main__": # nw = init_mv_grid(mv_grid_districts=[3544, 3545]) From 33e48a95090c58a3ce149b8359951933fc5f288b Mon Sep 17 00:00:00 2001 From: boltbeard Date: Mon, 10 Sep 2018 11:27:09 +0200 Subject: [PATCH 046/215] example of moving away from orm to direct sql definitions --- ding0/tools/db_export.py | 99 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 98 insertions(+), 1 deletion(-) diff --git a/ding0/tools/db_export.py b/ding0/tools/db_export.py index 8afd0210..70d551f8 100644 --- a/ding0/tools/db_export.py +++ b/ding0/tools/db_export.py @@ -20,7 +20,7 @@ from sqlalchemy import create_engine from egoio.db_tables import model_draft as md -from sqlalchemy import ARRAY, BigInteger, Boolean, CheckConstraint, Column, Date, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, Integer, JSON, Numeric, SmallInteger, String, Table, Text, UniqueConstraint, text +from sqlalchemy import MetaData, ARRAY, BigInteger, Boolean, CheckConstraint, Column, Date, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, Integer, JSON, Numeric, SmallInteger, String, Table, Text, UniqueConstraint, text from geoalchemy2.types import Geometry, Raster from sqlalchemy.orm import relationship from sqlalchemy.dialects.postgresql.hstore import HSTORE @@ -32,6 +32,103 @@ Base = declarative_base() metadata = Base.metadata +DING0_TABLES = {'versioning': 'ding0_versioning', + 'lines': 'ding0_line', + 'lv_branchtee': 'ding0_lv_branchtee', + 'lv_generator': 'ding0_lv_generator', + 'lv_load': 'ding0_lv_load', + 'lv_grid': 'ding0_lv_grid', + 'lv_station': 'ding0_lv_station', + 'mvlv_transformer': 'ding0_mvlv_transformer', + 'mvlv_mapping': 'ding0_mvlv_mapping', + 'mv_branchtee': 'ding0_mv_branchtee', + 'mv_circuitbreaker': 'ding0_mv_circuitbreaker', + 'mv_generator': 'ding0_mv_generator', + 'mv_load': 
'ding0_mv_load', + 'mv_grid': 'ding0_mv_grid', + 'mv_station': 'ding0_mv_station', + 'hvmv_transformer': 'ding0_hvmv_transformer'} + + +def df_sql_write(dataframe, db_table, engine): + """ + Convert dataframes such that their column names + are made small and the index is renamed 'id' so as to + correctly load its data to its appropriate sql table. + + .. ToDo: need to check for id_db instead of only 'id' in index label names + + NOTE: This function does not check if the dataframe columns + matches the db_table fields, if they do not then no warning + is given. + + Parameters + ---------- + dataframe: :pandas:`DataFrame` + The pandas dataframe to be transferred to its + apprpritate db_table + + db_table: :py:mod:`sqlalchemy.sql.schema.Table` + A table instance definition from sqlalchemy. + NOTE: This isn't an orm definition + + engine: :py:mod:`sqlalchemy.engine.base.Engine` + Sqlalchemy database engine + """ + sql_write_df = dataframe.copy() + sql_write_df.columns = sql_write_df.columns.map(str.lower) + sql_write_df = sql_write_df.set_index('id') + sql_write_df.to_sql(db_table.name, con=engine, if_exists='append') + + +def create_ding0_sql_tables(engine, ding0_schema): + """ + Create the ding0 tables + + Parameters + ---------- + engine: :py:mod:`sqlalchemy.engine.base.Engine` + Sqlalchemy database engine + + schema: :obj:`str` + The schema in which the tables are to be created + Returns + ------- + """ + + # versioning table + versioning = Table(DING0_TABLES['versioning'], metadata, + Column('run_id', BigInteger, primary_key=True, autoincrement=False, nullable=False), + Column('description', String(3000)), + schema=ding0_schema, + comment="""This is a comment on table for the ding0 versioning table""" + ) + + + # ding0 lines table + lines = Table(DING0_TABLES['lines'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('edge_name', String(100)), + Column('grid_name', String(100)), + Column('node1', String(100)), + Column('node2', String(100)), + Column('type_kind', String(100)), + Column('type_name', String(100)), + Column('length', Float(10)), + Column('u_n', Float(10)), + Column('c', Float(10)), + Column('l', Float(10)), + Column('r', Float(10)), + Column('i_max_th', Float(10)), + Column('geom', Geometry('LINESTRING', 4326)), + schema=ding0_schema, + comment="""This is a commment on table for the ding0 lines table""" + ) + + # create all the tables + metadata.create_all(engine, checkfirst=True) def export_network_to_oedb(session, schema, table, tabletype, srid): dataset = [] From 3ca804267c7bbcdfbfa025176d8badb1f91f31b4 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Mon, 10 Sep 2018 11:30:00 +0200 Subject: [PATCH 047/215] move input output related files to io director --- ding0/io/__init__.py | 0 ding0/{tools => io}/db_export.py | 0 ding0/{tools => io}/export.py | 0 ding0/{tools => io}/file_export.py | 0 4 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 ding0/io/__init__.py rename ding0/{tools => io}/db_export.py (100%) rename ding0/{tools => io}/export.py (100%) rename ding0/{tools => io}/file_export.py (100%) diff --git a/ding0/io/__init__.py b/ding0/io/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ding0/tools/db_export.py b/ding0/io/db_export.py similarity index 100% rename from ding0/tools/db_export.py rename to ding0/io/db_export.py diff --git a/ding0/tools/export.py b/ding0/io/export.py similarity index 100% 
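The df_sql_write() helper introduced in the patch above boils down to two operations on the exported dataframes: lower-case the column names and append the rows to an already-created table keyed by 'id'. The sketch below reproduces that pattern outside of ding0 so it can be tried in isolation. It is illustrative only: the table and column names are made up, and it uses an in-memory SQLite engine with a plain table, because the real DING0_TABLES definitions carry geoalchemy2 Geometry columns and therefore need a PostGIS-enabled PostgreSQL database.

import pandas as pd
from sqlalchemy import create_engine, MetaData, Table, Column, BigInteger, String

engine = create_engine('sqlite://')          # throwaway in-memory database
meta = MetaData()
demo_versioning = Table('demo_versioning', meta,
                        Column('run_id', BigInteger, primary_key=True, autoincrement=False),
                        Column('description', String(3000)))
meta.create_all(engine, checkfirst=True)     # stands in for create_ding0_sql_tables()

df = pd.DataFrame({'RUN_ID': [20180910000000], 'DESCRIPTION': ['demo run']})

# mirror df_sql_write(): lower-case the columns, then append
# (the 'id'/index handling of the real helper is skipped for this demo table)
df.columns = df.columns.map(str.lower)
df.to_sql(demo_versioning.name, con=engine, if_exists='append', index=False)

print(pd.read_sql_table('demo_versioning', engine))

Against a PostGIS-enabled database, the create_all() call above would roughly correspond to create_ding0_sql_tables(engine, ding0_schema), and df_sql_write() would be fed the dataframes produced by the export routines.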
rename from ding0/tools/export.py rename to ding0/io/export.py diff --git a/ding0/tools/file_export.py b/ding0/io/file_export.py similarity index 100% rename from ding0/tools/file_export.py rename to ding0/io/file_export.py From 768c6c31ac5957f9803c394001f11e8e1bd35e93 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 10 Sep 2018 15:08:16 +0200 Subject: [PATCH 048/215] #270 added all ding0 tables to create_ding0_sql_tables --- ding0/io/db_export.py | 215 +++++++++++++++++++++++++++++++++++++----- 1 file changed, 194 insertions(+), 21 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 70d551f8..38620d97 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -33,7 +33,7 @@ metadata = Base.metadata DING0_TABLES = {'versioning': 'ding0_versioning', - 'lines': 'ding0_line', + 'line': 'ding0_line', 'lv_branchtee': 'ding0_lv_branchtee', 'lv_generator': 'ding0_lv_generator', 'lv_load': 'ding0_lv_load', @@ -106,26 +106,199 @@ def create_ding0_sql_tables(engine, ding0_schema): # ding0 lines table - lines = Table(DING0_TABLES['lines'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('edge_name', String(100)), - Column('grid_name', String(100)), - Column('node1', String(100)), - Column('node2', String(100)), - Column('type_kind', String(100)), - Column('type_name', String(100)), - Column('length', Float(10)), - Column('u_n', Float(10)), - Column('c', Float(10)), - Column('l', Float(10)), - Column('r', Float(10)), - Column('i_max_th', Float(10)), - Column('geom', Geometry('LINESTRING', 4326)), - schema=ding0_schema, - comment="""This is a commment on table for the ding0 lines table""" - ) + line = Table(DING0_TABLES['ding0_line'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('edge_name', String(100)), + Column('grid_name', String(100)), + Column('node1', String(100)), + Column('node2', String(100)), + Column('type_kind', String(100)), + Column('type_name', String(100)), + Column('length', Float(10)), + Column('u_n', Float(10)), + Column('c', Float(10)), + Column('l', Float(10)), + Column('r', Float(10)), + Column('i_max_th', Float(10)), + Column('geom', Geometry('LINESTRING', 4326)), + schema=ding0_schema, + comment="""This is a commment on table for the ding0 lines table""" + ) + + # ding0 lv_branchtee table + lv_branchtee = Table(DING0_TABLES['ding0_lv_branchtee'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment="""This is a commment on table for the ding0 lines table""" + ) + + # ding0 lv_generator table + lv_generator = Table(DING0_TABLES['ding0_lv_generator'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('la_id', BigInteger), + Column('name', String(100)), + Column('lv_grid_id', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('type', String(22)), + Column('subtype', String(22)), + Column('v_level', Integer), + Column('nominal_capacity', Float(10)), + Column('weather_cell_id', BigInteger), + Column('is_aggregated', Boolean), 
+ schema=ding0_schema, + comment="""This is a commment on table for the ding0 lines table""" + ) + + # ding0 lv_load table + lv_load = Table(DING0_TABLES['ding0_lv_load'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('name', String(100)), + Column('lv_grid_id', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('consumption', String(100)), + schema=ding0_schema, + comment="""This is a commment on table for the ding0 lines table""" + ) + + # ding0 lv_station table + lv_station = Table(DING0_TABLES['ding0_lv_station'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment="""This is a commment on table for the ding0 lines table""" + ) + + # ding0 mvlv_transformer table + mvlv_transformer = Table(DING0_TABLES['ding0_mvlv_transformer'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + Column('voltage_op', Float(10)), + Column('s_nom', Float(10)), + Column('x', Float(10)), + Column('r', Float(10)), + schema=ding0_schema, + comment="""This is a commment on table for the ding0 lines table""" + ) + + # ding0 mvlv_mapping table + mvlv_mapping = Table(DING0_TABLES['ding0_mvlv_mapping'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('lv_grid_id', BigInteger), + Column('lv_grid_name', String(100)), + Column('mv_grid_id', BigInteger), + Column('mv_grid_name', String(100)), + ) + + # ding0 mv_branchtee table + mv_branchtee = Table(DING0_TABLES['ding0_mv_branchtee'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment="""This is a commment on table for the ding0 lines table""" + ) + + # ding0 mv_circuitbreaker table + mv_circuitbreaker = Table(DING0_TABLES['ding0_mv_circuitbreaker'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + Column('status', String(10)), + schema=ding0_schema, + comment="""This is a commment on table for the ding0 lines table""" + ) + + # ding0 mv_generator table + mv_generator = Table(DING0_TABLES['ding0_mv_generator'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('name', String(100)), + Column('geom', Geometry('POINT', 4326)), + Column('type', String(22)), + Column('subtype', String(22)), + Column('v_level', Integer), + Column('nominal_capacity', Float(10)), + Column('weather_cell_id', BigInteger), + Column('is_aggregated', Boolean), + schema=ding0_schema, + comment="""This is a commment on table for the ding0 lines table""" + ) + + # ding0 mv_load table + mv_load = 
Table(DING0_TABLES['ding0_mv_load'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('name', String(100)), + Column('geom', Geometry('LINESTRING', 4326)), + Column('is_aggregated', Boolean), + Column('consumption', String(100)), + schema=ding0_schema, + comment="""This is a commment on table for the ding0 lines table""" + ) + + # ding0 mv_grid table + mv_grid = Table(DING0_TABLES['ding0_mv_grid'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('LINESTRING', 4326)), + Column('name', String(100)), + Column('population', BigInteger), + Column('voltage_nom', Float(10)), + schema=ding0_schema, + comment="""This is a commment on table for the ding0 lines table""" + ) + + + + # ding0 mv_station table + mv_station = Table(DING0_TABLES['ding0_mv_station'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('LINESTRING', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment="""This is a commment on table for the ding0 lines table""" + ) + + # ding0 hvmv_transformer table + hvmv_transformer = Table(DING0_TABLES['ding0_hvmv_transformer'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('LINESTRING', 4326)), + Column('name', String(100)), + Column('voltage_op', Float(10)), + Column('s_nom', Float(10)), + Column('x', Float(10)), + Column('r', Float(10)), + schema=ding0_schema, + comment="""This is a commment on table for the ding0 lines table""" + ) # create all the tables metadata.create_all(engine, checkfirst=True) From bc13bd94a4d2608491a3be2d7855d8aea321648a Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 10 Sep 2018 15:14:18 +0200 Subject: [PATCH 049/215] #270 changed func. name to export_data_to_db and minor changes to func. 
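Every table defined in the patch above follows the same pattern: a surrogate 'id' primary key, a 'run_id' column with a ForeignKey onto the versioning table's run_id, and the payload columns. The sketch below shows only that versioning pattern with shortened stand-in names (demo_versioning, demo_line) and without Geometry columns, so it runs against an in-memory SQLite engine; it is not part of the patch itself.

# SQLAlchemy 1.x style, matching the table definitions above
from sqlalchemy import (create_engine, MetaData, Table, Column,
                        BigInteger, Integer, String, ForeignKey, select)

engine = create_engine('sqlite://')
meta = MetaData()

demo_versioning = Table('demo_versioning', meta,
                        Column('run_id', BigInteger, primary_key=True, autoincrement=False),
                        Column('description', String(3000)))

demo_line = Table('demo_line', meta,
                  Column('id', Integer, primary_key=True),
                  Column('run_id', BigInteger,
                         ForeignKey(demo_versioning.c.run_id), nullable=False),
                  Column('edge_name', String(100)))

meta.create_all(engine, checkfirst=True)

with engine.begin() as conn:
    # the run is registered once in the versioning table;
    # every component row carries the same run_id
    conn.execute(demo_versioning.insert().values(run_id=20180910, description='demo run'))
    conn.execute(demo_line.insert().values(run_id=20180910, edge_name='branch_1'))
    print(conn.execute(select([demo_line])
                       .where(demo_line.c.run_id == 20180910)).fetchall())

Keying every row by run_id is what lets the export code treat one ding0 run as a unit: the versioning entry is written once, all component tables reference it, and a run that is already present can be rejected (or cleaned up) as a whole.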
---
 ding0/io/db_export.py | 36 ++++++++++++++++++------------------
 1 file changed, 18 insertions(+), 18 deletions(-)

diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py
index 38620d97..e0ec8af2 100644
--- a/ding0/io/db_export.py
+++ b/ding0/io/db_export.py
@@ -303,7 +303,7 @@ def create_ding0_sql_tables(engine, ding0_schema):
     # create all the tables
     metadata.create_all(engine, checkfirst=True)
-def export_network_to_oedb(session, schema, table, tabletype, srid):
+def export_network_to_db(session, schema, table, tabletype, srid):
     dataset = []
     engine = create_engine("sqlite:///myexample.db")
     print("Exporting table type : {}".format(tabletype))
@@ -539,9 +539,9 @@ def export_data_to_db(session, schema, run_id, metadata_json, srid,
 # # oedb_versioning = pd.read_sql_query(oedb_versioning_query.statement,
 # session.bind)
-    oedb_versioning = pd.DataFrame()
+    db_versioning = pd.DataFrame()
-    if oedb_versioning.empty:
+    if db_versioning.empty:
         # if the run_id doesn't exist then
         # create entry into ego_grid_ding0_versioning:
         metadata_df = pd.DataFrame({'run_id': run_id,
@@ -555,21 +555,21 @@
                                   , axis=1)
         session.commit()
-        export_network_to_oedb(session, lv_grid, 'lv_grid', srid)
-        export_network_to_oedb(session, lv_gen, 'lv_gen', srid)
-        export_network_to_oedb(session, lv_cd, 'lv_cd', srid)
-        export_network_to_oedb(session, lv_stations, 'lv_station', srid)
-        export_network_to_oedb(session, mvlv_trafos, 'mvlv_trafo', srid)
-        export_network_to_oedb(session, lv_loads, 'lv_load', srid)
-        export_network_to_oedb(session, mv_grid, 'mv_grid', srid)
-        export_network_to_oedb(session, mv_gen, 'mv_gen', srid)
-        export_network_to_oedb(session, mv_cb, 'mv_cb', srid)
-        export_network_to_oedb(session, mv_cd, 'mv_cd', srid)
-        export_network_to_oedb(session, mv_stations, 'mv_station', srid)
-        export_network_to_oedb(session, hvmv_trafos, 'hvmv_trafo', srid)
-        export_network_to_oedb(session, mv_loads, 'mv_load', srid)
-        export_network_to_oedb(session, lines, 'line', srid)
-        export_network_to_oedb(session, mvlv_mapping, 'mvlv_mapping', srid)
+        export_network_to_db(session, lv_grid, 'lv_grid', srid)
+        export_network_to_db(session, lv_gen, 'lv_gen', srid)
+        export_network_to_db(session, lv_cd, 'lv_cd', srid)
+        export_network_to_db(session, lv_stations, 'lv_station', srid)
+        export_network_to_db(session, mvlv_trafos, 'mvlv_trafo', srid)
+        export_network_to_db(session, lv_loads, 'lv_load', srid)
+        export_network_to_db(session, mv_grid, 'mv_grid', srid)
+        export_network_to_db(session, mv_gen, 'mv_gen', srid)
+        export_network_to_db(session, mv_cb, 'mv_cb', srid)
+        export_network_to_db(session, mv_cd, 'mv_cd', srid)
+        export_network_to_db(session, mv_stations, 'mv_station', srid)
+        export_network_to_db(session, hvmv_trafos, 'hvmv_trafo', srid)
+        export_network_to_db(session, mv_loads, 'mv_load', srid)
+        export_network_to_db(session, lines, 'line', srid)
+        export_network_to_db(session, mvlv_mapping, 'mvlv_mapping', srid)
     else:
         raise KeyError("run_id already present!
No tables are input!") From 0a81fce3da1b35704eb4dc11857c31e6c2521ef4 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Tue, 11 Sep 2018 10:16:52 +0200 Subject: [PATCH 050/215] Minor fixes --- ding0/io/db_export.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index e0ec8af2..a26bbcc6 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -77,11 +77,11 @@ def df_sql_write(dataframe, db_table, engine): """ sql_write_df = dataframe.copy() sql_write_df.columns = sql_write_df.columns.map(str.lower) - sql_write_df = sql_write_df.set_index('id') - sql_write_df.to_sql(db_table.name, con=engine, if_exists='append') + # sql_write_df = sql_write_df.set_index('id') + sql_write_df.to_sql(db_table.name, con=engine, if_exists='append', index=None) -def create_ding0_sql_tables(engine, ding0_schema): +def create_ding0_sql_tables(engine, ding0_schema=None): """ Create the ding0 tables @@ -90,10 +90,9 @@ def create_ding0_sql_tables(engine, ding0_schema): engine: :py:mod:`sqlalchemy.engine.base.Engine` Sqlalchemy database engine - schema: :obj:`str` + ding0_schema: :obj:`str` The schema in which the tables are to be created - Returns - ------- + Default: None """ # versioning table @@ -304,8 +303,6 @@ def create_ding0_sql_tables(engine, ding0_schema): metadata.create_all(engine, checkfirst=True) def export_network_to_db(session, schema, table, tabletype, srid): - dataset = [] - engine = create_engine("sqlite:///myexample.db") print("Exporting table type : {}".format(tabletype)) if tabletype == 'line': table.apply(lambda row: From 89c5c4c493688e6049c16796abacc126de6fde99 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 12 Sep 2018 12:03:45 +0200 Subject: [PATCH 051/215] #270 updated metadatastring rdy to apply to db --- ...ego_grid_ding0_hvmv_transformer_metadata_v1.3.json | 9 +++------ .../ego_grid_ding0_line_metadata_v1.3.json | 9 +++------ .../ego_grid_ding0_lv_branchtee_metadata_v1.3.json | 9 +++------ .../ego_grid_ding0_lv_generator_metadata_v1.3.json | 9 +++------ .../ego_grid_ding0_lv_grid_metadata_v1.3.json | 9 +++------ .../ego_grid_ding0_lv_load_metadata_v1.3.json | 9 +++------ .../ego_grid_ding0_lv_station_metadata_v1.3.json | 11 ++++------- .../ego_grid_ding0_mv_branchtee_metadata_v1.3.json | 11 ++++------- ...go_grid_ding0_mv_circuitbreaker_metadata_v1.3.json | 9 +++------ .../ego_grid_ding0_mv_generator_metadata_v1.3.json | 9 +++------ .../ego_grid_ding0_mv_grid_metadata_v1.3.json | 9 +++------ .../ego_grid_ding0_mv_load_metadata_v1.3.json | 9 +++------ .../ego_grid_ding0_mv_station_metadata_v1.3.json | 9 +++------ .../ego_grid_ding0_mvlv_mapping_metadata_v1.3.json | 9 +++------ ...ego_grid_ding0_mvlv_transformer_metadata_v1.3.json | 11 ++++------- .../ego_grid_ding0_versioning_metadata_v1.3.json | 9 +++------ 16 files changed, 51 insertions(+), 99 deletions(-) diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json index 3756f4b7..6352fd91 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": "none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": 
"","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", "name": "Open Data Commons Open Database License 1.0", @@ -25,7 +22,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date":"", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_hvmv_transformer", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json index 27d57335..47379053 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": "none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", "name": "Open Data Commons Open Database License 1.0", @@ -25,7 +22,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date": "", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_line", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json index a6d8132a..98123479 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": "none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": 
"ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", "name": "Open Data Commons Open Database License 1.0", @@ -25,7 +22,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date": "", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_lv_branchtee", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json index 7106dbfe..a8a06fe8 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": "none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", "name": "Open Data Commons Open Database License 1.0", @@ -25,7 +22,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date": "", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_lv_generator", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json index afc52dc4..b2b9da00 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": 
"none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", "name": "Open Data Commons Open Database License 1.0", @@ -25,7 +22,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date": "", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_lv_grid", "format": "PostgreSQL", "fields": [ {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json index 81da365f..57a192d6 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": "none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", "name": "Open Data Commons Open Database License 1.0", @@ -25,7 +22,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date": "", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_lv_load", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json index 3d443767..c6b211bd 100644 --- 
a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": "none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "", "name": "", @@ -25,7 +22,7 @@ "copyright": ""}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date": "", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_lv_station", "format": "PostgreSQL", @@ -34,7 +31,7 @@ {"name": "run_id","discription": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, - {"name": "name","discription": "FIXME","unit": "string"}, + {"name": "name","discription": "FIXME","unit": "string"} ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json index 42dda963..ec7e627f 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": "none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", "name": "Open Data Commons Open Database License 1.0", @@ -25,7 +22,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", 
"email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date": "", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_branchtee", "format": "PostgreSQL", @@ -34,7 +31,7 @@ {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, {"name": "id_db","discription": "geometric coordinates","unit": "integer"}, {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, - {"name": "name","discription": "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"} ] } ] + {"name": "name","discription": "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"} ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json index 2a7a62b6..06cfd6dc 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": "none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", "name": "Open Data Commons Open Database License 1.0", @@ -25,7 +22,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date": "", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_circuitbreaker", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json index 80386b4e..1aa6962f 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": "none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": 
"","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", "name": "Open Data Commons Open Database License 1.0", @@ -25,7 +22,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date": "", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_generator", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json index d8088ea0..63671b53 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": "none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", "name": "Open Data Commons Open Database License 1.0", @@ -25,7 +22,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date": "", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_grid", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json index 20e14088..4fcb13e8 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": "none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": 
"ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", "name": "Open Data Commons Open Database License 1.0", @@ -25,7 +22,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date": "", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_load", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json index 557921e7..7cde8894 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": "none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", "name": "Open Data Commons Open Database License 1.0", @@ -25,7 +22,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date": "", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mv_station", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json index 5db5285a..687ae496 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": 
"none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", "name": "Open Data Commons Open Database License 1.0", @@ -25,7 +22,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date": "", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mvlv_mapping", "format": "PostgreSQL", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json index bb078fb0..170e6039 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": "none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", "name": "Open Data Commons Open Database License 1.0", @@ -25,7 +22,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date": "", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_mvlv_transformer", "format": "PostgreSQL", @@ -37,7 +34,7 @@ {"name": "name","discription": "FIXME","unit": "string"}, {"name": "voltage_op","discription": "in kV","unit": "float"}, {"name": "s_nom","discription": "nominal apparent power in kVA","unit": "float"}, - {"name": "x","discription": "in 
Ohm","unitfloat"}, + {"name": "x","discription": "in Ohm","unit": "float"}, {"name": "r","discription": "in Ohm","unit": "float"}] } ], "metadata_version": "1.3", "_comment": { diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json index b04a3c3e..b85a8f70 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json @@ -11,11 +11,8 @@ "end": "none", "resolution": "none"}, "sources": [ - {"name": "open_eGo","description": "Metadata","url": "https://github.com/openego","license": "ODbL-1.0","copyright": "© Reiner Lemoine Institut"}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""}, - {"name": "","description": "","url": "","license": "","copyright": ""} ], + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", "name": "Open Data Commons Open Database License 1.0", @@ -25,7 +22,7 @@ "copyright": "© Reiner Lemoine Institut"}, "contributors": [ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, - {"name": "", "email": "", "date": "", "comment": ""} ], + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_versioning", "format": "PostgreSQL", From 05caf26008bda30d026d9a1dbe32d168bb74badc Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 12 Sep 2018 14:28:25 +0200 Subject: [PATCH 052/215] #270 minor changes to metadata --- .../ego_grid_ding0_hvmv_transformer_metadata_v1.3.json | 1 + .../ego_grid_ding0_line_metadata_v1.3.json | 1 + .../ego_grid_ding0_lv_branchtee_metadata_v1.3.json | 1 + .../ego_grid_ding0_lv_generator_metadata_v1.3.json | 1 + .../ego_grid_ding0_lv_grid_metadata_v1.3.json | 1 + .../ego_grid_ding0_lv_load_metadata_v1.3.json | 1 + .../ego_grid_ding0_lv_station_metadata_v1.3.json | 1 + .../ego_grid_ding0_mv_branchtee_metadata_v1.3.json | 1 + .../ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json | 1 + .../ego_grid_ding0_mv_generator_metadata_v1.3.json | 1 + .../ego_grid_ding0_mv_grid_metadata_v1.3.json | 1 + .../ego_grid_ding0_mv_load_metadata_v1.3.json | 1 + .../ego_grid_ding0_mv_station_metadata_v1.3.json | 1 + .../ego_grid_ding0_mvlv_mapping_metadata_v1.3.json | 1 + .../ego_grid_ding0_mvlv_transformer_metadata_v1.3.json | 1 + .../ego_grid_ding0_versioning_metadata_v1.3.json | 1 + 16 files changed, 16 insertions(+) diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json index 6352fd91..5c871f68 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, 
"sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json index 47379053..38ccc315 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, "sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json index 98123479..593940a7 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, "sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json index a8a06fe8..c45fb0e5 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, "sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, 
open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json index b2b9da00..d638a88d 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, "sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json index 57a192d6..e03a53b3 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, "sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json index c6b211bd..03aa2ab3 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, "sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json index 
ec7e627f..3a62d92d 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, "sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json index 06cfd6dc..bd7747b2 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, "sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json index 1aa6962f..695091a9 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, "sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json index 63671b53..1df8da96 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, "sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public 
License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json index 4fcb13e8..7e4aafa1 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, "sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json index 7cde8894..89bdb3f7 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, "sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json index 687ae496..ba292b8e 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, "sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": 
"https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json index 170e6039..0bd616a8 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, "sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json index b85a8f70..36010ebb 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json @@ -12,6 +12,7 @@ "resolution": "none"}, "sources": [ {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "© Copyright 2015-2018, open_eGo-Team"}, {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "© 2017 openego project group"} ], "license": {"id": "ODbL-1.0", From 790d3846baa9129bbcb097eab73632ddd965a35d Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 12 Sep 2018 14:54:16 +0200 Subject: [PATCH 053/215] #270 minor changes --- ding0/io/db_export.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index a26bbcc6..ee06403e 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -123,7 +123,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('i_max_th', Float(10)), Column('geom', Geometry('LINESTRING', 4326)), schema=ding0_schema, - comment="""This is a commment on table for the ding0 lines table""" + comment="""This is a comment on table for the ding0 lines table""" ) # ding0 lv_branchtee table @@ -134,7 +134,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('geom', Geometry('POINT', 4326)), Column('name', String(100)), schema=ding0_schema, - comment="""This is a commment on table for the ding0 lines table""" + comment="""This is a comment on table for the ding0 lines table""" ) # ding0 lv_generator table @@ -153,7 +153,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('weather_cell_id', BigInteger), Column('is_aggregated', Boolean), schema=ding0_schema, - 
comment="""This is a commment on table for the ding0 lines table""" + comment="""This is a comment on table for the ding0 lines table""" ) # ding0 lv_load table @@ -166,7 +166,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('geom', Geometry('POINT', 4326)), Column('consumption', String(100)), schema=ding0_schema, - comment="""This is a commment on table for the ding0 lines table""" + comment="""This is a comment on table for the ding0 lines table""" ) # ding0 lv_station table @@ -192,7 +192,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('x', Float(10)), Column('r', Float(10)), schema=ding0_schema, - comment="""This is a commment on table for the ding0 lines table""" + comment="""This is a comment on table for the ding0 lines table""" ) # ding0 mvlv_mapping table @@ -213,7 +213,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('geom', Geometry('POINT', 4326)), Column('name', String(100)), schema=ding0_schema, - comment="""This is a commment on table for the ding0 lines table""" + comment="""This is a comment on table for the ding0 lines table""" ) # ding0 mv_circuitbreaker table @@ -225,7 +225,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('name', String(100)), Column('status', String(10)), schema=ding0_schema, - comment="""This is a commment on table for the ding0 lines table""" + comment="""This is a comment on table for the ding0 lines table""" ) # ding0 mv_generator table @@ -242,7 +242,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('weather_cell_id', BigInteger), Column('is_aggregated', Boolean), schema=ding0_schema, - comment="""This is a commment on table for the ding0 lines table""" + comment="""This is a comment on table for the ding0 lines table""" ) # ding0 mv_load table @@ -255,7 +255,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('is_aggregated', Boolean), Column('consumption', String(100)), schema=ding0_schema, - comment="""This is a commment on table for the ding0 lines table""" + comment="""This is a comment on table for the ding0 lines table""" ) # ding0 mv_grid table @@ -263,12 +263,12 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), - Column('geom', Geometry('LINESTRING', 4326)), + Column('geom', Geometry('MULTIPOLYGON', 4326)), Column('name', String(100)), Column('population', BigInteger), Column('voltage_nom', Float(10)), schema=ding0_schema, - comment="""This is a commment on table for the ding0 lines table""" + comment="""This is a comment on table for the ding0 lines table""" ) @@ -278,10 +278,10 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), - Column('geom', Geometry('LINESTRING', 4326)), + Column('geom', Geometry('POINT', 4326)), Column('name', String(100)), schema=ding0_schema, - comment="""This is a commment on table for the ding0 lines table""" + comment="""This is a comment on table for the ding0 lines table""" ) # ding0 hvmv_transformer table @@ -289,14 +289,14 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), - Column('geom', Geometry('LINESTRING', 
4326)), + Column('geom', Geometry('POINT', 4326)), Column('name', String(100)), Column('voltage_op', Float(10)), Column('s_nom', Float(10)), Column('x', Float(10)), Column('r', Float(10)), schema=ding0_schema, - comment="""This is a commment on table for the ding0 lines table""" + comment="""This is a comment on table for the ding0 lines table""" ) # create all the tables From 8f0437063ee5b99ca67456314987fede093d9ab3 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 12 Sep 2018 18:02:07 +0200 Subject: [PATCH 054/215] #270 added read metadata file fnc in new file --- ding0/io/read_metadata_file.py | 54 ++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) create mode 100644 ding0/io/read_metadata_file.py diff --git a/ding0/io/read_metadata_file.py b/ding0/io/read_metadata_file.py new file mode 100644 index 00000000..f0bc8002 --- /dev/null +++ b/ding0/io/read_metadata_file.py @@ -0,0 +1,54 @@ +"""This file is part of DINGO, the DIstribution Network GeneratOr. +DINGO is a tool to generate synthetic medium and low voltage power +distribution grids based on open data. + +It is developed in the project open_eGo: https://openegoproject.wordpress.com + +DING0 lives at github: https://github.com/openego/ding0/ +The documentation is available on RTD: http://ding0.readthedocs.io""" + +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" +__author__ = "jh-RLI" + +from egoio.tools.db import connection +from egoio.db_tables import model_draft as md + +#from sqlalchemy import MetaData, ARRAY, BigInteger, Boolean, CheckConstraint, Column, Date, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, Integer, JSON, Numeric, SmallInteger, String, Table, Text, UniqueConstraint, text +#from geoalchemy2.types import Geometry, Raster +#from sqlalchemy.orm import sessionmaker + +from pathlib import Path +import json +import os + +#con = connection() + +#query orm style +#Session = sessionmaker() +#Session.configure(bind=con) +#session = Session() + + + +#load data from json file +#mds = metadatastring +def load_json(): + + # JSON metadatastring folder. Static path for windows + FOLDER = Path('C:\ego_grid_ding0_metadatastrings') + print(FOLDER) + full_dir = os.walk(FOLDER.parent / FOLDER.name) + jsonmetadata = [] + + for jsonfiles in full_dir: + for jsonfile in jsonfiles: + #if jsonfile[-4:] == 'json': + jsonmetadata = jsonfile + + + #with open('JSONMETADATA') as f: + #mds = json.load(f) + +load_json() \ No newline at end of file From 1d38c74dea22bbb64ccb7c7529b38c7a4e8a0533 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 13 Sep 2018 11:54:38 +0200 Subject: [PATCH 055/215] #270 added new fnc. for metadatafile import --- ding0/io/read_metadata_file.py | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/ding0/io/read_metadata_file.py b/ding0/io/read_metadata_file.py index f0bc8002..f4a09302 100644 --- a/ding0/io/read_metadata_file.py +++ b/ding0/io/read_metadata_file.py @@ -30,25 +30,29 @@ #Session.configure(bind=con) #session = Session() +# JSON metadatastring folder. Static path for windows +FOLDER = Path('C:/ego_grid_ding0_metadatastrings') - -#load data from json file -#mds = metadatastring -def load_json(): - - # JSON metadatastring folder. 
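For reference, every table touched by the geometry fixes in db_export.py above follows the same SQLAlchemy/GeoAlchemy2 pattern: a schema-qualified Table with a Geometry column in SRID 4326 and a comment on table. A minimal self-contained sketch of that pattern follows; the table name, schema name and comment text are illustrative placeholders rather than the exact ding0 definitions.

from sqlalchemy import MetaData, Table, Column, Integer, BigInteger, String
from geoalchemy2.types import Geometry

metadata = MetaData()

# Illustrative only: stations are POINTs, grid districts are MULTIPOLYGONs
# (see the corrected geom columns above).
example_mv_station = Table(
    'example_mv_station', metadata,
    Column('id', Integer, primary_key=True),
    Column('run_id', BigInteger, nullable=False),
    Column('id_db', BigInteger),
    Column('geom', Geometry('POINT', 4326)),
    Column('name', String(100)),
    schema='topology',
    comment='This is a comment on table for the example station table',
)

# metadata.create_all(engine, checkfirst=True) would emit the CREATE TABLE and
# COMMENT ON TABLE statements against a PostGIS-enabled engine.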
Static path for windows - FOLDER = Path('C:\ego_grid_ding0_metadatastrings') +# load data from json file +def load_json_files(): print(FOLDER) full_dir = os.walk(FOLDER.parent / FOLDER.name) jsonmetadata = [] for jsonfiles in full_dir: for jsonfile in jsonfiles: - #if jsonfile[-4:] == 'json': + #if jsonfile[:4] == 'json': jsonmetadata = jsonfile + return jsonmetadata - #with open('JSONMETADATA') as f: - #mds = json.load(f) +# Prepares the JSON String for the sql comment on table +# mds = metadatastring +def add_metadata_todb(): + for file in load_json_files(): + JSONFILEPATH = FOLDER / file + with open(JSONFILEPATH) as f: + mds = json.load(f) + print(mds) -load_json() \ No newline at end of file +add_metadata_todb() \ No newline at end of file From e419e358d4545e02d621feea251b3f781f3e7ed4 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 13 Sep 2018 11:59:09 +0200 Subject: [PATCH 056/215] #270 verified all metadatastrings included in branch ding0_grid_metadatastring --- .../ego_grid_ding0_lv_generator_metadata_v1.3.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json index c45fb0e5..f52e8796 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json @@ -35,11 +35,11 @@ {"name": "name","discription": "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'","unit": "string"}, {"name": "lv_grid_id","discription": "unambiguous id_db of LV-Grid","unit": "integer"}, {"name": "geom","discription": "geometric coordinates","unit": "WGS84, POINT"}, - {"name": "type","discription": "type of generation","{solar; biomass}","unit": "string"}, + {"name": "type","discription": "type of generation {solar; biomass}","unit": "string"}, {"name": "subtype","discription": "subtype of generation: {solar_roof_mounted, unknown; biomass}","unit": "string"}, {"name": "v_level","discription": "voltage level of generator","unit": "integer"}, {"name": "nominal_capacity","discription": "nominal capacity","unit": "float"}, - {"name": "is_aggregated","discription": "True if load is aggregated load, else False","unit": "boolean"}; + {"name": "is_aggregated","discription": "True if load is aggregated load, else False","unit": "boolean"}, {"name": "weather_cell_id","discription": "unambiguous number of the corresponding weather cell","unit": "integer"} ] } ], "metadata_version": "1.3", "_comment": { From b1b716b33879883a8f519ed5dfd773d9ea1e29de Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 13 Sep 2018 12:06:23 +0200 Subject: [PATCH 057/215] #270 minor changes --- ding0/io/read_metadata_file.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ding0/io/read_metadata_file.py b/ding0/io/read_metadata_file.py index f4a09302..8401016f 100644 --- a/ding0/io/read_metadata_file.py +++ b/ding0/io/read_metadata_file.py @@ -12,8 +12,8 @@ __url__ = "https://github.com/openego/ding0/blob/master/LICENSE" __author__ = "jh-RLI" -from egoio.tools.db import connection -from egoio.db_tables import model_draft as md +#from egoio.tools.db import connection +#from egoio.db_tables import model_draft as md #from sqlalchemy import MetaData, ARRAY, BigInteger, Boolean, CheckConstraint, Column, Date, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, Integer, JSON, Numeric, SmallInteger, String, Table, Text, 
UniqueConstraint, text #from geoalchemy2.types import Geometry, Raster @@ -48,11 +48,11 @@ def load_json_files(): # Prepares the JSON String for the sql comment on table # mds = metadatastring -def add_metadata_todb(): +def prepare_metadatastring_fordb(): for file in load_json_files(): JSONFILEPATH = FOLDER / file with open(JSONFILEPATH) as f: mds = json.load(f) print(mds) -add_metadata_todb() \ No newline at end of file +prepare_metadatastring_fordb() \ No newline at end of file From 13a8b7805c09d6c723ac07544f508ff88de5f4da Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 13 Sep 2018 15:43:45 +0200 Subject: [PATCH 058/215] #270 completed fnc. to commit a metadata string as comment on table --- ding0/io/read_metadata_file.py | 62 ++++++++++++++++++++++++++++------ 1 file changed, 51 insertions(+), 11 deletions(-) diff --git a/ding0/io/read_metadata_file.py b/ding0/io/read_metadata_file.py index 8401016f..eff0099c 100644 --- a/ding0/io/read_metadata_file.py +++ b/ding0/io/read_metadata_file.py @@ -12,27 +12,50 @@ __url__ = "https://github.com/openego/ding0/blob/master/LICENSE" __author__ = "jh-RLI" -#from egoio.tools.db import connection +from egoio.tools.db import connection #from egoio.db_tables import model_draft as md -#from sqlalchemy import MetaData, ARRAY, BigInteger, Boolean, CheckConstraint, Column, Date, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, Integer, JSON, Numeric, SmallInteger, String, Table, Text, UniqueConstraint, text -#from geoalchemy2.types import Geometry, Raster -#from sqlalchemy.orm import sessionmaker +from sqlalchemy import MetaData, ARRAY, BigInteger, Boolean, CheckConstraint, Column, Date, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, Integer, JSON, Numeric, SmallInteger, String, Table, Text, UniqueConstraint, text +from geoalchemy2.types import Geometry, Raster +from sqlalchemy.orm import sessionmaker +from sqlalchemy.ext.declarative import declarative_base from pathlib import Path import json import os -#con = connection() +con = connection() #query orm style #Session = sessionmaker() #Session.configure(bind=con) #session = Session() -# JSON metadatastring folder. Static path for windows +Base = declarative_base() +metadata = Base.metadata + +# metadatastring file folder. 
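The helper being assembled in read_metadata_file.py walks a local folder of metadata-string JSON files and parses each one. A compact sketch of the same idea, using pathlib's glob instead of os.walk, is shown below; the folder path is a placeholder and has to point at wherever the *_metadata_v1.3.json files are checked out.

import json
from pathlib import Path

# Placeholder location; the patch hard-codes C:/ego_grid_ding0_metadatastrings on Windows.
METADATA_FOLDER = Path('ego_grid_ding0_metadatastrings')

def iter_metadata_strings(folder=METADATA_FOLDER):
    """Yield (file name, parsed JSON dict) for every metadata string in the folder."""
    for json_path in sorted(folder.glob('*.json')):
        with json_path.open(encoding='utf-8') as f:
            yield json_path.name, json.load(f)

if __name__ == '__main__':
    # Example: print the title of each metadata string.
    for name, mds in iter_metadata_strings():
        print(name, '->', mds.get('title'))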
#ToDO: Test if Path works on other os (Tested on Windows7) +# Modify if folder name is different FOLDER = Path('C:/ego_grid_ding0_metadatastrings') +DING0_TABLES = {'versioning': 'ding0_versioning', + 'line': 'ding0_line', + 'lv_branchtee': 'ding0_lv_branchtee', + 'lv_generator': 'ding0_lv_generator', + 'lv_load': 'ding0_lv_load', + 'lv_grid': 'ding0_lv_grid', + 'lv_station': 'ding0_lv_station', + 'mvlv_transformer': 'ding0_mvlv_transformer', + 'mvlv_mapping': 'ding0_mvlv_mapping', + 'mv_branchtee': 'ding0_mv_branchtee', + 'mv_circuitbreaker': 'ding0_mv_circuitbreaker', + 'mv_generator': 'ding0_mv_generator', + 'mv_load': 'ding0_mv_load', + 'mv_grid': 'ding0_mv_grid', + 'mv_station': 'ding0_mv_station', + 'hvmv_transformer': 'ding0_hvmv_transformer'} + + # load data from json file def load_json_files(): print(FOLDER) @@ -41,18 +64,35 @@ def load_json_files(): for jsonfiles in full_dir: for jsonfile in jsonfiles: - #if jsonfile[:4] == 'json': + #if jsonfile[:4] == 'json': #ToDo: Add Execption jsonmetadata = jsonfile return jsonmetadata + # Prepares the JSON String for the sql comment on table # mds = metadatastring -def prepare_metadatastring_fordb(): +def prepare_metadatastring_fordb(table): for file in load_json_files(): JSONFILEPATH = FOLDER / file with open(JSONFILEPATH) as f: - mds = json.load(f) - print(mds) + if table in file: + mds = json.load(f) + return mds + + +def create_ding0_sql_tables(engine, ding0_schema): + # versioning table + versioning = Table(DING0_TABLES['versioning'], metadata, + Column('run_id', BigInteger, primary_key=True, autoincrement=False, nullable=False), + Column('description', String(3000)), + schema=ding0_schema, + comment="""This is a comment on table for the ding0 versioning table:""" + + str(prepare_metadatastring_fordb("versioning")) + ) + # create all the tables + metadata.create_all(engine, checkfirst=True) + +#prepare_metadatastring_fordb("versioning") -prepare_metadatastring_fordb() \ No newline at end of file +create_ding0_sql_tables(con, "topology") \ No newline at end of file From 3a1337a8e6705013641e526c9cab82a8c3d48dae Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 13 Sep 2018 15:48:26 +0200 Subject: [PATCH 059/215] #270 added comments for explanation --- ding0/io/read_metadata_file.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ding0/io/read_metadata_file.py b/ding0/io/read_metadata_file.py index eff0099c..713fd441 100644 --- a/ding0/io/read_metadata_file.py +++ b/ding0/io/read_metadata_file.py @@ -81,6 +81,7 @@ def prepare_metadatastring_fordb(table): return mds +# Copy from db_export for testing purpose included just for testing and review def create_ding0_sql_tables(engine, ding0_schema): # versioning table versioning = Table(DING0_TABLES['versioning'], metadata, @@ -93,6 +94,5 @@ def create_ding0_sql_tables(engine, ding0_schema): # create all the tables metadata.create_all(engine, checkfirst=True) -#prepare_metadatastring_fordb("versioning") - +# Test create_ding0_sql_tables(con, "topology") \ No newline at end of file From a056d95a0bc1a8bdc5a21be9adbb1b0d4d80e19c Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 13 Sep 2018 16:37:14 +0200 Subject: [PATCH 060/215] #270 fixed string syntax for comment on table, included pretty print --- ding0/io/read_metadata_file.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ding0/io/read_metadata_file.py b/ding0/io/read_metadata_file.py index 713fd441..e1cf00c7 100644 --- a/ding0/io/read_metadata_file.py +++ b/ding0/io/read_metadata_file.py @@ -78,7 
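prepare_metadatastring_fordb() has to match a short table key such as 'versioning' or 'mv_station' against the file names in the metadata folder and hand the file content to SQLAlchemy as the comment= argument. Below is a sketch of that lookup under the file-naming scheme used in this repository (ego_grid_ding0_<key>_metadata_v1.3.json); serialising back to a pretty-printed string with json.dumps is also what a later patch in this series settles on. The folder path and function name are illustrative, not ding0 API.

import json
from pathlib import Path

METADATA_FOLDER = Path('ego_grid_ding0_metadatastrings')  # placeholder location

def metadata_comment_for(table_key, folder=METADATA_FOLDER):
    """Return the metadata string for e.g. 'versioning' or 'mv_station' as a
    pretty-printed JSON string, ready for Table(..., comment=...)."""
    matches = sorted(folder.glob('*_{}_metadata_*.json'.format(table_key)))
    if not matches:
        raise FileNotFoundError('no metadata string found for {!r}'.format(table_key))
    with matches[0].open(encoding='utf-8') as f:
        return json.dumps(json.load(f), indent=4)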
+78,8 @@ def prepare_metadatastring_fordb(table): with open(JSONFILEPATH) as f: if table in file: mds = json.load(f) - return mds + mdsstring = json.dumps(mds, indent=4) + return mdsstring # Copy from db_export for testing purpose included just for testing and review @@ -88,8 +89,7 @@ def create_ding0_sql_tables(engine, ding0_schema): Column('run_id', BigInteger, primary_key=True, autoincrement=False, nullable=False), Column('description', String(3000)), schema=ding0_schema, - comment="""This is a comment on table for the ding0 versioning table:""" - + str(prepare_metadatastring_fordb("versioning")) + comment=prepare_metadatastring_fordb("versioning") ) # create all the tables metadata.create_all(engine, checkfirst=True) From fb1ff4904d4b14d6df9664bb33368daa6d62b2f6 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 13 Sep 2018 16:49:37 +0200 Subject: [PATCH 061/215] #270 minor changes --- ding0/io/db_export.py | 28 ++++++++++++++-------------- ding0/io/read_metadata_file.py | 12 ++++++++++++ 2 files changed, 26 insertions(+), 14 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index ee06403e..98d8c6f4 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -105,7 +105,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): # ding0 lines table - line = Table(DING0_TABLES['ding0_line'], metadata, + ding0_line = Table(DING0_TABLES['line'], metadata, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), @@ -127,7 +127,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): ) # ding0 lv_branchtee table - lv_branchtee = Table(DING0_TABLES['ding0_lv_branchtee'], metadata, + ding0_lv_branchtee = Table(DING0_TABLES['lv_branchtee'], metadata, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), @@ -138,7 +138,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): ) # ding0 lv_generator table - lv_generator = Table(DING0_TABLES['ding0_lv_generator'], metadata, + ding0_lv_generator = Table(DING0_TABLES['lv_generator'], metadata, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), @@ -157,7 +157,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): ) # ding0 lv_load table - lv_load = Table(DING0_TABLES['ding0_lv_load'], metadata, + ding0_lv_load = Table(DING0_TABLES['lv_load'], metadata, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), @@ -170,7 +170,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): ) # ding0 lv_station table - lv_station = Table(DING0_TABLES['ding0_lv_station'], metadata, + ding0_lv_station = Table(DING0_TABLES['lv_station'], metadata, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), @@ -181,7 +181,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): ) # ding0 mvlv_transformer table - mvlv_transformer = Table(DING0_TABLES['ding0_mvlv_transformer'], metadata, + ding0_mvlv_transformer = Table(DING0_TABLES['mvlv_transformer'], metadata, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), @@ -196,7 +196,7 
@@ def create_ding0_sql_tables(engine, ding0_schema=None): ) # ding0 mvlv_mapping table - mvlv_mapping = Table(DING0_TABLES['ding0_mvlv_mapping'], metadata, + ding0_mvlv_mapping = Table(DING0_TABLES['mvlv_mapping'], metadata, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('lv_grid_id', BigInteger), @@ -206,7 +206,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): ) # ding0 mv_branchtee table - mv_branchtee = Table(DING0_TABLES['ding0_mv_branchtee'], metadata, + ding0_mv_branchtee = Table(DING0_TABLES['mv_branchtee'], metadata, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), @@ -217,7 +217,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): ) # ding0 mv_circuitbreaker table - mv_circuitbreaker = Table(DING0_TABLES['ding0_mv_circuitbreaker'], metadata, + ding0_mv_circuitbreaker = Table(DING0_TABLES['mv_circuitbreaker'], metadata, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), @@ -229,7 +229,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): ) # ding0 mv_generator table - mv_generator = Table(DING0_TABLES['ding0_mv_generator'], metadata, + ding0_mv_generator = Table(DING0_TABLES['mv_generator'], metadata, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), @@ -246,7 +246,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): ) # ding0 mv_load table - mv_load = Table(DING0_TABLES['ding0_mv_load'], metadata, + ding0_mv_load = Table(DING0_TABLES['mv_load'], metadata, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), @@ -259,7 +259,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): ) # ding0 mv_grid table - mv_grid = Table(DING0_TABLES['ding0_mv_grid'], metadata, + ding0_mv_grid = Table(DING0_TABLES['mv_grid'], metadata, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), @@ -274,7 +274,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): # ding0 mv_station table - mv_station = Table(DING0_TABLES['ding0_mv_station'], metadata, + ding0_mv_station = Table(DING0_TABLES['mv_station'], metadata, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), @@ -285,7 +285,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): ) # ding0 hvmv_transformer table - hvmv_transformer = Table(DING0_TABLES['ding0_hvmv_transformer'], metadata, + ding0_hvmv_transformer = Table(DING0_TABLES['hvmv_transformer'], metadata, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), diff --git a/ding0/io/read_metadata_file.py b/ding0/io/read_metadata_file.py index e1cf00c7..ca1baea4 100644 --- a/ding0/io/read_metadata_file.py +++ b/ding0/io/read_metadata_file.py @@ -91,6 +91,18 @@ def create_ding0_sql_tables(engine, ding0_schema): schema=ding0_schema, comment=prepare_metadatastring_fordb("versioning") ) + + # ding0 mv_station table + ding0_mv_station = 
Table(DING0_TABLES['mv_station'], metadata, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("ding0_mv_station") + ) + # create all the tables metadata.create_all(engine, checkfirst=True) From 0aa404dbc762ce167da98314810c21c76819f1e8 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Fri, 14 Sep 2018 14:06:05 +0200 Subject: [PATCH 062/215] #270 minor changes --- ding0/io/read_metadata_file.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/ding0/io/read_metadata_file.py b/ding0/io/read_metadata_file.py index ca1baea4..affd8eea 100644 --- a/ding0/io/read_metadata_file.py +++ b/ding0/io/read_metadata_file.py @@ -13,23 +13,18 @@ __author__ = "jh-RLI" from egoio.tools.db import connection -#from egoio.db_tables import model_draft as md from sqlalchemy import MetaData, ARRAY, BigInteger, Boolean, CheckConstraint, Column, Date, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, Integer, JSON, Numeric, SmallInteger, String, Table, Text, UniqueConstraint, text from geoalchemy2.types import Geometry, Raster -from sqlalchemy.orm import sessionmaker from sqlalchemy.ext.declarative import declarative_base from pathlib import Path import json import os +# DB used for testing: reiners_db con = connection() -#query orm style -#Session = sessionmaker() -#Session.configure(bind=con) -#session = Session() Base = declarative_base() metadata = Base.metadata @@ -70,7 +65,7 @@ def load_json_files(): return jsonmetadata -# Prepares the JSON String for the sql comment on table +# Prepares the JSON String for the sql comment on table # ToDO: handel "Sonderzeichen" in SQL comment # mds = metadatastring def prepare_metadatastring_fordb(table): for file in load_json_files(): From 846a464a13aae029d01c17f4dcb054c2802596bd Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Sun, 16 Sep 2018 17:22:49 +0200 Subject: [PATCH 063/215] #270 minor changes --- ..._ding0_hvmv_transformer_metadata_v1.3.json | 18 +++++----- .../ego_grid_ding0_line_metadata_v1.3.json | 34 +++++++++---------- ...grid_ding0_lv_branchtee_metadata_v1.3.json | 2 +- ...grid_ding0_lv_generator_metadata_v1.3.json | 28 +++++++-------- .../ego_grid_ding0_lv_grid_metadata_v1.3.json | 16 ++++----- .../ego_grid_ding0_lv_load_metadata_v1.3.json | 16 ++++----- ...o_grid_ding0_lv_station_metadata_v1.3.json | 12 +++---- ...grid_ding0_mv_branchtee_metadata_v1.3.json | 12 +++---- ...ding0_mv_circuitbreaker_metadata_v1.3.json | 14 ++++---- ...grid_ding0_mv_generator_metadata_v1.3.json | 24 ++++++------- .../ego_grid_ding0_mv_grid_metadata_v1.3.json | 16 ++++----- .../ego_grid_ding0_mv_load_metadata_v1.3.json | 14 ++++---- ...o_grid_ding0_mv_station_metadata_v1.3.json | 12 +++---- ...grid_ding0_mvlv_mapping_metadata_v1.3.json | 14 ++++---- ..._ding0_mvlv_transformer_metadata_v1.3.json | 20 +++++------ ...o_grid_ding0_versioning_metadata_v1.3.json | 8 ++--- 16 files changed, 130 insertions(+), 130 deletions(-) diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json index 5c871f68..c8f82212 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json @@ -1,4 +1,4 @@ 
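Taken together, the workflow these patches converge on can be exercised roughly as follows. The import paths assume the package layout shown in the diffs, connection() relies on egoio's saved database configuration exactly as in the test call above, and the obj_description() lookup is only an illustrative way to confirm that a metadata string really landed as a COMMENT ON TABLE; it is not part of the patches themselves.

from sqlalchemy import text
from egoio.tools.db import connection
from ding0.io.db_export import create_ding0_sql_tables  # mirrored in ding0/io/read_metadata_file.py for testing

con = connection()                         # engine built from egoio's DB config, as used above
create_ding0_sql_tables(con, 'topology')   # creates the ding0_* tables with their metadata-string comments

# Check one table comment via the PostgreSQL catalog (SQLAlchemy 1.x style execute):
comment = con.execute(
    text("SELECT obj_description('topology.ding0_versioning'::regclass, 'pg_class')")
).scalar()
print(comment[:200] if comment else 'no comment found')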
-{"title": "DING0 - Result data", +{"title": "DING0 - Result data for hvmv transformer", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": @@ -28,14 +28,14 @@ {"name": "model_draft.ego_grid_ding0_hvmv_transformer", "format": "PostgreSQL", "fields": [ - {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, - {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, - {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, - {"name": "name","discription": "FIXME","unit": "string"}, - {"name": "voltage_op","discription": "FIXME","unit": "float"}, - {"name": "s_nom","discription": "nominal apparent power in kVA","unit": "float"}, - {"name": "x","discription": "in Ohm","unit": "float"}, - {"name": "r","discription": "in Ohm","unit": "float"} ] } ], + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","description": "FIXME","unit": "string"}, + {"name": "voltage_op","description": "FIXME","unit": "float"}, + {"name": "s_nom","description": "nominal apparent power as float","unit": "kVA"}, + {"name": "x","description": "as float","unit": "Ohm"}, + {"name": "r","description": "as float","unit": "Ohm"} ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json index 38ccc315..2318fdeb 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json @@ -1,4 +1,4 @@ -{"title": "DING0 - Result data", +{"title": "DING0 - Result data for line(cable)", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": @@ -28,22 +28,22 @@ {"name": "model_draft.ego_grid_ding0_line", "format": "PostgreSQL", "fields": [ - {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, - {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, - {"name": "id_db","discription": "unambiguous number of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge","unit": "integer"}, - {"name": "edge_name","discription": "unambiguous name of edge","unit": "string"}, - {"name": "grid_name","discription": "unambiguous name of grid","unit": "string"}, - {"name": "node1","discription": "id_db of first node","unit": "string"}, - {"name": "node2","discription": "id_db of second node","unit": "string"}, - {"name": "type_kind","discription": "n/a","unit": "string"}, - {"name": "type_name","discription": "n/a","unit": "string"}, - {"name": "length","discription": "length of line in km","unit": "float"}, - {"name": "u_n","discription": "nominal voltage in kV","unit": "float"}, - {"name": "c","discription": "inductive resistance at 50Hz in uF/km","unit": "float"}, - {"name": "l","discription": "in mH/km","unit": "float"}, - {"name": "r","discription": "in 
Ohm/km","unit": "float"}, - {"name": "i_max_th","discription": "in A","unit": "float"}, - {"name": "geom","discription": "geometric coordinates","unit": "WGS84 LINESTRING"} ] } ], + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge","unit": "integer"}, + {"name": "edge_name","description": "unambiguous name of edge","unit": "string"}, + {"name": "grid_name","description": "unambiguous name of grid","unit": "string"}, + {"name": "node1","description": "id_db of first node","unit": "string"}, + {"name": "node2","description": "id_db of second node","unit": "string"}, + {"name": "type_kind","description": "n/a","unit": "string"}, + {"name": "type_name","description": "n/a","unit": "string"}, + {"name": "length","description": "length of line as float","unit": "km"}, + {"name": "u_n","description": "nominal voltage as float","unit": "kV"}, + {"name": "c","description": "inductive resistance at 50Hz as float","unit": "uF/km"}, + {"name": "l","description": "stored as float","unit": " mH/km"}, + {"name": "r","description": "stored as float","unit": "Ohm/km"}, + {"name": "i_max_th","description": "stored as float","unit": "A"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 LINESTRING"} ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json index 593940a7..ffb8e188 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json @@ -1,4 +1,4 @@ -{"title": "DING0 - Result data", +{"title": "DING0 - Result data for lv branchtee", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json index f52e8796..02d3cb0b 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json @@ -1,4 +1,4 @@ -{"title": "DING0 - Result data", +{"title": "DING0 - Result data for lv generator", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": @@ -28,19 +28,19 @@ {"name": "model_draft.ego_grid_ding0_lv_generator", "format": "PostgreSQL", "fields": [ - {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, - {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, - {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, - {"name": "la_id","discription": "FIXME","unit": "integer"}, - {"name": "name","discription": "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'","unit": "string"}, - {"name": 
"lv_grid_id","discription": "unambiguous id_db of LV-Grid","unit": "integer"}, - {"name": "geom","discription": "geometric coordinates","unit": "WGS84, POINT"}, - {"name": "type","discription": "type of generation {solar; biomass}","unit": "string"}, - {"name": "subtype","discription": "subtype of generation: {solar_roof_mounted, unknown; biomass}","unit": "string"}, - {"name": "v_level","discription": "voltage level of generator","unit": "integer"}, - {"name": "nominal_capacity","discription": "nominal capacity","unit": "float"}, - {"name": "is_aggregated","discription": "True if load is aggregated load, else False","unit": "boolean"}, - {"name": "weather_cell_id","discription": "unambiguous number of the corresponding weather cell","unit": "integer"} ] } ], + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "la_id","description": "FIXME","unit": "integer"}, + {"name": "name","description": "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "lv_grid_id","description": "unambiguous id_db of LV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84, POINT"}, + {"name": "type","description": "type of generation {solar; biomass}","unit": "string"}, + {"name": "subtype","description": "subtype of generation: {solar_roof_mounted, unknown; biomass}","unit": "string"}, + {"name": "v_level","description": "voltage level of generator as integer","unit": "FIXME"}, + {"name": "nominal_capacity","description": "nominal capacity as float","unit": "FIXME"}, + {"name": "is_aggregated","description": "True if load is aggregated load, else False","unit": "boolean"}, + {"name": "weather_cell_id","description": "unambiguous number of the corresponding weather cell","unit": "integer"} ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json index d638a88d..41ce7da2 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json @@ -1,4 +1,4 @@ -{"title": "DING0 - Result data", +{"title": "DING0 - Result data lv grid", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": @@ -26,13 +26,13 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ {"name": "model_draft.ego_grid_ding0_lv_grid", "format": "PostgreSQL", "fields": [ - {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, - {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, - {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, - {"name": "name","discription": "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#","unit": "string"}, - {"name": "geom","discription": "geometric coordinates","unit": "WGS84 MULTIPOLYGON"}, - {"name": "population","discription": "population 
in LV-Grid","unit": "integer"}, - {"name": "voltage_nom","discription": "voltage level of grid in kV","unit": "float "} ] } ], + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "name","description": "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#","unit": "string"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 MULTIPOLYGON"}, + {"name": "population","description": "population in LV-Grid","unit": "integer"}, + {"name": "voltage_nom","description": "voltage level of grid as float","unit": "kV"} ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json index e03a53b3..897ef686 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json @@ -1,4 +1,4 @@ -{"title": "DING0 - Result data", +{"title": "DING0 - Result data for lv load areas", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": @@ -28,13 +28,13 @@ {"name": "model_draft.ego_grid_ding0_lv_load", "format": "PostgreSQL", "fields": [ - {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, - {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, - {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, - {"name": "name","discription": "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'","unit": "string"}, - {"name": "lv_grid_id","discription": "unambiguous id_db of LV-Grid","unit": "integer"}, - {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, - {"name": "consumption","discription": "type of load {residential, agricultural, industrial} and corresponding consumption","unit": "string "} ] } ], + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "name","description": "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "lv_grid_id","description": "unambiguous id_db of LV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "consumption","description": "type of load {residential, agricultural, industrial} and corresponding consumption","unit": "string "} ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json index 03aa2ab3..a15cc8f3 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json @@ -1,4 +1,4 
@@ -{"title": "DING0 - Result data", +{"title": "DING0 - Result data for lv station", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": @@ -28,11 +28,11 @@ {"name": "model_draft.ego_grid_ding0_lv_station", "format": "PostgreSQL", "fields": [ - {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, - {"name": "run_id","discription": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, - {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, - {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, - {"name": "name","discription": "FIXME","unit": "string"} ] } ], + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, + {"name": "id_db","description": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","description": "FIXME","unit": "string"} ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json index 3a62d92d..411e5aea 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json @@ -1,4 +1,4 @@ -{"title": "DING0 - Result data", +{"title": "DING0 - Result data for mv branchtee", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": @@ -28,11 +28,11 @@ {"name": "model_draft.ego_grid_ding0_mv_branchtee", "format": "PostgreSQL", "fields": [ - {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, - {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, - {"name": "id_db","discription": "geometric coordinates","unit": "integer"}, - {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, - {"name": "name","discription": "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"} ] } ], + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","description": "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"} ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json index bd7747b2..be5daee9 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json +++ 
b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json @@ -1,4 +1,4 @@ -{"title": "DING0 - Result data", +{"title": "DING0 - Result data for mv circuitbreaker", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": @@ -28,12 +28,12 @@ {"name": "model_draft.ego_grid_ding0_mv_circuitbreaker", "format": "PostgreSQL", "fields": [ - {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, - {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, - {"name": "id_db","discription": "unambiguous number of MV-Grid","unit": "integer"}, - {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, - {"name": "name","discription": "FIXME","unit": "string"}, - {"name": "status","discription": "FIXME","unit": "string "} ] } ], + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","description": "FIXME","unit": "string"}, + {"name": "status","description": "FIXME","unit": "string "} ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json index 695091a9..84b1f3aa 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json @@ -1,4 +1,4 @@ -{"title": "DING0 - Result data", +{"title": "DING0 - Result data for mv generator", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": @@ -28,17 +28,17 @@ {"name": "model_draft.ego_grid_ding0_mv_generator", "format": "PostgreSQL", "fields": [ - {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, - {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, - {"name": "id_db","discription": "unambiguous number of MV-Grid","unit": "integer"}, - {"name": "name","discription": "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"}, - {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, - {"name": "type","discription": "type of generation: {solar; biomass}","unit": "string"}, - {"name": "subtype","discription": "subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas}","unit": "string"}, - {"name": "v_level","discription": "voltage level of generator","unit": "integer"}, - {"name": "nominal_capacity","discription": "nominal capacity","unit": "float"}, - {"name": "weather_cell_id","discription": "unambiguous number of the corresponding weather cell","unit": "integer"}, - {"name": "is_aggregated","discription": "True if load is aggregated load, else False","unit": "boolean"} ] } ], + {"name": "id","description": "unambiguous unique numer","unit": 
"integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "name","description": "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "type","description": "type of generation: {solar; biomass}","unit": "string"}, + {"name": "subtype","description": "subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas}","unit": "string"}, + {"name": "v_level","description": "voltage level of generator as integer","unit": "FIXME"}, + {"name": "nominal_capacity","description": "nominal capacity as float","unit": "FIXME"}, + {"name": "weather_cell_id","description": "unambiguous number of the corresponding weather cell","unit": "integer"}, + {"name": "is_aggregated","description": "True if load is aggregated load, else False","unit": "boolean"} ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json index 1df8da96..72717c67 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json @@ -1,4 +1,4 @@ -{"title": "DING0 - Result data", +{"title": "DING0 - Result data for mv grid area", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": @@ -28,13 +28,13 @@ {"name": "model_draft.ego_grid_ding0_mv_grid", "format": "PostgreSQL", "fields": [ - {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, - {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, - {"name": "id_db","discription": "unambiguous number of MV-Grid","unit": "integer"}, - {"name": "geom","discription": "geometric coordinates","unit": "WGS84 MULTIPOLYGON"}, - {"name": "name","discription": "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'","unit": "string"}, - {"name": "population","discription": "population in MV-Grid","unit": "integer"}, - {"name": "voltage_nom","discription": "voltage level of grid in kV","unit": "float" } ] } ], + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 MULTIPOLYGON"}, + {"name": "name","description": "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'","unit": "string"}, + {"name": "population","description": "population in MV-Grid","unit": "integer"}, + {"name": "voltage_nom","description": "voltage level of grid as float","unit": "kV" } ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json index 7e4aafa1..b570f3d5 100644 --- 
a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json @@ -1,4 +1,4 @@ -{"title": "DING0 - Result data", +{"title": "DING0 - Result data for mv load area", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": @@ -28,12 +28,12 @@ {"name": "model_draft.ego_grid_ding0_mv_load", "format": "PostgreSQL", "fields": [ - {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, - {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, - {"name": "name","discription": "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"}, - {"name": "geom","discription": "geometric coordinates","unit": "WGS84 GEOMETRY"}, - {"name": "is_aggregated","discription": "True if load is aggregated load, else False","unit": "boolean"}, - {"name": "consumption","discription": "type of load {retail, residential, agricultural, industrial} and corresponding consumption","unit": "string" } ] } ], + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "name","description": "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 GEOMETRY"}, + {"name": "is_aggregated","description": "True if load is aggregated load, else False","unit": "boolean"}, + {"name": "consumption","description": "type of load {retail, residential, agricultural, industrial} and corresponding consumption","unit": "string" } ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json index 89bdb3f7..4b53397a 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json @@ -1,4 +1,4 @@ -{"title": "DING0 - Result data", +{"title": "DING0 - Result data for mv station", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": @@ -28,11 +28,11 @@ {"name": "model_draft.ego_grid_ding0_mv_station", "format": "PostgreSQL", "fields": [ - {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, - {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, - {"name": "geom","discription": "geometric coordinates","unit": "wkt"}, - {"name": "name","discription": "unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid#","unit": "string"}, - {"name": "","discription": "","unit": ""} ] } ], + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","description": 
"unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid#","unit": "string"} ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json index ba292b8e..19d1a962 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json @@ -1,4 +1,4 @@ -{"title": "DING0 - Result data", +{"title": "DING0 - Result data for mvlv mapping", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": @@ -28,12 +28,12 @@ {"name": "model_draft.ego_grid_ding0_mvlv_mapping", "format": "PostgreSQL", "fields": [ - {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, - {"name": "run_id","discription": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, - {"name": "lv_grid_id","discription": "unambiguous number of LV-Grid","unit": "integer"}, - {"name": "lv_grid_name","discription": "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'","unit": "string"}, - {"name": "mv_grid_id","discription": "unambiguous number of MV-Grid","unit": "integer"}, - {"name": "mv_grid_name","discription": "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'","unit": "string"} ] } ], + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, + {"name": "lv_grid_id","description": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "lv_grid_name","description": "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'","unit": "string"}, + {"name": "mv_grid_id","description": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "mv_grid_name","description": "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'","unit": "string"} ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json index 0bd616a8..da46e8dc 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json @@ -1,4 +1,4 @@ -{"title": "DING0 - Result data", +{"title": "DING0 - Result data for mvlv transformer", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": @@ -28,15 +28,15 @@ {"name": "model_draft.ego_grid_ding0_mvlv_transformer", "format": "PostgreSQL", "fields": [ - {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, - {"name": "run_id","discription": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, - {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, - {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, - {"name": "name","discription": 
"FIXME","unit": "string"}, - {"name": "voltage_op","discription": "in kV","unit": "float"}, - {"name": "s_nom","discription": "nominal apparent power in kVA","unit": "float"}, - {"name": "x","discription": "in Ohm","unit": "float"}, - {"name": "r","discription": "in Ohm","unit": "float"}] } ], + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, + {"name": "id_db","description": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","description": "FIXME","unit": "string"}, + {"name": "voltage_op","description": "as float ","unit": "kV"}, + {"name": "s_nom","description": "nominal apparent power as float ","unit": "kVA"}, + {"name": "x","description": "as float","unit": "Ohm"}, + {"name": "r","description": "as float","unit": "Ohm"}] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json index 36010ebb..8956b3b9 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json @@ -1,4 +1,4 @@ -{"title": "DING0 - Result data", +{"title": "DING0 - Result data for ding0 versioning", "description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", "language": [ "eng","ger "], "spatial": @@ -28,9 +28,9 @@ {"name": "model_draft.ego_grid_ding0_versioning", "format": "PostgreSQL", "fields": [ - {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, - {"name": "run_id","discription": "time and date of table generation","unit": "yyyyMMddhhmmss"}, - {"name": "description","discription": "FIXME","unit": "string"} ] } ], + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "description","description": "FIXME","unit": "string"} ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", From 128e14424798d369a76bc59bf52e6e1f850c925a Mon Sep 17 00:00:00 2001 From: boltbeard Date: Mon, 17 Sep 2018 11:42:27 +0200 Subject: [PATCH 064/215] reverted changes on results.py back to commit #5f913f22d54bce1c3267558d23d4b7fe72eee2f3 --- ding0/tools/results.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 94292c66..f8531789 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -2315,11 +2315,11 @@ def export_network_to_oedb(session, table, tabletype, srid): type_kind=row['type_kind'], type_name=row['type_name'], length=row['length'], - u_n=row['u_n'], - c=row['c'], - l=row['l'], - r=row['r'], - i_max_th=row['i_max_th'], + u_n=row['U_n'], + c=row['C'], + l=row['L'], + r=row['R'], + i_max_th=row['I_max_th'], geom=row['geom'], )) , axis=1) @@ -2402,9 +2402,9 @@ def export_network_to_oedb(session, table, tabletype, srid): geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, voltage_op=row['voltage_op'], - 
s_nom=row['s_nom'], - x=row['x'], - r=row['r'], + s_nom=row['S_nom'], + x=row['X'], + r=row['R'], )) , axis=1) @@ -2506,9 +2506,9 @@ def export_network_to_oedb(session, table, tabletype, srid): geom="SRID={};{}".format(srid, row['geom']) if row[ 'geom'] else None, voltage_op=row['voltage_op'], - s_nom=row['s_nom'], - x=row['x'], - r=row['r'], + s_nom=row['S_nom'], + x=row['X'], + r=row['R'], )) , axis=1) # if not engine.dialect.has_table(engine, 'ego_grid_mv_transformer'): From 311b8cd7a5d7fdfc92e7449556d34924715ac7fc Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 17 Sep 2018 15:22:52 +0200 Subject: [PATCH 065/215] #270 included read_metadata_file.py in db_export --- ding0/io/db_export.py | 88 ++++++++++++++++++++++----- ding0/io/read_metadata_file.py | 105 --------------------------------- 2 files changed, 74 insertions(+), 119 deletions(-) delete mode 100644 ding0/io/read_metadata_file.py diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 98d8c6f4..1b9c5f22 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -14,11 +14,15 @@ import numpy as np import pandas as pd +from pathlib import Path +import json +import os import re from sqlalchemy import create_engine from egoio.db_tables import model_draft as md +from egoio.tools.db import connection from sqlalchemy import MetaData, ARRAY, BigInteger, Boolean, CheckConstraint, Column, Date, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, Integer, JSON, Numeric, SmallInteger, String, Table, Text, UniqueConstraint, text from geoalchemy2.types import Geometry, Raster @@ -28,6 +32,7 @@ from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.dialects.postgresql import ARRAY, DOUBLE_PRECISION, INTEGER, NUMERIC, TEXT, BIGINT, TIMESTAMP, VARCHAR +con = connection() Base = declarative_base() metadata = Base.metadata @@ -81,6 +86,59 @@ def df_sql_write(dataframe, db_table, engine): sql_write_df.to_sql(db_table.name, con=engine, if_exists='append', index=None) +# metadatastring file folder. #ToDO: Test if Path works on other os (Tested on Windows7) +# Modify if folder name is different +FOLDER = Path('C:/ego_grid_ding0_metadatastrings') + + +def load_json_files(): + """ + Creats a list of all .json files in FOLDER + + Parameters + ---------- + :return: dict: jsonmetadata + contains all .json files from the folder + """ + print(FOLDER) + full_dir = os.walk(FOLDER.parent / FOLDER.name) + jsonmetadata = [] + + for jsonfiles in full_dir: + for jsonfile in jsonfiles: + jsonmetadata = jsonfile + + return jsonmetadata + + + +def prepare_metadatastring_fordb(table): + """ + Prepares the JSON String for the sql comment on table + + Required: The .json file names must contain the table name (for example from create_ding0_sql_tables()) + Instruction: Check the SQL "comment on table" for each table (f.e. 
use pgAdmin) + + Parameters + ---------- + table: str + table name of the sqlAlchemy table + + return: mdsstring:str + Contains the .json file as string + + """ + for file in load_json_files(): + JSONFILEPATH = FOLDER / file + with open(JSONFILEPATH, encoding='UTF-8') as f: + if table in file: + # included for testing / or logging + # print("Comment on table: " + table + "\nusing this metadata file: " + file + "\n") + mds = json.load(f) + mdsstring = json.dumps(mds, indent=4, ensure_ascii=False) + return mdsstring + + def create_ding0_sql_tables(engine, ding0_schema=None): """ Create the ding0 tables @@ -100,7 +158,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('run_id', BigInteger, primary_key=True, autoincrement=False, nullable=False), Column('description', String(3000)), schema=ding0_schema, - comment="""This is a comment on table for the ding0 versioning table""" + comment=prepare_metadatastring_fordb("versioning") ) @@ -123,7 +181,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('i_max_th', Float(10)), Column('geom', Geometry('LINESTRING', 4326)), schema=ding0_schema, - comment="""This is a comment on table for the ding0 lines table""" + comment=prepare_metadatastring_fordb("ding0_line") ) # ding0 lv_branchtee table @@ -134,7 +192,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('geom', Geometry('POINT', 4326)), Column('name', String(100)), schema=ding0_schema, - comment="""This is a comment on table for the ding0 lines table""" + comment=prepare_metadatastring_fordb("ding0_lv_branchtee") ) # ding0 lv_generator table @@ -153,7 +211,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('weather_cell_id', BigInteger), Column('is_aggregated', Boolean), schema=ding0_schema, - comment="""This is a comment on table for the ding0 lines table""" + comment=prepare_metadatastring_fordb("ding0_lv_generator") ) # ding0 lv_load table @@ -166,7 +224,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('geom', Geometry('POINT', 4326)), Column('consumption', String(100)), schema=ding0_schema, - comment="""This is a comment on table for the ding0 lines table""" + comment=prepare_metadatastring_fordb("ding0_lv_load") ) # ding0 lv_station table @@ -177,7 +235,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('geom', Geometry('POINT', 4326)), Column('name', String(100)), schema=ding0_schema, - comment="""This is a commment on table for the ding0 lines table""" + comment=prepare_metadatastring_fordb("ding0_lv_station") ) # ding0 mvlv_transformer table @@ -192,7 +250,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('x', Float(10)), Column('r', Float(10)), schema=ding0_schema, - comment="""This is a comment on table for the ding0 lines table""" + comment=prepare_metadatastring_fordb("ding0_mvlv_transformer") ) # ding0 mvlv_mapping table @@ -203,6 +261,8 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('lv_grid_name', String(100)), Column('mv_grid_id', BigInteger), Column('mv_grid_name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("ding0_mvlv_mapping") ) # ding0 mv_branchtee table @@ -213,7 +273,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('geom', Geometry('POINT', 4326)), Column('name', String(100)), schema=ding0_schema, - comment="""This is a comment on table for the ding0 lines table""" + comment=prepare_metadatastring_fordb("ding0_mv_branchtee") ) # ding0 mv_circuitbreaker table @@ -225,7 +285,7 @@ def 
create_ding0_sql_tables(engine, ding0_schema=None): Column('name', String(100)), Column('status', String(10)), schema=ding0_schema, - comment="""This is a comment on table for the ding0 lines table""" + comment=prepare_metadatastring_fordb("ding0_mv_circuitbreaker") ) # ding0 mv_generator table @@ -242,7 +302,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('weather_cell_id', BigInteger), Column('is_aggregated', Boolean), schema=ding0_schema, - comment="""This is a comment on table for the ding0 lines table""" + comment=prepare_metadatastring_fordb("ding0_mv_generator") ) # ding0 mv_load table @@ -255,7 +315,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('is_aggregated', Boolean), Column('consumption', String(100)), schema=ding0_schema, - comment="""This is a comment on table for the ding0 lines table""" + comment=prepare_metadatastring_fordb("ding0_mv_load") ) # ding0 mv_grid table @@ -268,7 +328,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('population', BigInteger), Column('voltage_nom', Float(10)), schema=ding0_schema, - comment="""This is a comment on table for the ding0 lines table""" + comment=prepare_metadatastring_fordb("ding0_mv_grid") ) @@ -281,7 +341,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('geom', Geometry('POINT', 4326)), Column('name', String(100)), schema=ding0_schema, - comment="""This is a comment on table for the ding0 lines table""" + comment=prepare_metadatastring_fordb("ding0_mv_station") ) # ding0 hvmv_transformer table @@ -296,7 +356,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): Column('x', Float(10)), Column('r', Float(10)), schema=ding0_schema, - comment="""This is a comment on table for the ding0 lines table""" + comment=prepare_metadatastring_fordb("ding0_hvmv_transformer") ) # create all the tables diff --git a/ding0/io/read_metadata_file.py b/ding0/io/read_metadata_file.py deleted file mode 100644 index affd8eea..00000000 --- a/ding0/io/read_metadata_file.py +++ /dev/null @@ -1,105 +0,0 @@ -"""This file is part of DINGO, the DIstribution Network GeneratOr. -DINGO is a tool to generate synthetic medium and low voltage power -distribution grids based on open data. - -It is developed in the project open_eGo: https://openegoproject.wordpress.com - -DING0 lives at github: https://github.com/openego/ding0/ -The documentation is available on RTD: http://ding0.readthedocs.io""" - -__copyright__ = "Reiner Lemoine Institut gGmbH" -__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" -__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" -__author__ = "jh-RLI" - -from egoio.tools.db import connection - -from sqlalchemy import MetaData, ARRAY, BigInteger, Boolean, CheckConstraint, Column, Date, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, Integer, JSON, Numeric, SmallInteger, String, Table, Text, UniqueConstraint, text -from geoalchemy2.types import Geometry, Raster -from sqlalchemy.ext.declarative import declarative_base - -from pathlib import Path -import json -import os - -# DB used for testing: reiners_db -con = connection() - - -Base = declarative_base() -metadata = Base.metadata - -# metadatastring file folder. 
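For context on what prepare_metadatastring_fordb() is wired into above: SQLAlchemy's Table(..., comment=...) argument is emitted as COMMENT ON TABLE when metadata.create_all() runs against a PostgreSQL engine, so each ding0 table ends up carrying its metadata JSON string inside the database. A minimal, self-contained sketch of that flow, assuming the metadata folder layout used in this module; the connection string is a placeholder, not the project's configuration:

import json
from pathlib import Path

from sqlalchemy import BigInteger, Column, MetaData, String, Table, create_engine

METADATA_FOLDER = Path('C:/ego_grid_ding0_metadatastrings')  # assumed location of the *.json files


def table_comment(table_name):
    # same idea as prepare_metadatastring_fordb(): find the .json file whose name
    # contains the table name and return it pretty-printed for use as the comment
    for json_file in METADATA_FOLDER.glob('*.json'):
        if table_name in json_file.name:
            with open(json_file, encoding='UTF-8') as f:
                return json.dumps(json.load(f), indent=4, ensure_ascii=False)
    return None


metadata = MetaData()
versioning = Table('ding0_versioning', metadata,
                   Column('run_id', BigInteger, primary_key=True, autoincrement=False, nullable=False),
                   Column('description', String(3000)),
                   schema='topology',
                   comment=table_comment('versioning'))

# placeholder DSN; against PostgreSQL, create_all() also emits COMMENT ON TABLE
engine = create_engine('postgresql+psycopg2://user:password@localhost:5432/oedb')
metadata.create_all(engine, checkfirst=True)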
#ToDO: Test if Path works on other os (Tested on Windows7) -# Modify if folder name is different -FOLDER = Path('C:/ego_grid_ding0_metadatastrings') - -DING0_TABLES = {'versioning': 'ding0_versioning', - 'line': 'ding0_line', - 'lv_branchtee': 'ding0_lv_branchtee', - 'lv_generator': 'ding0_lv_generator', - 'lv_load': 'ding0_lv_load', - 'lv_grid': 'ding0_lv_grid', - 'lv_station': 'ding0_lv_station', - 'mvlv_transformer': 'ding0_mvlv_transformer', - 'mvlv_mapping': 'ding0_mvlv_mapping', - 'mv_branchtee': 'ding0_mv_branchtee', - 'mv_circuitbreaker': 'ding0_mv_circuitbreaker', - 'mv_generator': 'ding0_mv_generator', - 'mv_load': 'ding0_mv_load', - 'mv_grid': 'ding0_mv_grid', - 'mv_station': 'ding0_mv_station', - 'hvmv_transformer': 'ding0_hvmv_transformer'} - - -# load data from json file -def load_json_files(): - print(FOLDER) - full_dir = os.walk(FOLDER.parent / FOLDER.name) - jsonmetadata = [] - - for jsonfiles in full_dir: - for jsonfile in jsonfiles: - #if jsonfile[:4] == 'json': #ToDo: Add Execption - jsonmetadata = jsonfile - - return jsonmetadata - - -# Prepares the JSON String for the sql comment on table # ToDO: handel "Sonderzeichen" in SQL comment -# mds = metadatastring -def prepare_metadatastring_fordb(table): - for file in load_json_files(): - JSONFILEPATH = FOLDER / file - with open(JSONFILEPATH) as f: - if table in file: - mds = json.load(f) - mdsstring = json.dumps(mds, indent=4) - return mdsstring - - -# Copy from db_export for testing purpose included just for testing and review -def create_ding0_sql_tables(engine, ding0_schema): - # versioning table - versioning = Table(DING0_TABLES['versioning'], metadata, - Column('run_id', BigInteger, primary_key=True, autoincrement=False, nullable=False), - Column('description', String(3000)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("versioning") - ) - - # ding0 mv_station table - ding0_mv_station = Table(DING0_TABLES['mv_station'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('name', String(100)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("ding0_mv_station") - ) - - # create all the tables - metadata.create_all(engine, checkfirst=True) - -# Test -create_ding0_sql_tables(con, "topology") \ No newline at end of file From dc49c6f972ad6bfdd88a0b7bc8faaf280f278121 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 17 Sep 2018 15:25:12 +0200 Subject: [PATCH 066/215] #270 minor changes --- ding0/io/db_export.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 1b9c5f22..57c25679 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -161,7 +161,6 @@ def create_ding0_sql_tables(engine, ding0_schema=None): comment=prepare_metadatastring_fordb("versioning") ) - # ding0 lines table ding0_line = Table(DING0_TABLES['line'], metadata, Column('id', Integer, primary_key=True), From 72f3ea04b8600fb890d34dbcfa357db08570bb70 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 17 Sep 2018 15:27:13 +0200 Subject: [PATCH 067/215] #270 minor changes --- ding0/io/db_export.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 57c25679..227b4b04 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -100,6 +100,7 @@ def load_json_files(): :return: dict: jsonmetadata contains all .json files from the folder 
""" + print(FOLDER) full_dir = os.walk(FOLDER.parent / FOLDER.name) jsonmetadata = [] @@ -126,8 +127,8 @@ def prepare_metadatastring_fordb(table): return: mdsstring:str Contains the .json file as string - """ + for file in load_json_files(): JSONFILEPATH = FOLDER / file with open(JSONFILEPATH, encoding='UTF-8') as f: From 18d189913fa37efaf982a1116843baa07189b6a1 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 17 Sep 2018 16:45:48 +0200 Subject: [PATCH 068/215] #270 restructured file and stated to work on export_to_db() --- ding0/io/db_export.py | 520 +++++------------------------------------- 1 file changed, 63 insertions(+), 457 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 227b4b04..3f031ea2 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -10,7 +10,7 @@ __copyright__ = "Reiner Lemoine Institut gGmbH" __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" __url__ = "https://github.com/openego/ding0/blob/master/LICENSE" -__author__ = "nesnoj, gplssm" +__author__ = "nesnoj, gplssm, jh-RLI" import numpy as np import pandas as pd @@ -55,37 +55,6 @@ 'hvmv_transformer': 'ding0_hvmv_transformer'} -def df_sql_write(dataframe, db_table, engine): - """ - Convert dataframes such that their column names - are made small and the index is renamed 'id' so as to - correctly load its data to its appropriate sql table. - - .. ToDo: need to check for id_db instead of only 'id' in index label names - - NOTE: This function does not check if the dataframe columns - matches the db_table fields, if they do not then no warning - is given. - - Parameters - ---------- - dataframe: :pandas:`DataFrame` - The pandas dataframe to be transferred to its - apprpritate db_table - - db_table: :py:mod:`sqlalchemy.sql.schema.Table` - A table instance definition from sqlalchemy. - NOTE: This isn't an orm definition - - engine: :py:mod:`sqlalchemy.engine.base.Engine` - Sqlalchemy database engine - """ - sql_write_df = dataframe.copy() - sql_write_df.columns = sql_write_df.columns.map(str.lower) - # sql_write_df = sql_write_df.set_index('id') - sql_write_df.to_sql(db_table.name, con=engine, if_exists='append', index=None) - - # metadatastring file folder. #ToDO: Test if Path works on other os (Tested on Windows7) # Modify if folder name is different FOLDER = Path('C:/ego_grid_ding0_metadatastrings') @@ -362,220 +331,102 @@ def create_ding0_sql_tables(engine, ding0_schema=None): # create all the tables metadata.create_all(engine, checkfirst=True) + +def df_sql_write(dataframe, db_table, engine): + """ + Convert dataframes such that their column names + are made small and the index is renamed 'id' so as to + correctly load its data to its appropriate sql table. + + .. ToDo: need to check for id_db instead of only 'id' in index label names + + NOTE: This function does not check if the dataframe columns + matches the db_table fields, if they do not then no warning + is given. + + Parameters + ---------- + dataframe: :pandas:`DataFrame` + The pandas dataframe to be transferred to its + apprpritate db_table + + db_table: :py:mod:`sqlalchemy.sql.schema.Table` + A table instance definition from sqlalchemy. 
+ NOTE: This isn't an orm definition + + engine: :py:mod:`sqlalchemy.engine.base.Engine` + Sqlalchemy database engine + """ + sql_write_df = dataframe.copy() + sql_write_df.columns = sql_write_df.columns.map(str.lower) + # sql_write_df = sql_write_df.set_index('id') + sql_write_df.to_sql(db_table.name, con=engine, if_exists='append', index=None) + def export_network_to_db(session, schema, table, tabletype, srid): print("Exporting table type : {}".format(tabletype)) if tabletype == 'line': - table.apply(lambda row: - session.add(schema.EgoGridDing0Line( - run_id=row['run_id'], - id_db=row['id'], - edge_name=row['edge_name'], - grid_name=row['grid_name'], - node1=row['node1'], - node2=row['node2'], - type_kind=row['type_kind'], - type_name=row['type_name'], - length=row['length'], - u_n=row['U_n'], - c=row['C'], - l=row['L'], - r=row['R'], - i_max_th=row['I_max_th'], - geom=row['geom'], - )) - , axis=1) + # create a dummy dataframe with lines + line1 = pd.DataFrame({'run_id': [1, 1], + 'id': [1, 2], + 'edge_name': ['line1', 'line2'], + 'grid_name': ['mv_grid5', 'mvgrid5'], + 'node1': [1, 2], + 'node2': [2, 3], + 'type_kind': ['line', 'line'], + 'type_name': ['NASX2Y', 'NA2SXX2Y'], + 'length': [1.3, 2.3], + 'U_n': [10, 10], + 'C': [0.002, 0.001], + 'L': [0.01, 0.02], + 'R': [0.0001, 0.00005], + 'I_max_th': [5, 6]}) + elif tabletype == 'lv_cd': - table.apply(lambda row: - session.add(schema.EgoGridDing0LvBranchtee( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - )) - , axis=1) + pass elif tabletype == 'lv_gen': - table.apply(lambda row: - session.add(schema.EgoGridDing0LvGenerator( - run_id=row['run_id'], - id_db=row['id'], - la_id=row['la_id'], - name=row['name'], - lv_grid_id=str(row['lv_grid_id']), - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - type=row['type'], - subtype=row['subtype'], - v_level=row['v_level'], - nominal_capacity=row['nominal_capacity'], - is_aggregated=row['is_aggregated'], - weather_cell_id=row['weather_cell_id'] if not(pd.isnull(row[ - 'weather_cell_id'])) else None, - - )) - , axis=1) + pass elif tabletype == 'lv_load': - table.apply(lambda row: - session.add(schema.EgoGridDing0LvLoad( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - lv_grid_id=row['lv_grid_id'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - consumption=row['consumption'] - )) - , axis=1) + pass elif tabletype == 'lv_grid': - table.apply(lambda row: - session.add(schema.EgoGridDing0LvGrid( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - population=row['population'], - voltage_nom=row['voltage_nom'], - )) - , axis=1) + pass elif tabletype == 'lv_station': - table.apply(lambda row: - session.add(schema.EgoGridDing0LvStation( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - )) - , axis=1) + pass elif tabletype == 'mvlv_trafo': - table.apply(lambda row: - session.add(schema.EgoGridDing0MvlvTransformer( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - voltage_op=row['voltage_op'], - s_nom=row['S_nom'], - x=row['X'], - r=row['R'], - )) - , axis=1) + pass elif tabletype == 'mvlv_mapping': - table.apply(lambda row: - session.add(schema.EgoGridDing0MvlvMapping( - 
run_id=row['run_id'], - lv_grid_id=row['lv_grid_id'], - lv_grid_name=row['lv_grid_name'], - mv_grid_id=row['mv_grid_id'], - mv_grid_name=row['mv_grid_name'], - )) - , axis=1) + pass elif tabletype == 'mv_cd': - table.apply(lambda row: - session.add(schema.EgoGridDing0MvBranchtee( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - )) - , axis=1) + pass elif tabletype == 'mv_cb': - table.apply(lambda row: - session.add(schema.EgoGridDing0MvCircuitbreaker( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - status=row['status'], - )) - , axis=1) + pass elif tabletype == 'mv_gen': - table.apply(lambda row: - session.add(schema.EgoGridDing0MvGenerator( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - type=row['type'], - subtype=row['subtype'], - v_level=row['v_level'], - nominal_capacity=row['nominal_capacity'], - is_aggregated=row['is_aggregated'], - weather_cell_id=row['weather_cell_id'] if not(pd.isnull(row[ - 'weather_cell_id'])) else None, - )) - , axis=1) + pass elif tabletype == 'mv_load': - table.apply(lambda row: - session.add(schema.EgoGridDing0MvLoad( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - is_aggregated=row['is_aggregated'], - consumption=row['consumption'], - )) - , axis=1) + pass elif tabletype == 'mv_grid': - table.apply(lambda row: - session.add(schema.EgoGridDing0MvGrid( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - population=row['population'], - voltage_nom=row['voltage_nom'], - )) - , axis=1) + pass elif tabletype == 'mv_station': - table.apply(lambda row: - session.add(schema.EgoGridDing0MvStation( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - )) - , axis=1) + pass elif tabletype == 'hvmv_trafo': - table.apply(lambda row: - session.add(schema.EgoGridDing0HvmvTransformer( - run_id=row['run_id'], - id_db=row['id'], - name=row['name'], - geom="SRID={};{}".format(srid, row['geom']) if row[ - 'geom'] else None, - voltage_op=row['voltage_op'], - s_nom=row['S_nom'], - x=row['X'], - r=row['R'], - )) - , axis=1) + pass # if not engine.dialect.has_table(engine, 'ego_grid_mv_transformer'): # print('helloworld') - session.commit() + #session.commit() def export_data_to_db(session, schema, run_id, metadata_json, srid, @@ -754,248 +605,3 @@ def change_owner(engine, table, role): change_owner(engine, tab, 'oeuser') engine.close() - - -class EgoGridDing0Versioning(Base): - __tablename__ = 'ego_grid_ding0_versioning' - __table_args__ = {'schema': 'model_draft'} - - id = Column(Integer, primary_key=True) - run_id = Column(BigInteger, unique=True, nullable=False) - description = Column(String(3000)) - - -class EgoGridDing0MvStation(Base): - __tablename__ = 'ego_grid_ding0_mv_station' - __table_args__ = {'schema': 'model_draft'} - - id = Column(BigInteger, primary_key=True) - run_id = Column(BigInteger, - ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), - nullable=False) - id_db = Column(BigInteger) - geom = Column(Geometry('POINT', 4326)) - name = Column(String(100)) - - -class EgoGridDing0HvmvTransformer(Base): - __tablename__ = 
'ego_grid_ding0_hvmv_transformer' - __table_args__ = {'schema': 'model_draft'} - - id = Column(Integer, primary_key=True) - run_id = Column(BigInteger, - ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), - nullable=False) - id_db = Column(BigInteger) - geom = Column(Geometry('POINT', 4326)) - name = Column(String(100)) - voltage_op = Column(Float(10)) - s_nom = Column(Float(10)) - x = Column(Float(10)) - r = Column(Float(10)) - - -class EgoGridDing0Line(Base): - __tablename__ = 'ego_grid_ding0_line' - __table_args__ = {'schema': 'model_draft'} - - id = Column(Integer, primary_key=True) - run_id = Column(BigInteger, - ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), - nullable=False) - id_db = Column(BigInteger) - edge_name = Column(String(100)) - grid_name = Column(String(100)) - node1 = Column(String(100)) - node2 = Column(String(100)) - type_kind = Column(String(20)) - type_name = Column(String(30)) - length = Column(Float(10)) - u_n = Column(Float(10)) - c = Column(Float(10)) - l = Column(Float(10)) - r = Column(Float(10)) - i_max_th = Column(Float(10)) - geom = Column(Geometry('LINESTRING', 4326)) - - -class EgoGridDing0LvBranchtee(Base): - __tablename__ = 'ego_grid_ding0_lv_branchtee' - __table_args__ = {'schema': 'model_draft'} - - id = Column(Integer, primary_key=True) - run_id = Column(BigInteger, - ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), - nullable=False) - id_db = Column(BigInteger) - geom = Column(Geometry('POINT', 4326)) - name = Column(String(100)) - - -class EgoGridDing0LvGenerator(Base): - __tablename__ = 'ego_grid_ding0_lv_generator' - __table_args__ = {'schema': 'model_draft'} - - id = Column(Integer, primary_key=True) - run_id = Column(BigInteger, - ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), - nullable=False) - id_db = Column(BigInteger) - la_id = Column(BigInteger) - name = Column(String(100)) - lv_grid_id = Column(BigInteger) - geom = Column(Geometry('POINT', 4326)) - type = Column(String(22)) - subtype = Column(String(22)) - v_level = Column(Integer) - nominal_capacity = Column(Float(10)) - weather_cell_id = Column(BigInteger) - is_aggregated = Column(Boolean) - - -class EgoGridDing0LvGrid(Base): - __tablename__ = 'ego_grid_ding0_lv_grid' - __table_args__ = {'schema': 'model_draft'} - - id = Column(Integer, primary_key=True) - run_id = Column(BigInteger, - ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), - nullable=False) - id_db = Column(BigInteger) - name = Column(String(100)) - geom = Column(Geometry('MULTIPOLYGON', 4326)) #Todo: check if right srid? 
- population = Column(BigInteger) - voltage_nom = Column(Float(10)) #Todo: Check Datatypes - - -class EgoGridDing0LvLoad(Base): - __tablename__ = 'ego_grid_ding0_lv_load' - __table_args__ = {'schema': 'model_draft'} - - id = Column(Integer, primary_key=True) - run_id = Column(BigInteger, - ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), - nullable=False) - id_db = Column(BigInteger) - name = Column(String(100)) - lv_grid_id = Column(Integer) - geom = Column(Geometry('POINT', 4326)) - consumption = Column(String(100)) - - -class EgoGridDing0MvBranchtee(Base): - __tablename__ = 'ego_grid_ding0_mv_branchtee' - __table_args__ = {'schema': 'model_draft'} - - id = Column(Integer, primary_key=True) - run_id = Column(BigInteger, - ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), - nullable=False) - id_db = Column(BigInteger) - geom = Column(Geometry('POINT', 4326)) - name = Column(String(100)) - -class EgoGridDing0MvCircuitbreaker(Base): - __tablename__ = 'ego_grid_ding0_mv_circuitbreaker' - __table_args__ = {'schema': 'model_draft'} - - id = Column(Integer, primary_key=True) - run_id = Column(BigInteger, - ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), - nullable=False) - id_db = Column(BigInteger) - geom = Column(Geometry('POINT', 4326)) - name = Column(String(100)) - status = Column(String(10)) - -class EgoGridDing0MvGenerator(Base): - __tablename__ = 'ego_grid_ding0_mv_generator' - __table_args__ = {'schema': 'model_draft'} - - id = Column(Integer, primary_key=True) - run_id = Column(BigInteger, - ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), - nullable=False) - id_db = Column(BigInteger) - name = Column(String(100)) - geom = Column(Geometry('POINT', 4326)) - type = Column(String(22)) - subtype = Column(String(22)) - v_level = Column(Integer) - nominal_capacity = Column(Float(10)) - weather_cell_id = Column(BigInteger) - is_aggregated = Column(Boolean) - - -class EgoGridDing0MvGrid(Base): - __tablename__ = 'ego_grid_ding0_mv_grid' - __table_args__ = {'schema': 'model_draft'} - - id = Column(Integer, primary_key=True) - run_id = Column(BigInteger, - ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), - nullable=False) - id_db = Column(BigInteger) - geom = Column(Geometry('MULTIPOLYGON', 4326)) #Todo: check if right srid? 
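The declarative classes removed in this hunk were only needed for the ORM-style session.add() inserts above; after the restructuring, writes are meant to go through pandas DataFrame.to_sql() against the Core Table objects built by create_ding0_sql_tables(). A rough sketch of that write path, using a dummy frame with invented column labels, an in-memory engine as a stand-in for egoio's connection(), and one of the DING0_TABLES names:

import pandas as pd
from sqlalchemy import create_engine

# stand-in engine; the module itself uses egoio.tools.db.connection()
engine = create_engine('sqlite:///:memory:')

# dummy branch-tee frame with mixed-case column labels, purely for illustration
branchtee = pd.DataFrame({'run_id': [1],
                          'id_db': [10],
                          'Name': ['MVCableDistributorDing0_MV_5_1']})

# essentially what df_sql_write() does: lower-case the columns, then append
branchtee.columns = branchtee.columns.map(str.lower)
branchtee.to_sql('ding0_mv_branchtee', con=engine, if_exists='append', index=False)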
- name = Column(String(100)) - population = Column(BigInteger) - voltage_nom = Column(Float(10)) #Todo: Check Datatypes - - -class EgoGridDing0MvLoad(Base): - __tablename__ = 'ego_grid_ding0_mv_load' - __table_args__ = {'schema': 'model_draft'} - - id = Column(Integer, primary_key=True) - run_id = Column(BigInteger, - ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), - nullable=False) - id_db = Column(BigInteger) - name = Column(String(100)) - geom = Column(Geometry('GEOMETRY', 4326)) - is_aggregated = Column(Boolean) - consumption = Column(String(100)) - - -class EgoGridDing0MvlvMapping(Base): - __tablename__ = 'ego_grid_ding0_mvlv_mapping' - __table_args__ = {'schema': 'model_draft'} - - id = Column(Integer, primary_key=True) - run_id = Column(BigInteger, - ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), - nullable=False) - lv_grid_id = Column(BigInteger) - lv_grid_name = Column(String(100)) - mv_grid_id = Column(BigInteger) - mv_grid_name = Column(String(100)) - - -class EgoGridDing0LvStation(Base): - __tablename__ = 'ego_grid_ding0_lv_station' - __table_args__ = {'schema': 'model_draft'} - - id = Column(Integer, primary_key=True) - run_id = Column(BigInteger, - ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), - nullable=False) - id_db = Column(BigInteger) - geom = Column(Geometry('POINT', 4326)) - name = Column(String(100)) - - -class EgoGridDing0MvlvTransformer(Base): - __tablename__ = 'ego_grid_ding0_mvlv_transformer' - __table_args__ = {'schema': 'model_draft'} - - id = Column(Integer, primary_key=True) - run_id = Column(BigInteger, - ForeignKey('model_draft.ego_grid_ding0_versioning.run_id'), - nullable=False) - id_db = Column(BigInteger) - geom = Column(Geometry('POINT', 4326)) - name = Column(String(100)) - voltage_op = Column(Float(10)) - s_nom = Column(Float(10)) - x = Column(Float(10)) - r = Column(Float(10)) From 5a0d73261c5734f2f1512ce4793c7a6a6a75e6e3 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 17 Sep 2018 16:49:24 +0200 Subject: [PATCH 069/215] #270 minor changes --- ding0/io/db_export.py | 34 ++++++++++++++++++---------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 3f031ea2..6566bdae 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -362,25 +362,27 @@ def df_sql_write(dataframe, db_table, engine): # sql_write_df = sql_write_df.set_index('id') sql_write_df.to_sql(db_table.name, con=engine, if_exists='append', index=None) + # create a dummy dataframe with lines + line1 = pd.DataFrame({'run_id': [1, 1], + 'id': [1, 2], + 'edge_name': ['line1', 'line2'], + 'grid_name': ['mv_grid5', 'mvgrid5'], + 'node1': [1, 2], + 'node2': [2, 3], + 'type_kind': ['line', 'line'], + 'type_name': ['NASX2Y', 'NA2SXX2Y'], + 'length': [1.3, 2.3], + 'U_n': [10, 10], + 'C': [0.002, 0.001], + 'L': [0.01, 0.02], + 'R': [0.0001, 0.00005], + 'I_max_th': [5, 6]}) + + def export_network_to_db(session, schema, table, tabletype, srid): print("Exporting table type : {}".format(tabletype)) if tabletype == 'line': - # create a dummy dataframe with lines - line1 = pd.DataFrame({'run_id': [1, 1], - 'id': [1, 2], - 'edge_name': ['line1', 'line2'], - 'grid_name': ['mv_grid5', 'mvgrid5'], - 'node1': [1, 2], - 'node2': [2, 3], - 'type_kind': ['line', 'line'], - 'type_name': ['NASX2Y', 'NA2SXX2Y'], - 'length': [1.3, 2.3], - 'U_n': [10, 10], - 'C': [0.002, 0.001], - 'L': [0.01, 0.02], - 'R': [0.0001, 0.00005], - 'I_max_th': [5, 6]}) - + pass elif tabletype == 'lv_cd': pass From 
77423a19642c7bf92c43197a6eb9cf252d224dc6 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 20 Sep 2018 15:37:59 +0200 Subject: [PATCH 070/215] #270 include example data frame, outlined some tables for testing --- ding0/io/db_export.py | 82 ++++++++++++++++++++++++------------------- 1 file changed, 45 insertions(+), 37 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 6566bdae..04f77649 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -37,22 +37,22 @@ Base = declarative_base() metadata = Base.metadata -DING0_TABLES = {'versioning': 'ding0_versioning', - 'line': 'ding0_line', - 'lv_branchtee': 'ding0_lv_branchtee', - 'lv_generator': 'ding0_lv_generator', - 'lv_load': 'ding0_lv_load', - 'lv_grid': 'ding0_lv_grid', - 'lv_station': 'ding0_lv_station', - 'mvlv_transformer': 'ding0_mvlv_transformer', - 'mvlv_mapping': 'ding0_mvlv_mapping', - 'mv_branchtee': 'ding0_mv_branchtee', - 'mv_circuitbreaker': 'ding0_mv_circuitbreaker', - 'mv_generator': 'ding0_mv_generator', - 'mv_load': 'ding0_mv_load', - 'mv_grid': 'ding0_mv_grid', - 'mv_station': 'ding0_mv_station', - 'hvmv_transformer': 'ding0_hvmv_transformer'} +DING0_TABLES = {'versioning': 'ego_ding0_versioning', + 'line': 'ego_ding0_line', + 'lv_branchtee': 'ego_ding0_lv_branchtee', + 'lv_generator': 'ego_ding0_lv_generator', + 'lv_load': 'ego_ding0_lv_load', + 'lv_grid': 'ego_ding0_lv_grid', + 'lv_station': 'ego_ding0_lv_station', + 'mvlv_transformer': 'ego_ding0_mvlv_transformer', + 'mvlv_mapping': 'ego_ding0_mvlv_mapping', + 'mv_branchtee': 'ego_ding0_mv_branchtee', + 'mv_circuitbreaker': 'ego_ding0_mv_circuitbreaker', + 'mv_generator': 'ego_ding0_mv_generator', + 'mv_load': 'ego_ding0_mv_load', + 'mv_grid': 'ego_ding0_mv_grid', + 'mv_station': 'ego_ding0_mv_station', + 'hvmv_transformer': 'ego_ding0_hvmv_transformer'} # metadatastring file folder. 
#ToDO: Test if Path works on other os (Tested on Windows7) @@ -70,7 +70,7 @@ def load_json_files(): contains all .json files from the folder """ - print(FOLDER) + #print(FOLDER) full_dir = os.walk(FOLDER.parent / FOLDER.name) jsonmetadata = [] @@ -153,6 +153,7 @@ def create_ding0_sql_tables(engine, ding0_schema=None): comment=prepare_metadatastring_fordb("ding0_line") ) + """ # ding0 lv_branchtee table ding0_lv_branchtee = Table(DING0_TABLES['lv_branchtee'], metadata, Column('id', Integer, primary_key=True), @@ -327,11 +328,10 @@ def create_ding0_sql_tables(engine, ding0_schema=None): schema=ding0_schema, comment=prepare_metadatastring_fordb("ding0_hvmv_transformer") ) - +""" # create all the tables metadata.create_all(engine, checkfirst=True) - def df_sql_write(dataframe, db_table, engine): """ Convert dataframes such that their column names @@ -362,24 +362,10 @@ def df_sql_write(dataframe, db_table, engine): # sql_write_df = sql_write_df.set_index('id') sql_write_df.to_sql(db_table.name, con=engine, if_exists='append', index=None) - # create a dummy dataframe with lines - line1 = pd.DataFrame({'run_id': [1, 1], - 'id': [1, 2], - 'edge_name': ['line1', 'line2'], - 'grid_name': ['mv_grid5', 'mvgrid5'], - 'node1': [1, 2], - 'node2': [2, 3], - 'type_kind': ['line', 'line'], - 'type_name': ['NASX2Y', 'NA2SXX2Y'], - 'length': [1.3, 2.3], - 'U_n': [10, 10], - 'C': [0.002, 0.001], - 'L': [0.01, 0.02], - 'R': [0.0001, 0.00005], - 'I_max_th': [5, 6]}) - - -def export_network_to_db(session, schema, table, tabletype, srid): + + + +def export_network_to_db(engine, schema, table, tabletype, srid): print("Exporting table type : {}".format(tabletype)) if tabletype == 'line': pass @@ -607,3 +593,25 @@ def change_owner(engine, table, role): change_owner(engine, tab, 'oeuser') engine.close() + +# create a dummy dataframe with lines +line1 = pd.DataFrame({'run_id': [1, 1], + 'id_db': [1, 2], + 'edge_name': ['line1', 'line2'], + 'grid_name': ['mv_grid5', 'mvgrid5'], + 'node1': [1, 2], + 'node2': [2, 3], + 'type_kind': ['line', 'line'], + 'type_name': ['NASX2Y', 'NA2SXX2Y'], + 'length': [1.3, 2.3], + 'U_n': [10, 10], + 'C': [0.002, 0.001], + 'L': [0.01, 0.02], + 'R': [0.0001, 0.00005], + 'I_max_th': [5, 6]}) + +# included for testing +#df_sql_write(line1, , con) + +# tested with reiners_db +create_ding0_sql_tables(con, "topology") \ No newline at end of file From f187ed3a1e12d0c012a2f86f748d434c81196c8f Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Fri, 21 Sep 2018 13:48:00 +0200 Subject: [PATCH 071/215] #270 committed first data frames to database --- ding0/io/db_export.py | 38 +++++++++++++++++++++++++++++++------- 1 file changed, 31 insertions(+), 7 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 04f77649..821f948d 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -32,6 +32,8 @@ from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.dialects.postgresql import ARRAY, DOUBLE_PRECISION, INTEGER, NUMERIC, TEXT, BIGINT, TIMESTAMP, VARCHAR + + con = connection() Base = declarative_base() @@ -55,7 +57,7 @@ 'hvmv_transformer': 'ego_ding0_hvmv_transformer'} -# metadatastring file folder. #ToDO: Test if Path works on other os (Tested on Windows7) +# metadatastring file folder. 
#ToDO: Test if Path works on other os (Tested on Windows7) and Change to not static # Modify if folder name is different FOLDER = Path('C:/ego_grid_ding0_metadatastrings') @@ -329,10 +331,18 @@ def create_ding0_sql_tables(engine, ding0_schema=None): comment=prepare_metadatastring_fordb("ding0_hvmv_transformer") ) """ + # create all the tables metadata.create_all(engine, checkfirst=True) -def df_sql_write(dataframe, db_table, engine): + +def select_db_table(db_table): + for table in metadata.tables.keys(): + if db_table in table: + pass + + +def df_sql_write(dataframe, db_table, schema,engine): """ Convert dataframes such that their column names are made small and the index is renamed 'id' so as to @@ -357,10 +367,16 @@ def df_sql_write(dataframe, db_table, engine): engine: :py:mod:`sqlalchemy.engine.base.Engine` Sqlalchemy database engine """ + #for table in metadata.tables.keys(): + + + sql_write_df = dataframe.copy() sql_write_df.columns = sql_write_df.columns.map(str.lower) # sql_write_df = sql_write_df.set_index('id') - sql_write_df.to_sql(db_table.name, con=engine, if_exists='append', index=None) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) + + #con.connect().execute() @@ -368,7 +384,7 @@ def df_sql_write(dataframe, db_table, engine): def export_network_to_db(engine, schema, table, tabletype, srid): print("Exporting table type : {}".format(tabletype)) if tabletype == 'line': - pass + metadata.execute(df_sql_write(line1, "topology.ego_ding0_line", con)) elif tabletype == 'lv_cd': pass @@ -594,6 +610,8 @@ def change_owner(engine, table, role): engine.close() + + # create a dummy dataframe with lines line1 = pd.DataFrame({'run_id': [1, 1], 'id_db': [1, 2], @@ -610,8 +628,14 @@ def change_owner(engine, table, role): 'R': [0.0001, 0.00005], 'I_max_th': [5, 6]}) -# included for testing -#df_sql_write(line1, , con) +versioning1 = pd.DataFrame({'run_id': [6], 'description': str(line1.to_dict())}) # tested with reiners_db -create_ding0_sql_tables(con, "topology") \ No newline at end of file +create_ding0_sql_tables(con, "topology") + +# Test fnc. 
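One thing to keep in mind with these module-level test calls: every ding0 table declares run_id as a foreign key onto the versioning table, so a row in ego_ding0_versioning has to be written before any data frame that references that run_id, otherwise the append fails on the constraint. A hedged sketch of that ordering with dummy frames; the schema name and the yyyyMMddhhmmss-style run id are assumptions, and connection() reads its credentials from the egoio config:

import pandas as pd
from egoio.tools.db import connection

engine = connection()          # same helper the module uses
run_id = 20180921154800        # assumed yyyyMMddhhmmss-style run id

# 1) register the run first, otherwise the run_id foreign key rejects the data rows
versioning = pd.DataFrame({'run_id': [run_id], 'description': ['test run']})
versioning.to_sql('ego_ding0_versioning', con=engine, schema='topology',
                  if_exists='append', index=False)

# 2) only then append the data tables, e.g. a dummy line frame like the one above
line1 = pd.DataFrame({'run_id': [run_id], 'id_db': [1], 'edge_name': ['line1'],
                      'grid_name': ['mv_grid5'], 'node1': ['1'], 'node2': ['2'],
                      'type_kind': ['line'], 'type_name': ['NASX2Y'], 'length': [1.3],
                      'u_n': [10], 'c': [0.002], 'l': [0.01], 'r': [0.0001], 'i_max_th': [5]})
line1.to_sql('ego_ding0_line', con=engine, schema='topology',
             if_exists='append', index=False)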
+select_db_table("ego_ding0_line") + +# included for testing +df_sql_write(line1, "ego_ding0_line", "topology", con) +# df_sql_write(versioning1, "ego_ding0_versioning", "topology", con) \ No newline at end of file From 6d4f822431283c567ba25db8e233a554a6f2910e Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Fri, 21 Sep 2018 16:35:25 +0200 Subject: [PATCH 072/215] #270 add data frames to table decide by tabletype --- ding0/io/db_export.py | 117 +++++++++++++++--------------------------- 1 file changed, 41 insertions(+), 76 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 821f948d..f723efe3 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -61,6 +61,9 @@ # Modify if folder name is different FOLDER = Path('C:/ego_grid_ding0_metadatastrings') +# Set the Database schema which you want to add the tables to +SCHEMA = "topology" + def load_json_files(): """ @@ -111,7 +114,7 @@ def prepare_metadatastring_fordb(table): return mdsstring -def create_ding0_sql_tables(engine, ding0_schema=None): +def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): """ Create the ding0 tables @@ -342,7 +345,7 @@ def select_db_table(db_table): pass -def df_sql_write(dataframe, db_table, schema,engine): +def df_sql_write(engine, schema, db_table, dataframe): """ Convert dataframes such that their column names are made small and the index is renamed 'id' so as to @@ -380,111 +383,69 @@ def df_sql_write(dataframe, db_table, schema,engine): +def export_network_to_db(engine, schema, df, tabletype, srid=None): + """ + Exports pre created Pands data frames to a connected database. + + :param engine: + :param schema: + :param df: + :param tabletype: + :param srid: + """ + # ToDo: check if versioning table exists -def export_network_to_db(engine, schema, table, tabletype, srid): print("Exporting table type : {}".format(tabletype)) if tabletype == 'line': - metadata.execute(df_sql_write(line1, "topology.ego_ding0_line", con)) + df_sql_write(engine, schema, DING0_TABLES['line'], df) elif tabletype == 'lv_cd': - pass + df_sql_write(engine, schema, DING0_TABLES['lv_branchtee'], df) elif tabletype == 'lv_gen': - pass + df_sql_write(engine, schema, DING0_TABLES['lv_generator'], df) elif tabletype == 'lv_load': - pass + df_sql_write(engine, schema, DING0_TABLES['lv_load'], df) elif tabletype == 'lv_grid': - pass + df_sql_write(engine, schema, DING0_TABLES['lv_grid'], df) elif tabletype == 'lv_station': - pass + df_sql_write(engine, schema, DING0_TABLES['lv_station'], df) elif tabletype == 'mvlv_trafo': - pass + df_sql_write(engine, schema, DING0_TABLES['mvlv_transformer'], df) elif tabletype == 'mvlv_mapping': - pass + df_sql_write(engine, schema, DING0_TABLES['mvlv_mapping'], df) elif tabletype == 'mv_cd': - pass + df_sql_write(engine, schema, DING0_TABLES['mv_branchtee'], df) elif tabletype == 'mv_cb': - pass + df_sql_write(engine, schema, DING0_TABLES['mv_circuitbreaker'], df) elif tabletype == 'mv_gen': - pass + df_sql_write(engine, schema, DING0_TABLES['mv_generator'], df) elif tabletype == 'mv_load': - pass + df_sql_write(engine, schema, DING0_TABLES['mv_load'], df) elif tabletype == 'mv_grid': - pass + df_sql_write(engine, schema, DING0_TABLES['mv_grid'], df) elif tabletype == 'mv_station': - pass + df_sql_write(engine, schema, DING0_TABLES['mv_station'], df) elif tabletype == 'hvmv_trafo': - pass + df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df) + + # else: + # pass # if not engine.dialect.has_table(engine, 'ego_grid_mv_transformer'): # print('helloworld') - 
#session.commit() - - -def export_data_to_db(session, schema, run_id, metadata_json, srid, - lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, - lv_loads, - mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, - mv_loads, lines, mvlv_mapping): - # only for testing - # engine = create_engine('sqlite:///:memory:') - - # get the run_id from model_draft.ego_grid_ding0_versioning - # compare the run_id from table to the current run_id - - # oedb_versioning_query = session.query( - # schema.EgoGridDing0Versioning.run_id, - # schema.EgoGridDing0Versioning.description - # ).filter(schema.EgoGridDing0Versioning.run_id == run_id) - # - # oedb_versioning = pd.read_sql_query(oedb_versioning_query.statement, - # session.bind) - db_versioning = pd.DataFrame() - - if db_versioning.empty: - # if the run_id doesn't exist then - # create entry into ego_grid_ding0_versioning: - metadata_df = pd.DataFrame({'run_id': run_id, - 'description': metadata_json}, - index=[0]) - metadata_df.apply(lambda row: - session.add(schema.EgoGridDing0Versioning( - run_id=row['run_id'], - description=row['description'], - )) - , axis=1) - session.commit() - - export_network_to_db(session, lv_grid, 'lv_grid', srid) - export_network_to_db(session, lv_gen, 'lv_gen', srid) - export_network_to_db(session, lv_cd, 'lv_cd', srid) - export_network_to_db(session, lv_stations, 'lv_station', srid) - export_network_to_db(session, mvlv_trafos, 'mvlv_trafo', srid) - export_network_to_db(session, lv_loads, 'lv_load', srid) - export_network_to_db(session, mv_grid, 'mv_grid', srid) - export_network_to_db(session, mv_gen, 'mv_gen', srid) - export_network_to_db(session, mv_cb, 'mv_cb', srid) - export_network_to_db(session, mv_cd, 'mv_cd', srid) - export_network_to_db(session, mv_stations, 'mv_station', srid) - export_network_to_db(session, hvmv_trafos, 'hvmv_trafo', srid) - export_network_to_db(session, mv_loads, 'mv_load', srid) - export_network_to_db(session, lines, 'line', srid) - export_network_to_db(session, mvlv_mapping, 'mvlv_mapping', srid) - else: - raise KeyError("run_id already present! No tables are input!") - def create_ding0_db_tables(engine, schema,): tables = [schema.EgoGridDing0Versioning, @@ -613,7 +574,7 @@ def change_owner(engine, table, role): # create a dummy dataframe with lines -line1 = pd.DataFrame({'run_id': [1, 1], +line1 = pd.DataFrame({'run_id': [2, 2], 'id_db': [1, 2], 'edge_name': ['line1', 'line2'], 'grid_name': ['mv_grid5', 'mvgrid5'], @@ -637,5 +598,9 @@ def change_owner(engine, table, role): select_db_table("ego_ding0_line") # included for testing -df_sql_write(line1, "ego_ding0_line", "topology", con) -# df_sql_write(versioning1, "ego_ding0_versioning", "topology", con) \ No newline at end of file +# df_sql_write(line1, "ego_ding0_line", "topology", con) +# df_sql_write(versioning1, "ego_ding0_versioning", "topology", con) + +# ToDo: Include the Pandas Dataframes from script x? 
which are created all 16/(15) tables +# export_network_to_db(engine, schema, df, tabletype, srid=None) +export_network_to_db(con, SCHEMA, line1, "line") \ No newline at end of file From 0f95146e6371a0c7465a9b728087b9b6a95c906b Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Fri, 21 Sep 2018 16:45:27 +0200 Subject: [PATCH 073/215] #270 minor changes --- ding0/io/db_export.py | 37 ++----------------------------------- 1 file changed, 2 insertions(+), 35 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index f723efe3..8d6f6ec5 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -339,12 +339,6 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): metadata.create_all(engine, checkfirst=True) -def select_db_table(db_table): - for table in metadata.tables.keys(): - if db_table in table: - pass - - def df_sql_write(engine, schema, db_table, dataframe): """ Convert dataframes such that their column names @@ -370,22 +364,16 @@ def df_sql_write(engine, schema, db_table, dataframe): engine: :py:mod:`sqlalchemy.engine.base.Engine` Sqlalchemy database engine """ - #for table in metadata.tables.keys(): - - sql_write_df = dataframe.copy() sql_write_df.columns = sql_write_df.columns.map(str.lower) # sql_write_df = sql_write_df.set_index('id') sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) - #con.connect().execute() - - def export_network_to_db(engine, schema, df, tabletype, srid=None): """ - Exports pre created Pands data frames to a connected database. + Exports pre created Pands data frames to a connected database schema. :param engine: :param schema: @@ -447,27 +435,6 @@ def export_network_to_db(engine, schema, df, tabletype, srid=None): # print('helloworld') -def create_ding0_db_tables(engine, schema,): - tables = [schema.EgoGridDing0Versioning, - schema.EgoGridDing0Line, - schema.EgoGridDing0LvBranchtee, - schema.EgoGridDing0LvGenerator, - schema.EgoGridDing0LvLoad, - schema.EgoGridDing0LvGrid, - schema.EgoGridDing0LvStation, - schema.EgoGridDing0MvlvTransformer, - schema.EgoGridDing0MvlvMapping, - schema.EgoGridDing0MvBranchtee, - schema.EgoGridDing0MvCircuitbreaker, - schema.EgoGridDing0MvGenerator, - schema.EgoGridDing0MvLoad, - schema.EgoGridDing0MvGrid, - schema.EgoGridDing0MvStation, - schema.EgoGridDing0HvmvTransformer] - - for tab in tables: - tab().__table__.create(bind=engine, checkfirst=True) - def drop_ding0_db_tables(engine, schema): tables = [schema.EgoGridDing0Line, @@ -601,6 +568,6 @@ def change_owner(engine, table, role): # df_sql_write(line1, "ego_ding0_line", "topology", con) # df_sql_write(versioning1, "ego_ding0_versioning", "topology", con) -# ToDo: Include the Pandas Dataframes from script x? which are created all 16/(15) tables +# ToDo: Include the Pandas Dataframes from script x? which are created for all 16/(15) tables # export_network_to_db(engine, schema, df, tabletype, srid=None) export_network_to_db(con, SCHEMA, line1, "line") \ No newline at end of file From 189afc50d4ac01ce711572fef386eb603946701d Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Fri, 21 Sep 2018 16:46:01 +0200 Subject: [PATCH 074/215] #270 minor changes --- ding0/io/db_export.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 8d6f6ec5..9dce2071 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -561,9 +561,6 @@ def change_owner(engine, table, role): # tested with reiners_db create_ding0_sql_tables(con, "topology") -# Test fnc. 
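# --- Aside, not part of the patch series: the tabletype if/elif chain that
# --- export_network_to_db() gained above is effectively a table lookup. A
# --- minimal self-contained sketch of that idea; the mapping only mirrors a
# --- few DING0_TABLES entries and the write function is injected, so nothing
# --- from this module is needed to run it.
DEMO_TABLES = {'line': 'ego_ding0_line',
               'lv_cd': 'ego_ding0_lv_branchtee',
               'mv_gen': 'ego_ding0_mv_generator',
               'hvmv_trafo': 'ego_ding0_hvmv_transformer'}   # remaining table types omitted

def dispatch_export(write_fn, engine, schema, df, tabletype):
    """Resolve the target table once instead of walking an if/elif chain."""
    try:
        db_table = DEMO_TABLES[tabletype]
    except KeyError:
        raise ValueError("unknown tabletype: {!r}".format(tabletype))
    write_fn(engine, schema, db_table, df)

# usage would then read e.g.: dispatch_export(df_sql_write, con, SCHEMA, line1, 'line')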
-select_db_table("ego_ding0_line") - # included for testing # df_sql_write(line1, "ego_ding0_line", "topology", con) # df_sql_write(versioning1, "ego_ding0_versioning", "topology", con) From c3ab009fee36609fec90e8aace65d1f40188f5e5 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Fri, 21 Sep 2018 16:57:05 +0200 Subject: [PATCH 075/215] #270 minor changes --- ding0/io/db_export.py | 19 +------------------ ..._ding0_hvmv_transformer_metadata_v1.3.json | 2 +- .../ego_grid_ding0_line_metadata_v1.3.json | 2 +- ...grid_ding0_lv_branchtee_metadata_v1.3.json | 2 +- ...grid_ding0_lv_generator_metadata_v1.3.json | 2 +- .../ego_grid_ding0_lv_grid_metadata_v1.3.json | 2 +- .../ego_grid_ding0_lv_load_metadata_v1.3.json | 2 +- ...o_grid_ding0_lv_station_metadata_v1.3.json | 2 +- ...grid_ding0_mv_branchtee_metadata_v1.3.json | 2 +- ...ding0_mv_circuitbreaker_metadata_v1.3.json | 2 +- ...grid_ding0_mv_generator_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mv_grid_metadata_v1.3.json | 2 +- .../ego_grid_ding0_mv_load_metadata_v1.3.json | 2 +- ...o_grid_ding0_mv_station_metadata_v1.3.json | 2 +- ...grid_ding0_mvlv_mapping_metadata_v1.3.json | 2 +- ..._ding0_mvlv_transformer_metadata_v1.3.json | 2 +- ...o_grid_ding0_versioning_metadata_v1.3.json | 2 +- 17 files changed, 17 insertions(+), 34 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 9dce2071..df7357c4 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -435,27 +435,10 @@ def export_network_to_db(engine, schema, df, tabletype, srid=None): # print('helloworld') - def drop_ding0_db_tables(engine, schema): - tables = [schema.EgoGridDing0Line, - schema.EgoGridDing0LvBranchtee, - schema.EgoGridDing0LvGenerator, - schema.EgoGridDing0LvLoad, - schema.EgoGridDing0LvGrid, - schema.EgoGridDing0LvStation, - schema.EgoGridDing0MvlvTransformer, - schema.EgoGridDing0MvlvMapping, - schema.EgoGridDing0MvBranchtee, - schema.EgoGridDing0MvCircuitbreaker, - schema.EgoGridDing0MvGenerator, - schema.EgoGridDing0MvLoad, - schema.EgoGridDing0MvGrid, - schema.EgoGridDing0MvStation, - schema.EgoGridDing0HvmvTransformer, - schema.EgoGridDing0Versioning] print("Please confirm that you would like to drop the following tables:") - for n, tab in enumerate(tables): + for n, tab in enumerate(metadata.tables.keys()): print("{: 3d}. 
{}".format(n, tab)) print("Please confirm with either of the choices below:\n" + diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json index c8f82212..a61c1ecc 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json @@ -46,4 +46,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } \ No newline at end of file + "_none": "If not applicable use: none"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json index 2318fdeb..a577a50c 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json @@ -54,4 +54,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } + "_none": "If not applicable use: none"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json index ffb8e188..4d9a17eb 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json @@ -43,4 +43,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } \ No newline at end of file + "_none": "If not applicable use: none"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json index 02d3cb0b..68ef3661 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json @@ -51,4 +51,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } + "_none": "If not applicable use: none"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json index 41ce7da2..4ef26f8c 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json @@ -43,4 +43,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } \ No newline at end of file + "_none": "If not applicable use: none"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json index 897ef686..66f3f992 100644 --- 
a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json @@ -45,4 +45,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } + "_none": "If not applicable use: none"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json index a15cc8f3..6f83d319 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json @@ -43,4 +43,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } + "_none": "If not applicable use: none"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json index 411e5aea..495b0f04 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json @@ -43,4 +43,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } + "_none": "If not applicable use: none"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json index be5daee9..2cd2e00d 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json @@ -44,4 +44,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } + "_none": "If not applicable use: none"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json index 84b1f3aa..4aa15423 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json @@ -49,4 +49,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } + "_none": "If not applicable use: none"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json index 72717c67..55dddaf0 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json @@ -45,4 +45,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } + "_none": "If not applicable use: none"} } } diff --git 
a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json index b570f3d5..64856203 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json @@ -44,4 +44,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } \ No newline at end of file + "_none": "If not applicable use: none"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json index 4b53397a..5799d419 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json @@ -43,4 +43,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } \ No newline at end of file + "_none": "If not applicable use: none"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json index 19d1a962..48ce5032 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json @@ -44,4 +44,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } + "_none": "If not applicable use: none"} } } diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json index da46e8dc..290982c9 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json @@ -47,4 +47,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } \ No newline at end of file + "_none": "If not applicable use: none"} } } \ No newline at end of file diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json index 8956b3b9..460beae9 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json @@ -41,4 +41,4 @@ "_additional_information": { "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", "_units": "Use a space between Numbers and units (100 m)", - "_none": "If not applicable use 'none'"} } } \ No newline at end of file + "_none": "If not applicable use: none"} } } \ No newline at end of file From c3438256e3b369274d7c640e44a5c4e050f004bb Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Fri, 21 Sep 2018 18:21:49 +0200 Subject: [PATCH 076/215] #270 minor changes --- ding0/io/db_export.py | 139 
+++++++++++++++++++++++++----------------- 1 file changed, 83 insertions(+), 56 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index df7357c4..86addb0a 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -33,7 +33,6 @@ from sqlalchemy.dialects.postgresql import ARRAY, DOUBLE_PRECISION, INTEGER, NUMERIC, TEXT, BIGINT, TIMESTAMP, VARCHAR - con = connection() Base = declarative_base() @@ -371,7 +370,11 @@ def df_sql_write(engine, schema, db_table, dataframe): sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) -def export_network_to_db(engine, schema, df, tabletype, srid=None): +def check_run_id(): + pass + + +def export_network_to_db(engine, schema, df, tabletype, srid=None, run_id=None): """ Exports pre created Pands data frames to a connected database schema. @@ -381,64 +384,101 @@ def export_network_to_db(engine, schema, df, tabletype, srid=None): :param tabletype: :param srid: """ - # ToDo: check if versioning table exists + # ToDo: extend the fnc: insert run_id to versioning table if not exists + + # db_versioning = pd.DataFrame() + # + # if db_versioning.empty: + # # if the run_id doesn't exist then + # # create entry into ego_grid_ding0_versioning: + # metadata_df = pd.DataFrame({'run_id': run_id, + # 'description': metadata_json}, + # index=[0]) + # metadata_df.apply(lambda row: + # session.add(schema.EgoGridDing0Versioning( + # run_id=row['run_id'], + # description=row['description'], + # )) + # , axis=1) + # session.commit() + + db_versioning = pd.read_sql_table(DING0_TABLES['versioning'], engine, schema, + columns=['run_id', 'description']) + - print("Exporting table type : {}".format(tabletype)) - if tabletype == 'line': - df_sql_write(engine, schema, DING0_TABLES['line'], df) - elif tabletype == 'lv_cd': - df_sql_write(engine, schema, DING0_TABLES['lv_branchtee'], df) + if engine.dialect.has_table(engine, DING0_TABLES["versioning"]): + if db_versioning.empty: + # if the run_id doesn't exist then + # create entry into ego_grid_ding0_versioning: + # metadata_df = pd.DataFrame({'run_id': run_id, + # 'description': metadata_json}, + # index=[0]) + pass - elif tabletype == 'lv_gen': - df_sql_write(engine, schema, DING0_TABLES['lv_generator'], df) + else: + print("Exporting table type : {}".format(tabletype)) + if tabletype == 'line': + df_sql_write(engine, schema, DING0_TABLES['line'], df) - elif tabletype == 'lv_load': - df_sql_write(engine, schema, DING0_TABLES['lv_load'], df) + elif tabletype == 'lv_cd': + df_sql_write(engine, schema, DING0_TABLES['lv_branchtee'], df) - elif tabletype == 'lv_grid': - df_sql_write(engine, schema, DING0_TABLES['lv_grid'], df) + elif tabletype == 'lv_gen': + df_sql_write(engine, schema, DING0_TABLES['lv_generator'], df) - elif tabletype == 'lv_station': - df_sql_write(engine, schema, DING0_TABLES['lv_station'], df) + elif tabletype == 'lv_load': + df_sql_write(engine, schema, DING0_TABLES['lv_load'], df) - elif tabletype == 'mvlv_trafo': - df_sql_write(engine, schema, DING0_TABLES['mvlv_transformer'], df) + elif tabletype == 'lv_grid': + df_sql_write(engine, schema, DING0_TABLES['lv_grid'], df) - elif tabletype == 'mvlv_mapping': - df_sql_write(engine, schema, DING0_TABLES['mvlv_mapping'], df) + elif tabletype == 'lv_station': + df_sql_write(engine, schema, DING0_TABLES['lv_station'], df) - elif tabletype == 'mv_cd': - df_sql_write(engine, schema, DING0_TABLES['mv_branchtee'], df) + elif tabletype == 'mvlv_trafo': + df_sql_write(engine, schema, 
DING0_TABLES['mvlv_transformer'], df) - elif tabletype == 'mv_cb': - df_sql_write(engine, schema, DING0_TABLES['mv_circuitbreaker'], df) + elif tabletype == 'mvlv_mapping': + df_sql_write(engine, schema, DING0_TABLES['mvlv_mapping'], df) - elif tabletype == 'mv_gen': - df_sql_write(engine, schema, DING0_TABLES['mv_generator'], df) + elif tabletype == 'mv_cd': + df_sql_write(engine, schema, DING0_TABLES['mv_branchtee'], df) - elif tabletype == 'mv_load': - df_sql_write(engine, schema, DING0_TABLES['mv_load'], df) + elif tabletype == 'mv_cb': + df_sql_write(engine, schema, DING0_TABLES['mv_circuitbreaker'], df) - elif tabletype == 'mv_grid': - df_sql_write(engine, schema, DING0_TABLES['mv_grid'], df) + elif tabletype == 'mv_gen': + df_sql_write(engine, schema, DING0_TABLES['mv_generator'], df) - elif tabletype == 'mv_station': - df_sql_write(engine, schema, DING0_TABLES['mv_station'], df) + elif tabletype == 'mv_load': + df_sql_write(engine, schema, DING0_TABLES['mv_load'], df) - elif tabletype == 'hvmv_trafo': - df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df) + elif tabletype == 'mv_grid': + df_sql_write(engine, schema, DING0_TABLES['mv_grid'], df) - # else: - # pass - # if not engine.dialect.has_table(engine, 'ego_grid_mv_transformer'): - # print('helloworld') + elif tabletype == 'mv_station': + df_sql_write(engine, schema, DING0_TABLES['mv_station'], df) + + elif tabletype == 'hvmv_trafo': + df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df) + + # else: + # pass + # if not engine.dialect.has_table(engine, 'ego_grid_mv_transformer'): + # print('helloworld') + # else: + # raise KeyError("run_id already present! No tables are input!") + + else: + print("There is no " + DING0_TABLES["versioning"] + " table in the schema: " + SCHEMA) def drop_ding0_db_tables(engine, schema): + tables = metadata.tables.keys() print("Please confirm that you would like to drop the following tables:") - for n, tab in enumerate(metadata.tables.keys()): + for n, tab in enumerate(tables): print("{: 3d}. {}".format(n, tab)) print("Please confirm with either of the choices below:\n" + @@ -475,22 +515,7 @@ def drop_ding0_db_tables(engine, schema): def db_tables_change_owner(engine, schema): - tables = [schema.EgoGridDing0Line, - schema.EgoGridDing0LvBranchtee, - schema.EgoGridDing0LvGenerator, - schema.EgoGridDing0LvLoad, - schema.EgoGridDing0LvGrid, - schema.EgoGridDing0LvStation, - schema.EgoGridDing0MvlvTransformer, - schema.EgoGridDing0MvlvMapping, - schema.EgoGridDing0MvBranchtee, - schema.EgoGridDing0MvCircuitbreaker, - schema.EgoGridDing0MvGenerator, - schema.EgoGridDing0MvLoad, - schema.EgoGridDing0MvGrid, - schema.EgoGridDing0MvStation, - schema.EgoGridDing0HvmvTransformer, - schema.EgoGridDing0Versioning] + tables = metadata.tables.keys() def change_owner(engine, table, role): @@ -523,6 +548,7 @@ def change_owner(engine, table, role): + # create a dummy dataframe with lines line1 = pd.DataFrame({'run_id': [2, 2], 'id_db': [1, 2], @@ -539,7 +565,7 @@ def change_owner(engine, table, role): 'R': [0.0001, 0.00005], 'I_max_th': [5, 6]}) -versioning1 = pd.DataFrame({'run_id': [6], 'description': str(line1.to_dict())}) +versioning1 = pd.DataFrame({'run_id': [2], 'description': str(line1.to_dict())}) # tested with reiners_db create_ding0_sql_tables(con, "topology") @@ -550,4 +576,5 @@ def change_owner(engine, table, role): # ToDo: Include the Pandas Dataframes from script x? 
which are created for all 16/(15) tables # export_network_to_db(engine, schema, df, tabletype, srid=None) +#export_network_to_db(con, SCHEMA, line1, "versioning1") export_network_to_db(con, SCHEMA, line1, "line") \ No newline at end of file From 0e9426e144751387384896768805addd7855736c Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Fri, 21 Sep 2018 21:13:36 +0200 Subject: [PATCH 077/215] #270 included ding0.io.export, added parm. to export_network_to_db and modified the function for adding run_id if the db table is empty --- ding0/io/db_export.py | 43 +++++++++++++++---------------------------- 1 file changed, 15 insertions(+), 28 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 86addb0a..1b20e5e1 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -20,17 +20,12 @@ import re -from sqlalchemy import create_engine -from egoio.db_tables import model_draft as md from egoio.tools.db import connection +from ding0.io.export import export_network from sqlalchemy import MetaData, ARRAY, BigInteger, Boolean, CheckConstraint, Column, Date, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, Integer, JSON, Numeric, SmallInteger, String, Table, Text, UniqueConstraint, text from geoalchemy2.types import Geometry, Raster -from sqlalchemy.orm import relationship -from sqlalchemy.dialects.postgresql.hstore import HSTORE -from sqlalchemy.dialects.postgresql.base import OID from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.dialects.postgresql import ARRAY, DOUBLE_PRECISION, INTEGER, NUMERIC, TEXT, BIGINT, TIMESTAMP, VARCHAR con = connection() @@ -74,7 +69,6 @@ def load_json_files(): contains all .json files from the folder """ - #print(FOLDER) full_dir = os.walk(FOLDER.parent / FOLDER.name) jsonmetadata = [] @@ -370,11 +364,7 @@ def df_sql_write(engine, schema, db_table, dataframe): sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) -def check_run_id(): - pass - - -def export_network_to_db(engine, schema, df, tabletype, srid=None, run_id=None): +def export_network_to_db(engine, schema, df, tabletype, metadata_json, run_id=None, srid=None): """ Exports pre created Pands data frames to a connected database schema. 
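# --- Illustrative sketch, not part of the patch series: the versioning guard
# --- in the hunk below reads ego_ding0_versioning into a DataFrame and writes
# --- a first run_id row when the table is empty. A hedged stand-alone version
# --- using sqlite and sqlalchemy.inspect() (newer SQLAlchemy) in place of
# --- engine.dialect.has_table(); all names here are demo assumptions.
import pandas as pd
from sqlalchemy import create_engine, inspect

demo_engine = create_engine('sqlite:///:memory:')
pd.DataFrame({'run_id': pd.Series(dtype='int64'),
              'description': pd.Series(dtype='object')}
             ).to_sql('ego_ding0_versioning', demo_engine, index=False)  # empty versioning table

if inspect(demo_engine).has_table('ego_ding0_versioning'):
    db_versioning = pd.read_sql_table('ego_ding0_versioning', demo_engine,
                                      columns=['run_id', 'description'])
    if db_versioning.empty:
        # first export against this database: register the run before any grid data
        pd.DataFrame({'run_id': [2], 'description': ['demo run']}
                     ).to_sql('ego_ding0_versioning', demo_engine,
                              if_exists='append', index=False)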
@@ -405,18 +395,16 @@ def export_network_to_db(engine, schema, df, tabletype, srid=None, run_id=None): db_versioning = pd.read_sql_table(DING0_TABLES['versioning'], engine, schema, columns=['run_id', 'description']) - - if engine.dialect.has_table(engine, DING0_TABLES["versioning"]): if db_versioning.empty: # if the run_id doesn't exist then # create entry into ego_grid_ding0_versioning: - # metadata_df = pd.DataFrame({'run_id': run_id, - # 'description': metadata_json}, - # index=[0]) - pass + metadata_df = pd.DataFrame({'run_id': run_id, + 'description': metadata_json}, + index=[0]) + + df_sql_write(con, SCHEMA, "ego_ding0_versioning", metadata_df) - else: print("Exporting table type : {}".format(tabletype)) if tabletype == 'line': df_sql_write(engine, schema, DING0_TABLES['line'], df) @@ -462,14 +450,14 @@ def export_network_to_db(engine, schema, df, tabletype, srid=None, run_id=None): elif tabletype == 'hvmv_trafo': df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df) + else: + # if the run_id doesn't exist then + # create entry into ego_grid_ding0_versioning: + metadata_df = pd.DataFrame({'run_id': run_id, + 'description': metadata_json}, + index=[0]) - # else: - # pass - # if not engine.dialect.has_table(engine, 'ego_grid_mv_transformer'): - # print('helloworld') - # else: - # raise KeyError("run_id already present! No tables are input!") - + # compare df db_versioning and metadata_df run_id field else: print("There is no " + DING0_TABLES["versioning"] + " table in the schema: " + SCHEMA) @@ -517,7 +505,6 @@ def drop_ding0_db_tables(engine, schema): def db_tables_change_owner(engine, schema): tables = metadata.tables.keys() - def change_owner(engine, table, role): r"""Gives access to database users/ groups Parameters @@ -576,5 +563,5 @@ def change_owner(engine, table, role): # ToDo: Include the Pandas Dataframes from script x? which are created for all 16/(15) tables # export_network_to_db(engine, schema, df, tabletype, srid=None) -#export_network_to_db(con, SCHEMA, line1, "versioning1") +#export_network_to_db(con, SCHEMA, line1, "versioning") export_network_to_db(con, SCHEMA, line1, "line") \ No newline at end of file From a20c52ee164055af6d578d7c347cc956f44260a4 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Sat, 22 Sep 2018 17:57:21 +0200 Subject: [PATCH 078/215] #270 added metadata_json witch can be any list of Ding0Network must contain run_id --- ding0/io/db_export.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 1b20e5e1..ae68100a 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -33,6 +33,12 @@ Base = declarative_base() metadata = Base.metadata +# any list of NetworkDing0 provides run_id +# metadata_json = json.dumps(nw.metadata) +metadata_json = { + "run_id":"2" + } + DING0_TABLES = {'versioning': 'ego_ding0_versioning', 'line': 'ego_ding0_line', 'lv_branchtee': 'ego_ding0_lv_branchtee', @@ -364,7 +370,7 @@ def df_sql_write(engine, schema, db_table, dataframe): sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) -def export_network_to_db(engine, schema, df, tabletype, metadata_json, run_id=None, srid=None): +def export_network_to_db(engine, schema, df, tabletype, metadata_json, srid=None): """ Exports pre created Pands data frames to a connected database schema. 
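# --- Illustrative sketch, not part of the patch series: how the run_id moves
# --- from the network metadata JSON into the one-row versioning DataFrame
# --- built in the hunk below. The JSON string is a made-up stand-in for the
# --- metadata a NetworkDing0 run provides.
import json
import pandas as pd

meta_json = '{"run_id": "2", "mv_grid_districts": [3040]}'   # assumed example payload
metadata_json = json.loads(meta_json)                        # dict holding at least 'run_id'

metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'],
                            'description': str(metadata_json)},
                           index=[0])                        # scalar values need an explicit index
print(metadata_df)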
@@ -399,7 +405,7 @@ def export_network_to_db(engine, schema, df, tabletype, metadata_json, run_id=No if db_versioning.empty: # if the run_id doesn't exist then # create entry into ego_grid_ding0_versioning: - metadata_df = pd.DataFrame({'run_id': run_id, + metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], 'description': metadata_json}, index=[0]) @@ -451,11 +457,7 @@ def export_network_to_db(engine, schema, df, tabletype, metadata_json, run_id=No elif tabletype == 'hvmv_trafo': df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df) else: - # if the run_id doesn't exist then - # create entry into ego_grid_ding0_versioning: - metadata_df = pd.DataFrame({'run_id': run_id, - 'description': metadata_json}, - index=[0]) + pass # compare df db_versioning and metadata_df run_id field else: @@ -564,4 +566,5 @@ def change_owner(engine, table, role): # ToDo: Include the Pandas Dataframes from script x? which are created for all 16/(15) tables # export_network_to_db(engine, schema, df, tabletype, srid=None) #export_network_to_db(con, SCHEMA, line1, "versioning") -export_network_to_db(con, SCHEMA, line1, "line") \ No newline at end of file +export_network_to_db(con, SCHEMA, line1, "line", metadata_json) + From 22c67b32c8a57cbb291ce0cb9263f560330efb25 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Sat, 22 Sep 2018 22:20:07 +0200 Subject: [PATCH 079/215] #270 new fnc.: export_df_to_db selects the table by tabletype where the data frame get inserted, new fnc.: run_id_in_db returns true if run_id is available in db and false if not available, export_network_to_db changed --- ding0/io/db_export.py | 157 +++++++++++++++++++++++++----------------- 1 file changed, 92 insertions(+), 65 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index ae68100a..4f559e8c 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -369,10 +369,89 @@ def df_sql_write(engine, schema, db_table, dataframe): # sql_write_df = sql_write_df.set_index('id') sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) +def export_df_to_db(engine, schema, df, tabletype): + """ + Writes values to the connected DB. Values from Pandas data frame. 
+ + :param engine: + :param schema: + :param df: + :param tabletype: + :return: + """ + print("Exporting table type : {}".format(tabletype)) + if tabletype == 'line': + df_sql_write(engine, schema, DING0_TABLES['line'], df) + + elif tabletype == 'lv_cd': + df_sql_write(engine, schema, DING0_TABLES['lv_branchtee'], df) + + elif tabletype == 'lv_gen': + df_sql_write(engine, schema, DING0_TABLES['lv_generator'], df) + + elif tabletype == 'lv_load': + df_sql_write(engine, schema, DING0_TABLES['lv_load'], df) + + elif tabletype == 'lv_grid': + df_sql_write(engine, schema, DING0_TABLES['lv_grid'], df) + + elif tabletype == 'lv_station': + df_sql_write(engine, schema, DING0_TABLES['lv_station'], df) + + elif tabletype == 'mvlv_trafo': + df_sql_write(engine, schema, DING0_TABLES['mvlv_transformer'], df) + + elif tabletype == 'mvlv_mapping': + df_sql_write(engine, schema, DING0_TABLES['mvlv_mapping'], df) + + elif tabletype == 'mv_cd': + df_sql_write(engine, schema, DING0_TABLES['mv_branchtee'], df) + + elif tabletype == 'mv_cb': + df_sql_write(engine, schema, DING0_TABLES['mv_circuitbreaker'], df) + + elif tabletype == 'mv_gen': + df_sql_write(engine, schema, DING0_TABLES['mv_generator'], df) + + elif tabletype == 'mv_load': + df_sql_write(engine, schema, DING0_TABLES['mv_load'], df) + + elif tabletype == 'mv_grid': + df_sql_write(engine, schema, DING0_TABLES['mv_grid'], df) + + elif tabletype == 'mv_station': + df_sql_write(engine, schema, DING0_TABLES['mv_station'], df) + + elif tabletype == 'hvmv_trafo': + df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df) + + +def run_id_in_db(df, db_versioning): + """ + Filter data frame row run_id and compares the values. + returns true if the value (run_id) for the new data frame (df) is available in the DB + table: ego_ding0_versioning. + + :param df: pandas data frame for any dingo table + :param db_versioning: pandas data frame created from the versionig Table (from DB) + :return: True if value (run_id) is available in the database + """ + db_run_id = db_versioning.filter(items=['run_id']) + df_run_id = df.filter(items=['run_id']) + + for i in db_run_id.values: + for j in df_run_id.values: + if j in i: + return True + else: + return False + def export_network_to_db(engine, schema, df, tabletype, metadata_json, srid=None): """ Exports pre created Pands data frames to a connected database schema. + Creates new entry in ego_ding0_versioning if the table is empty. + Checks if the given pandas data frame "run_id" is available in the DB table. 
:param engine: :param schema: @@ -380,23 +459,6 @@ def export_network_to_db(engine, schema, df, tabletype, metadata_json, srid=None :param tabletype: :param srid: """ - # ToDo: extend the fnc: insert run_id to versioning table if not exists - - # db_versioning = pd.DataFrame() - # - # if db_versioning.empty: - # # if the run_id doesn't exist then - # # create entry into ego_grid_ding0_versioning: - # metadata_df = pd.DataFrame({'run_id': run_id, - # 'description': metadata_json}, - # index=[0]) - # metadata_df.apply(lambda row: - # session.add(schema.EgoGridDing0Versioning( - # run_id=row['run_id'], - # description=row['description'], - # )) - # , axis=1) - # session.commit() db_versioning = pd.read_sql_table(DING0_TABLES['versioning'], engine, schema, columns=['run_id', 'description']) @@ -410,56 +472,21 @@ def export_network_to_db(engine, schema, df, tabletype, metadata_json, srid=None index=[0]) df_sql_write(con, SCHEMA, "ego_ding0_versioning", metadata_df) + export_network_to_db(engine, schema, df, tabletype, metadata_json) - print("Exporting table type : {}".format(tabletype)) - if tabletype == 'line': - df_sql_write(engine, schema, DING0_TABLES['line'], df) - - elif tabletype == 'lv_cd': - df_sql_write(engine, schema, DING0_TABLES['lv_branchtee'], df) - - elif tabletype == 'lv_gen': - df_sql_write(engine, schema, DING0_TABLES['lv_generator'], df) - - elif tabletype == 'lv_load': - df_sql_write(engine, schema, DING0_TABLES['lv_load'], df) - - elif tabletype == 'lv_grid': - df_sql_write(engine, schema, DING0_TABLES['lv_grid'], df) - - elif tabletype == 'lv_station': - df_sql_write(engine, schema, DING0_TABLES['lv_station'], df) - - elif tabletype == 'mvlv_trafo': - df_sql_write(engine, schema, DING0_TABLES['mvlv_transformer'], df) - - elif tabletype == 'mvlv_mapping': - df_sql_write(engine, schema, DING0_TABLES['mvlv_mapping'], df) - - elif tabletype == 'mv_cd': - df_sql_write(engine, schema, DING0_TABLES['mv_branchtee'], df) - - elif tabletype == 'mv_cb': - df_sql_write(engine, schema, DING0_TABLES['mv_circuitbreaker'], df) - - elif tabletype == 'mv_gen': - df_sql_write(engine, schema, DING0_TABLES['mv_generator'], df) - - elif tabletype == 'mv_load': - df_sql_write(engine, schema, DING0_TABLES['mv_load'], df) - - elif tabletype == 'mv_grid': - df_sql_write(engine, schema, DING0_TABLES['mv_grid'], df) - - elif tabletype == 'mv_station': - df_sql_write(engine, schema, DING0_TABLES['mv_station'], df) + elif run_id_in_db(df, db_versioning) == True: + export_df_to_db(engine, schema, df, tabletype) - elif tabletype == 'hvmv_trafo': - df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df) - else: - pass + elif run_id_in_db(df, db_versioning) == False: + # # if the run_id from the data frame doesn't exist then + # # create new entry into ego_grid_ding0_versioning: + # newrunid_df = pd.DataFrame({'run_id': df['run_id'], + # 'description': metadata_json}, + # index=[0]) + # + # df_sql_write(con, SCHEMA, "ego_ding0_versioning", newrunid_df) - # compare df db_versioning and metadata_df run_id field + print("The run_id from the Pandas data frame is not available in the connected database table: "+DING0_TABLES['versioning']) else: print("There is no " + DING0_TABLES["versioning"] + " table in the schema: " + SCHEMA) @@ -539,7 +566,7 @@ def change_owner(engine, table, role): # create a dummy dataframe with lines -line1 = pd.DataFrame({'run_id': [2, 2], +line1 = pd.DataFrame({'run_id': [3, 3], 'id_db': [1, 2], 'edge_name': ['line1', 'line2'], 'grid_name': ['mv_grid5', 'mvgrid5'], From 
c6e9928c99860bac94daab0c0e41d62769636593 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Sat, 22 Sep 2018 22:39:47 +0200 Subject: [PATCH 080/215] #270 minor changes --- ding0/io/db_export.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 4f559e8c..98707052 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -586,12 +586,7 @@ def change_owner(engine, table, role): # tested with reiners_db create_ding0_sql_tables(con, "topology") -# included for testing -# df_sql_write(line1, "ego_ding0_line", "topology", con) -# df_sql_write(versioning1, "ego_ding0_versioning", "topology", con) - # ToDo: Include the Pandas Dataframes from script x? which are created for all 16/(15) tables -# export_network_to_db(engine, schema, df, tabletype, srid=None) -#export_network_to_db(con, SCHEMA, line1, "versioning") +# parameter: export_network_to_db(engine, schema, df, tabletype, srid=None) export_network_to_db(con, SCHEMA, line1, "line", metadata_json) From e3ac0b12792fdb079b9c3a187c7ff9e2979f5d2c Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 24 Sep 2018 00:52:38 +0200 Subject: [PATCH 081/215] #270 included json dump fnc, added parm. to fnc. run_id_in_db and extended it to create db entry for data frames with multiple run_id rows, minor changes to fnc. export_network --- ding0/io/db_export.py | 75 +++++++++++++++++++++++++++---------------- 1 file changed, 48 insertions(+), 27 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 98707052..b3c854fb 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -33,11 +33,10 @@ Base = declarative_base() metadata = Base.metadata -# any list of NetworkDing0 provides run_id +# any list of NetworkDing0 also provides run_id # metadata_json = json.dumps(nw.metadata) -metadata_json = { - "run_id":"2" - } +meta_json = '{"run_id":"20"}' + DING0_TABLES = {'versioning': 'ego_ding0_versioning', 'line': 'ego_ding0_line', @@ -64,6 +63,12 @@ # Set the Database schema which you want to add the tables to SCHEMA = "topology" +def dump_json_obj(meta_json): + # included for testing / or logging + # print("Comment on table: " + table + "\nusing this metadata file: " + file + "\n") + dumped_json = json.loads(meta_json) + return dumped_json +metadata_json = dump_json_obj(meta_json) def load_json_files(): """ @@ -426,7 +431,7 @@ def export_df_to_db(engine, schema, df, tabletype): df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df) -def run_id_in_db(df, db_versioning): +def run_id_in_db(engine, schema, df, db_versioning, tabletype): """ Filter data frame row run_id and compares the values. 
returns true if the value (run_id) for the new data frame (df) is available in the DB @@ -439,12 +444,26 @@ def run_id_in_db(df, db_versioning): db_run_id = db_versioning.filter(items=['run_id']) df_run_id = df.filter(items=['run_id']) - for i in db_run_id.values: - for j in df_run_id.values: - if j in i: - return True - else: - return False + n = [] + for j in db_run_id["run_id"]: + n.append(j) + + # ToDo: problem: if the same run_id is passed in with the pandas data frame 4 entry are created in the db + for i in df_run_id["run_id"]: + if i in n: + # the run_id value needs to be only present in the run_id column + df_by_run_id = df[(df["run_id"]==i) & (df["run_id"]==i)] + export_df_to_db(engine, schema, df_by_run_id, tabletype) + elif i not in n: + metadata_df = pd.DataFrame({'run_id': i, + 'description': ""}, index=[0]) + # create the new run_id from df in db table + df_sql_write(con, SCHEMA, "ego_ding0_versioning", metadata_df) + + # insert df with the new run_id + df_by_run_id = df[df["run_id"] == i] + export_df_to_db(engine, schema, df_by_run_id, tabletype) + def export_network_to_db(engine, schema, df, tabletype, metadata_json, srid=None): @@ -468,25 +487,27 @@ def export_network_to_db(engine, schema, df, tabletype, metadata_json, srid=None # if the run_id doesn't exist then # create entry into ego_grid_ding0_versioning: metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], - 'description': metadata_json}, - index=[0]) + 'description': metadata_json}) df_sql_write(con, SCHEMA, "ego_ding0_versioning", metadata_df) export_network_to_db(engine, schema, df, tabletype, metadata_json) - elif run_id_in_db(df, db_versioning) == True: - export_df_to_db(engine, schema, df, tabletype) - - elif run_id_in_db(df, db_versioning) == False: - # # if the run_id from the data frame doesn't exist then - # # create new entry into ego_grid_ding0_versioning: - # newrunid_df = pd.DataFrame({'run_id': df['run_id'], - # 'description': metadata_json}, - # index=[0]) - # - # df_sql_write(con, SCHEMA, "ego_ding0_versioning", newrunid_df) + # elif run_id_in_db(df, db_versioning) == True: + # export_df_to_db(engine, schema, df, tabletype) + # + # elif run_id_in_db(df, db_versioning) == False: + # # # if the run_id from the data frame doesn't exist then + # # # create new entry into ego_grid_ding0_versioning: + # # newrunid_df = pd.DataFrame({'run_id': df['run_id'], + # # 'description': metadata_json}, + # # index=[0]) + # # + # # df_sql_write(con, SCHEMA, "ego_ding0_versioning", newrunid_df) + # + # print("The run_id from the Pandas data frame is not available in the connected database table: "+DING0_TABLES['versioning']) + else: + run_id_in_db(engine, schema, df, db_versioning, tabletype) - print("The run_id from the Pandas data frame is not available in the connected database table: "+DING0_TABLES['versioning']) else: print("There is no " + DING0_TABLES["versioning"] + " table in the schema: " + SCHEMA) @@ -564,9 +585,9 @@ def change_owner(engine, table, role): - +# ToDo: Testfailed: Cant insert 2 run_ids at a time, if one of them is not availabe in the database # create a dummy dataframe with lines -line1 = pd.DataFrame({'run_id': [3, 3], +line1 = pd.DataFrame({'run_id': [30, 30], 'id_db': [1, 2], 'edge_name': ['line1', 'line2'], 'grid_name': ['mv_grid5', 'mvgrid5'], From c97ad28794fbda5137e785e80c92fb993c05162b Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 24 Sep 2018 15:14:42 +0200 Subject: [PATCH 082/215] #270 included code documentation for better understanding, completed fnc. 
run_id_in_db it will now insert new run_id from df to db and prevent the insert for n times to DB if the given data frame includes multiple run_id values which are the same, tested func. with data frame with two rows (cases: run_id =same, run_id = different) --- ding0/io/db_export.py | 63 ++++++++++++++++++++++++++----------------- 1 file changed, 39 insertions(+), 24 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index b3c854fb..6a28a5de 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -63,12 +63,18 @@ # Set the Database schema which you want to add the tables to SCHEMA = "topology" -def dump_json_obj(meta_json): +# ToDo: Include the metadata_json variable returned form fun. in export.py +def loads_json_obj(meta_json): + """ + Fnc. included for testing, loads json obj as dict + -> Value should be available as metadata_json from export.py skript + :param meta_json: + """ # included for testing / or logging # print("Comment on table: " + table + "\nusing this metadata file: " + file + "\n") - dumped_json = json.loads(meta_json) - return dumped_json -metadata_json = dump_json_obj(meta_json) + loads_json = json.loads(meta_json) + return loads_json +metadata_json = loads_json_obj(meta_json) def load_json_files(): """ @@ -382,7 +388,6 @@ def export_df_to_db(engine, schema, df, tabletype): :param schema: :param df: :param tabletype: - :return: """ print("Exporting table type : {}".format(tabletype)) if tabletype == 'line': @@ -433,30 +438,42 @@ def export_df_to_db(engine, schema, df, tabletype): def run_id_in_db(engine, schema, df, db_versioning, tabletype): """ - Filter data frame row run_id and compares the values. - returns true if the value (run_id) for the new data frame (df) is available in the DB - table: ego_ding0_versioning. - - :param df: pandas data frame for any dingo table - :param db_versioning: pandas data frame created from the versionig Table (from DB) - :return: True if value (run_id) is available in the database + Check if the run_id values from the new data frames are available in the DB. + Creates new run_id in the db if not exist. + Filter data frame for column=row run_id and compares the values. + db_run_id values are from the DB table: ego_ding0_versioning. 
+ + :param engine: DB connection + :param schema: DB schema + :param df: pandas data frame -> gets inserted to the db + :param db_versioning: pandas df created from the versioning table in the DB + :param tabletype: select the db table "value relevant in export_df_to_db()" """ + + db_run_id = db_versioning.filter(items=['run_id']) df_run_id = df.filter(items=['run_id']) - n = [] + # temp stores all run_id values that are available in the DB + db_0temp = [] for j in db_run_id["run_id"]: - n.append(j) + db_0temp.append(j) - # ToDo: problem: if the same run_id is passed in with the pandas data frame 4 entry are created in the db + # temp stores run_id value from data frame + df_1temp = [] for i in df_run_id["run_id"]: - if i in n: - # the run_id value needs to be only present in the run_id column - df_by_run_id = df[(df["run_id"]==i) & (df["run_id"]==i)] - export_df_to_db(engine, schema, df_by_run_id, tabletype) - elif i not in n: + if i in db_0temp: + if i not in df_1temp: + # the run_id value needs to be only present in the run_id column + df_by_run_id = df[df["run_id"]==i] + export_df_to_db(engine, schema, df_by_run_id, tabletype) + # stores the run_id(i) from the df in order to compare with the next loop iteration run_id(i) -> + # df with multiple rows which include the same run_id will not be inserted n times to the db + df_1temp.append(i) + elif i not in db_0temp: metadata_df = pd.DataFrame({'run_id': i, - 'description': ""}, index=[0]) + # ToDo: Optional: insert the current df as description to db + 'description': str(df[df["run_id"]==i].to_dict())}, index=[0]) # create the new run_id from df in db table df_sql_write(con, SCHEMA, "ego_ding0_versioning", metadata_df) @@ -584,10 +601,8 @@ def change_owner(engine, table, role): engine.close() - -# ToDo: Testfailed: Cant insert 2 run_ids at a time, if one of them is not availabe in the database # create a dummy dataframe with lines -line1 = pd.DataFrame({'run_id': [30, 30], +line1 = pd.DataFrame({'run_id': [90, 101], 'id_db': [1, 2], 'edge_name': ['line1', 'line2'], 'grid_name': ['mv_grid5', 'mvgrid5'], From edd3b1226ad086c5a2b16b304d899ae8652a5f27 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 24 Sep 2018 15:22:40 +0200 Subject: [PATCH 083/215] #270 changed path --- ding0/io/db_export.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 6a28a5de..abb9eb4c 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -57,8 +57,8 @@ # metadatastring file folder. 
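# --- Aside, not part of the patch series: the ToDo just below asks for a
# --- non-static, OS-independent location. One hedged option is to resolve the
# --- metadata folder relative to this module with pathlib instead of
# --- hard-coding 'C:/...' or '/...'; the folder name and repository layout
# --- assumed here follow the paths seen elsewhere in this patch series.
from pathlib import Path

# db_export.py sits in ding0/io/, the metadata strings at the repository root
FOLDER_RELATIVE = Path(__file__).resolve().parents[2] / 'ego_grid_ding0_metadatastrings'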
#ToDO: Test if Path works on other os (Tested on Windows7) and Change to not static -# Modify if folder name is different -FOLDER = Path('C:/ego_grid_ding0_metadatastrings') +# Modify if folder name is different -> use: "/" +FOLDER = Path('/ego_grid_ding0_metadatastrings') # Set the Database schema which you want to add the tables to SCHEMA = "topology" From 7c8ffefbf2295aa3a5ea6aca8ee7736c348b3648 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 24 Sep 2018 15:36:01 +0200 Subject: [PATCH 084/215] #270 minor changes --- ding0/io/db_export.py | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index abb9eb4c..fa50f294 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -509,19 +509,7 @@ def export_network_to_db(engine, schema, df, tabletype, metadata_json, srid=None df_sql_write(con, SCHEMA, "ego_ding0_versioning", metadata_df) export_network_to_db(engine, schema, df, tabletype, metadata_json) - # elif run_id_in_db(df, db_versioning) == True: - # export_df_to_db(engine, schema, df, tabletype) - # - # elif run_id_in_db(df, db_versioning) == False: - # # # if the run_id from the data frame doesn't exist then - # # # create new entry into ego_grid_ding0_versioning: - # # newrunid_df = pd.DataFrame({'run_id': df['run_id'], - # # 'description': metadata_json}, - # # index=[0]) - # # - # # df_sql_write(con, SCHEMA, "ego_ding0_versioning", newrunid_df) - # - # print("The run_id from the Pandas data frame is not available in the connected database table: "+DING0_TABLES['versioning']) + print("The database table: "+DING0_TABLES['versioning'] + " had no values for the run_id. Inserted metadata_json") else: run_id_in_db(engine, schema, df, db_versioning, tabletype) @@ -617,12 +605,11 @@ def change_owner(engine, table, role): 'R': [0.0001, 0.00005], 'I_max_th': [5, 6]}) -versioning1 = pd.DataFrame({'run_id': [2], 'description': str(line1.to_dict())}) # tested with reiners_db create_ding0_sql_tables(con, "topology") -# ToDo: Include the Pandas Dataframes from script x? 
which are created for all 16/(15) tables +# ToDo: Include the Pandas Dataframes from script export.py which are created for all 16/(15) tables # parameter: export_network_to_db(engine, schema, df, tabletype, srid=None) export_network_to_db(con, SCHEMA, line1, "line", metadata_json) From a0a9096dcc01be6ca495eb970ddc916bbd431644 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 24 Sep 2018 18:43:31 +0200 Subject: [PATCH 085/215] #270 included NetworkDing0 and export_network(), added second engine for testing, this commit is not stable for this script --- ding0/io/db_export.py | 96 +++++++++++++++++++++++++++---------------- 1 file changed, 60 insertions(+), 36 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index fa50f294..93719cb1 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -22,20 +22,24 @@ from egoio.tools.db import connection from ding0.io.export import export_network +from ding0.core import NetworkDing0 from sqlalchemy import MetaData, ARRAY, BigInteger, Boolean, CheckConstraint, Column, Date, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, Integer, JSON, Numeric, SmallInteger, String, Table, Text, UniqueConstraint, text from geoalchemy2.types import Geometry, Raster from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker +##########SQLAlchemy and DB table################ +engine2 = connection(section='oedb') +session = sessionmaker(bind=engine2)() con = connection() Base = declarative_base() metadata = Base.metadata -# any list of NetworkDing0 also provides run_id -# metadata_json = json.dumps(nw.metadata) -meta_json = '{"run_id":"20"}' +# Set the Database schema which you want to add the tables to +SCHEMA = "topology" DING0_TABLES = {'versioning': 'ego_ding0_versioning', @@ -56,25 +60,35 @@ 'hvmv_transformer': 'ego_ding0_hvmv_transformer'} -# metadatastring file folder. #ToDO: Test if Path works on other os (Tested on Windows7) and Change to not static -# Modify if folder name is different -> use: "/" -FOLDER = Path('/ego_grid_ding0_metadatastrings') +##########Ding0 Network and NW Metadata################ -# Set the Database schema which you want to add the tables to -SCHEMA = "topology" +# create ding0 Network instance +nw = NetworkDing0(name='network') + + +# choose MV Grid Districts to import +mv_grid_districts = [3040] + +# run DING0 on selected MV Grid District +nw.run_ding0(session=session, + mv_grid_districts_no=mv_grid_districts) + +# return values from export_network() as tupels +run_id, nw_metadata, \ +lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ +mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ +lines, mvlv_mapping = export_network(nw) # ToDo: Include the metadata_json variable returned form fun. in export.py -def loads_json_obj(meta_json): - """ - Fnc. included for testing, loads json obj as dict - -> Value should be available as metadata_json from export.py skript - :param meta_json: - """ - # included for testing / or logging - # print("Comment on table: " + table + "\nusing this metadata file: " + file + "\n") - loads_json = json.loads(meta_json) - return loads_json -metadata_json = loads_json_obj(meta_json) +# any list of NetworkDing0 also provides run_id +# nw_metadata = json.dumps(nw.metadata) +metadata_json = json.loads(nw_metadata) + +###################################################### + +# metadatastring file folder. 
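# --- Illustrative sketch, not part of the patch series: the engine/session
# --- pair added a few lines above follows the standard SQLAlchemy pattern;
# --- shown here with a local sqlite URL instead of egoio's
# --- connection(section='oedb') so it runs stand-alone.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

demo_engine = create_engine('sqlite:///:memory:')   # stand-in for connection(section='oedb')
Session = sessionmaker(bind=demo_engine)            # session factory bound to the engine
session = Session()                                 # the session object handed to nw.run_ding0(...)
session.close()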
#ToDO: Test if Path works on other os (Tested on Windows7) and Change to not static +# Modify if folder name is different -> use: "/" +FOLDER = Path('/ego_grid_ding0_metadatastrings') def load_json_files(): """ @@ -83,7 +97,7 @@ def load_json_files(): Parameters ---------- :return: dict: jsonmetadata - contains all .json files from the folder + contains all .json file names from the folder """ full_dir = os.walk(FOLDER.parent / FOLDER.name) @@ -96,13 +110,12 @@ def load_json_files(): return jsonmetadata - def prepare_metadatastring_fordb(table): """ Prepares the JSON String for the sql comment on table Required: The .json file names must contain the table name (for example from create_ding0_sql_tables()) - Instruction: Check the SQL "comment on table" for each table (f.e. use pgAdmin) + Instruction: Check the SQL "comment on table" for each table (e.g. use pgAdmin) Parameters ---------- @@ -141,7 +154,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): # versioning table versioning = Table(DING0_TABLES['versioning'], metadata, Column('run_id', BigInteger, primary_key=True, autoincrement=False, nullable=False), - Column('description', String(3000)), + Column('description', String(6000)), schema=ding0_schema, comment=prepare_metadatastring_fordb("versioning") ) @@ -373,6 +386,7 @@ def df_sql_write(engine, schema, db_table, dataframe): engine: :py:mod:`sqlalchemy.engine.base.Engine` Sqlalchemy database engine + schema: DB schema """ sql_write_df = dataframe.copy() @@ -380,10 +394,14 @@ def df_sql_write(engine, schema, db_table, dataframe): # sql_write_df = sql_write_df.set_index('id') sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) + def export_df_to_db(engine, schema, df, tabletype): """ Writes values to the connected DB. Values from Pandas data frame. + Decides which table by tabletype + Parameters + ---------- :param engine: :param schema: :param df: @@ -443,6 +461,8 @@ def run_id_in_db(engine, schema, df, db_versioning, tabletype): Filter data frame for column=row run_id and compares the values. db_run_id values are from the DB table: ego_ding0_versioning. 
+ Parameters + ---------- :param engine: DB connection :param schema: DB schema :param df: pandas data frame -> gets inserted to the db @@ -464,15 +484,17 @@ def run_id_in_db(engine, schema, df, db_versioning, tabletype): for i in df_run_id["run_id"]: if i in db_0temp: if i not in df_1temp: - # the run_id value needs to be only present in the run_id column + # the run_id value needs to be only present in the run_id column else the filter + # might not work correctly df_by_run_id = df[df["run_id"]==i] export_df_to_db(engine, schema, df_by_run_id, tabletype) # stores the run_id(i) from the df in order to compare with the next loop iteration run_id(i) -> # df with multiple rows which include the same run_id will not be inserted n times to the db df_1temp.append(i) + # ToDo: Check if this can be the case following the Ding0 logic elif i not in db_0temp: metadata_df = pd.DataFrame({'run_id': i, - # ToDo: Optional: insert the current df as description to db + # ToDo: decide optional: insert the current df as description to db 'description': str(df[df["run_id"]==i].to_dict())}, index=[0]) # create the new run_id from df in db table df_sql_write(con, SCHEMA, "ego_ding0_versioning", metadata_df) @@ -482,13 +504,14 @@ def run_id_in_db(engine, schema, df, db_versioning, tabletype): export_df_to_db(engine, schema, df_by_run_id, tabletype) - def export_network_to_db(engine, schema, df, tabletype, metadata_json, srid=None): """ Exports pre created Pands data frames to a connected database schema. Creates new entry in ego_ding0_versioning if the table is empty. Checks if the given pandas data frame "run_id" is available in the DB table. + Parameters + ---------- :param engine: :param schema: :param df: @@ -504,12 +527,12 @@ def export_network_to_db(engine, schema, df, tabletype, metadata_json, srid=None # if the run_id doesn't exist then # create entry into ego_grid_ding0_versioning: metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], - 'description': metadata_json}) + 'description': str(metadata_json)}, index=[0]) df_sql_write(con, SCHEMA, "ego_ding0_versioning", metadata_df) export_network_to_db(engine, schema, df, tabletype, metadata_json) - print("The database table: "+DING0_TABLES['versioning'] + " had no values for the run_id. Inserted metadata_json") + print("The database table: "+DING0_TABLES['versioning'] + " had no values for the run_id. 
Inserted the value from metadata_json") else: run_id_in_db(engine, schema, df, db_versioning, tabletype) @@ -517,7 +540,7 @@ def export_network_to_db(engine, schema, df, tabletype, metadata_json, srid=None print("There is no " + DING0_TABLES["versioning"] + " table in the schema: " + SCHEMA) -def drop_ding0_db_tables(engine, schema): +def drop_ding0_db_tables(engine, schema=SCHEMA): tables = metadata.tables.keys() print("Please confirm that you would like to drop the following tables:") @@ -557,10 +580,10 @@ def drop_ding0_db_tables(engine, schema): print("Confirmation unclear, no action taken") -def db_tables_change_owner(engine, schema): +def db_tables_change_owner(engine, schema=SCHEMA): tables = metadata.tables.keys() - def change_owner(engine, table, role): + def change_owner(engine, table, role, schema): r"""Gives access to database users/ groups Parameters ---------- @@ -571,8 +594,8 @@ def change_owner(engine, table, role): role : str database role that access is granted to """ - tablename = table.__table__.name - schema = table.__table__.schema + tablename = table + schema = SCHEMA grant_str = """ALTER TABLE {schema}.{table} OWNER TO {role};""".format(schema=schema, table=tablename, @@ -584,7 +607,7 @@ def change_owner(engine, table, role): # engine.echo=True for tab in tables: - change_owner(engine, tab, 'oeuser') + change_owner(engine, tab, 'oeuser', schema) engine.close() @@ -608,8 +631,9 @@ def change_owner(engine, table, role): # tested with reiners_db create_ding0_sql_tables(con, "topology") +# drop_ding0_db_tables(con, "topology") +# db_tables_change_owner(con, "topology") # ToDo: Include the Pandas Dataframes from script export.py which are created for all 16/(15) tables # parameter: export_network_to_db(engine, schema, df, tabletype, srid=None) -export_network_to_db(con, SCHEMA, line1, "line", metadata_json) - +export_network_to_db(con, SCHEMA, lines, "line", metadata_json) From c6a8ea22987dd6bfed38db0d9064d83eacce8f9f Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 25 Sep 2018 15:48:06 +0200 Subject: [PATCH 086/215] #270 restructured func for new input Pandas data frames from db_export(), can insert data frames without a wkb and with a wkt_dumps geometry --- .gitignore | 1 + ding0/io/db_export.py | 53 +++++++++++-------- ...o_grid_ding0_versioning_metadata_v1.3.json | 2 +- 3 files changed, 32 insertions(+), 24 deletions(-) diff --git a/.gitignore b/.gitignore index 3fb578c5..9bbcebf0 100644 --- a/.gitignore +++ b/.gitignore @@ -58,6 +58,7 @@ target/ # OWN STUFF #.gitignore +ding0/examples/ding0_grids_example.pkl .idea/ ding0/output/ /Line_loading_feed-in_case.png diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 93719cb1..424b65bd 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -181,7 +181,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb("ding0_line") ) - """ + # ding0 lv_branchtee table ding0_lv_branchtee = Table(DING0_TABLES['lv_branchtee'], metadata, Column('id', Integer, primary_key=True), @@ -192,7 +192,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): schema=ding0_schema, comment=prepare_metadatastring_fordb("ding0_lv_branchtee") ) - + """ # ding0 lv_generator table ding0_lv_generator = Table(DING0_TABLES['lv_generator'], metadata, Column('id', Integer, primary_key=True), @@ -388,11 +388,17 @@ def df_sql_write(engine, schema, db_table, dataframe): Sqlalchemy database engine schema: DB schema """ - - sql_write_df = dataframe.copy() - sql_write_df.columns = 
sql_write_df.columns.map(str.lower) - # sql_write_df = sql_write_df.set_index('id') - sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) + if 'id' in dataframe.columns: + dataframe.rename(columns={'id':'id_db'}, inplace=True) + sql_write_df = dataframe.copy() + sql_write_df.columns = sql_write_df.columns.map(str.lower) + # sql_write_df = sql_write_df.set_index('id') + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) + else: + sql_write_df = dataframe.copy() + sql_write_df.columns = sql_write_df.columns.map(str.lower) + # sql_write_df = sql_write_df.set_index('id') + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) def export_df_to_db(engine, schema, df, tabletype): @@ -412,6 +418,7 @@ def export_df_to_db(engine, schema, df, tabletype): df_sql_write(engine, schema, DING0_TABLES['line'], df) elif tabletype == 'lv_cd': + df = df.drop(['lv_grid_id'], axis=1) df_sql_write(engine, schema, DING0_TABLES['lv_branchtee'], df) elif tabletype == 'lv_gen': @@ -470,7 +477,6 @@ def run_id_in_db(engine, schema, df, db_versioning, tabletype): :param tabletype: select the db table "value relevant in export_df_to_db()" """ - db_run_id = db_versioning.filter(items=['run_id']) df_run_id = df.filter(items=['run_id']) @@ -494,10 +500,10 @@ def run_id_in_db(engine, schema, df, db_versioning, tabletype): # ToDo: Check if this can be the case following the Ding0 logic elif i not in db_0temp: metadata_df = pd.DataFrame({'run_id': i, - # ToDo: decide optional: insert the current df as description to db - 'description': str(df[df["run_id"]==i].to_dict())}, index=[0]) + 'description': str(metadata_json)}, index=[0]) # create the new run_id from df in db table df_sql_write(con, SCHEMA, "ego_ding0_versioning", metadata_df) + db_0temp.append(i) # insert df with the new run_id df_by_run_id = df[df["run_id"] == i] @@ -523,18 +529,19 @@ def export_network_to_db(engine, schema, df, tabletype, metadata_json, srid=None columns=['run_id', 'description']) if engine.dialect.has_table(engine, DING0_TABLES["versioning"]): - if db_versioning.empty: - # if the run_id doesn't exist then - # create entry into ego_grid_ding0_versioning: - metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], - 'description': str(metadata_json)}, index=[0]) - - df_sql_write(con, SCHEMA, "ego_ding0_versioning", metadata_df) - export_network_to_db(engine, schema, df, tabletype, metadata_json) - - print("The database table: "+DING0_TABLES['versioning'] + " had no values for the run_id. Inserted the value from metadata_json") - else: - run_id_in_db(engine, schema, df, db_versioning, tabletype) + # if db_versioning.empty: + # # if the run_id doesn't exist then + # # create entry into ego_grid_ding0_versioning: + # metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], + # 'description': str(metadata_json)}, index=[0]) + # + # df_sql_write(con, SCHEMA, "ego_ding0_versioning", metadata_df) + # export_network_to_db(engine, schema, df, tabletype, metadata_json) + # + # print("The database table: "+DING0_TABLES['versioning'] + " had no values for the run_id. 
Inserted the value from metadata_json") + # else: + + run_id_in_db(engine, schema, df, db_versioning, tabletype) else: print("There is no " + DING0_TABLES["versioning"] + " table in the schema: " + SCHEMA) @@ -636,4 +643,4 @@ def change_owner(engine, table, role, schema): # ToDo: Include the Pandas Dataframes from script export.py which are created for all 16/(15) tables # parameter: export_network_to_db(engine, schema, df, tabletype, srid=None) -export_network_to_db(con, SCHEMA, lines, "line", metadata_json) +export_network_to_db(con, SCHEMA, lv_gen, "lv_gen", metadata_json) diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json index 460beae9..51dec620 100644 --- a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json +++ b/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json @@ -30,7 +30,7 @@ "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, - {"name": "description","description": "FIXME","unit": "string"} ] } ], + {"name": "description","description": "Used parameters for this run","unit": "string"} ] } ], "metadata_version": "1.3", "_comment": { "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", From f965d33278e4b3798616a03656e525a52012161c Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 25 Sep 2018 17:01:15 +0200 Subject: [PATCH 087/215] #270 successfully tested fnc.: drop_ding0_db_tables() --- ding0/io/db_export.py | 55 ++++++++++--------------------------------- 1 file changed, 13 insertions(+), 42 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 424b65bd..bbcc131b 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -181,7 +181,6 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb("ding0_line") ) - # ding0 lv_branchtee table ding0_lv_branchtee = Table(DING0_TABLES['lv_branchtee'], metadata, Column('id', Integer, primary_key=True), @@ -465,7 +464,7 @@ def run_id_in_db(engine, schema, df, db_versioning, tabletype): """ Check if the run_id values from the new data frames are available in the DB. Creates new run_id in the db if not exist. - Filter data frame for column=row run_id and compares the values. + Filter data frame for column=run_id and compares the values with existing run_id. db_run_id values are from the DB table: ego_ding0_versioning. 
Parameters @@ -492,12 +491,11 @@ def run_id_in_db(engine, schema, df, db_versioning, tabletype): if i not in df_1temp: # the run_id value needs to be only present in the run_id column else the filter # might not work correctly - df_by_run_id = df[df["run_id"]==i] + df_by_run_id = df[df["run_id"] == i] export_df_to_db(engine, schema, df_by_run_id, tabletype) # stores the run_id(i) from the df in order to compare with the next loop iteration run_id(i) -> # df with multiple rows which include the same run_id will not be inserted n times to the db df_1temp.append(i) - # ToDo: Check if this can be the case following the Ding0 logic elif i not in db_0temp: metadata_df = pd.DataFrame({'run_id': i, 'description': str(metadata_json)}, index=[0]) @@ -510,7 +508,7 @@ def run_id_in_db(engine, schema, df, db_versioning, tabletype): export_df_to_db(engine, schema, df_by_run_id, tabletype) -def export_network_to_db(engine, schema, df, tabletype, metadata_json, srid=None): +def export_network_to_db(engine, schema, df, tabletype, srid=None): """ Exports pre created Pands data frames to a connected database schema. Creates new entry in ego_ding0_versioning if the table is empty. @@ -529,17 +527,6 @@ def export_network_to_db(engine, schema, df, tabletype, metadata_json, srid=None columns=['run_id', 'description']) if engine.dialect.has_table(engine, DING0_TABLES["versioning"]): - # if db_versioning.empty: - # # if the run_id doesn't exist then - # # create entry into ego_grid_ding0_versioning: - # metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], - # 'description': str(metadata_json)}, index=[0]) - # - # df_sql_write(con, SCHEMA, "ego_ding0_versioning", metadata_df) - # export_network_to_db(engine, schema, df, tabletype, metadata_json) - # - # print("The database table: "+DING0_TABLES['versioning'] + " had no values for the run_id. Inserted the value from metadata_json") - # else: run_id_in_db(engine, schema, df, db_versioning, tabletype) @@ -548,7 +535,8 @@ def export_network_to_db(engine, schema, df, tabletype, metadata_json, srid=None def drop_ding0_db_tables(engine, schema=SCHEMA): - tables = metadata.tables.keys() + tables = metadata.sorted_tables + reversed_tables = reversed(tables) print("Please confirm that you would like to drop the following tables:") for n, tab in enumerate(tables): @@ -561,8 +549,8 @@ def drop_ding0_db_tables(engine, schema=SCHEMA): confirmation = input( "Please type the choice completely as there is no default choice.") if re.fullmatch('[Yy]es', confirmation): - for tab in tables: - tab().__table__.drop(bind=engine, checkfirst=True) + for tab in reversed_tables: + tab.drop(engine, checkfirst=True) elif re.fullmatch('[Nn]o', confirmation): print("Cancelled dropping of tables") else: @@ -570,7 +558,7 @@ def drop_ding0_db_tables(engine, schema=SCHEMA): indlist = confirmation.split(',') indlist = list(map(int, indlist)) print("Please confirm deletion of the following tables:") - tablist = np.array(tables)[indlist].tolist() + tablist = np.array(reversed_tables)[indlist].tolist() for n, tab in enumerate(tablist): print("{: 3d}. 
{}".format(n, tab)) con2 = input("Please confirm with either of the choices below:\n" + @@ -578,7 +566,7 @@ def drop_ding0_db_tables(engine, schema=SCHEMA): "- no") if re.fullmatch('[Yy]es', con2): for tab in tablist: - tab().__table__.drop(bind=engine, checkfirst=True) + tab.drop(engine, checkfirst=True) elif re.fullmatch('[Nn]o', con2): print("Cancelled dropping of tables") else: @@ -588,7 +576,7 @@ def drop_ding0_db_tables(engine, schema=SCHEMA): def db_tables_change_owner(engine, schema=SCHEMA): - tables = metadata.tables.keys() + tables = metadata.sorted_tables def change_owner(engine, table, role, schema): r"""Gives access to database users/ groups @@ -619,28 +607,11 @@ def change_owner(engine, table, role, schema): engine.close() -# create a dummy dataframe with lines -line1 = pd.DataFrame({'run_id': [90, 101], - 'id_db': [1, 2], - 'edge_name': ['line1', 'line2'], - 'grid_name': ['mv_grid5', 'mvgrid5'], - 'node1': [1, 2], - 'node2': [2, 3], - 'type_kind': ['line', 'line'], - 'type_name': ['NASX2Y', 'NA2SXX2Y'], - 'length': [1.3, 2.3], - 'U_n': [10, 10], - 'C': [0.002, 0.001], - 'L': [0.01, 0.02], - 'R': [0.0001, 0.00005], - 'I_max_th': [5, 6]}) - - # tested with reiners_db create_ding0_sql_tables(con, "topology") -# drop_ding0_db_tables(con, "topology") +# drop_ding0_db_tables(con) # db_tables_change_owner(con, "topology") -# ToDo: Include the Pandas Dataframes from script export.py which are created for all 16/(15) tables +# ToDo: Insert line df: Geometry is wkb and fails to be inserted to db table, get tabletype? # parameter: export_network_to_db(engine, schema, df, tabletype, srid=None) -export_network_to_db(con, SCHEMA, lv_gen, "lv_gen", metadata_json) +# export_network_to_db(con, SCHEMA, lv_gen, "lv_gen", metadata_json) From dda222861a4e2f88fd4a7baf4359f63bb3e868a6 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 25 Sep 2018 17:17:26 +0200 Subject: [PATCH 088/215] #270 minor changes for testing --- ding0/io/db_export.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index bbcc131b..3b47822d 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -191,7 +191,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): schema=ding0_schema, comment=prepare_metadatastring_fordb("ding0_lv_branchtee") ) - """ + # ding0 lv_generator table ding0_lv_generator = Table(DING0_TABLES['lv_generator'], metadata, Column('id', Integer, primary_key=True), @@ -355,7 +355,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): schema=ding0_schema, comment=prepare_metadatastring_fordb("ding0_hvmv_transformer") ) -""" + # create all the tables metadata.create_all(engine, checkfirst=True) @@ -614,4 +614,5 @@ def change_owner(engine, table, role, schema): # ToDo: Insert line df: Geometry is wkb and fails to be inserted to db table, get tabletype? 
# parameter: export_network_to_db(engine, schema, df, tabletype, srid=None) -# export_network_to_db(con, SCHEMA, lv_gen, "lv_gen", metadata_json) +export_network_to_db(con, SCHEMA, lv_gen, "lv_gen", metadata_json) +export_network_to_db(con, SCHEMA, mv_stations, "mv_stations", metadata_json) From c93a6b82fc10451933d6366aae8c6bbed4f45d64 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 25 Sep 2018 18:01:58 +0200 Subject: [PATCH 089/215] #270 added new fnc.: execute_export_network_to_db() --- ding0/io/db_export.py | 45 +++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 43 insertions(+), 2 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 3b47822d..156e5a74 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -606,6 +606,47 @@ def change_owner(engine, table, role, schema): engine.close() +def execute_export_network_to_db(con, schema=SCHEMA): + """ + exportes all data frames to the db tables + + + :param con: + :param schema: + :return: + """ + + # 1 + export_network_to_db(con, schema, lines, "line", metadata_json) + # 2 + export_network_to_db(con, schema, lv_cd, "lv_cd", metadata_json) + # 3 + export_network_to_db(con, schema, lv_gen, "lv_gen", metadata_json) + # 4 + export_network_to_db(con, schema, lv_stations, "lv_station", metadata_json) + # 5 + export_network_to_db(con, schema, lv_loads, "lv_load", metadata_json) + # 6 + export_network_to_db(con, schema, lv_grid, "lv_grid", metadata_json) + # 7 + export_network_to_db(con, schema, mv_cb, "mv_cb", metadata_json) + # 8 + export_network_to_db(con, schema, mv_cd, "mv_cd", metadata_json) + # 9 + export_network_to_db(con, schema, mv_gen, "mv_gen", metadata_json) + # 10 + export_network_to_db(con, schema, mv_stations, "mv_station", metadata_json) + # 11 + export_network_to_db(con, schema, mv_loads, "mv_load", metadata_json) + # 12 + export_network_to_db(con, schema, mv_grid, "mv_grid", metadata_json) + # 13 + export_network_to_db(con, schema, mvlv_trafos, "mvlv_trafo", metadata_json) + # 14 + export_network_to_db(con, schema, hvmv_trafos, "hvmv_trafo", metadata_json) + # 15 + export_network_to_db(con, schema, mvlv_mapping, "mvlv_mapping", metadata_json) + # tested with reiners_db create_ding0_sql_tables(con, "topology") @@ -614,5 +655,5 @@ def change_owner(engine, table, role, schema): # ToDo: Insert line df: Geometry is wkb and fails to be inserted to db table, get tabletype? 
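# Usage sketch for the wrapper added in this patch: once export_network(nw) has
# filled the module-level data frames and the tables exist, a single call pushes
# all fifteen frames in the numbered order above (assumes con, SCHEMA and
# metadata_json are set up as in this revision).
create_ding0_sql_tables(con, SCHEMA)
execute_export_network_to_db(con, SCHEMA)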
# parameter: export_network_to_db(engine, schema, df, tabletype, srid=None) -export_network_to_db(con, SCHEMA, lv_gen, "lv_gen", metadata_json) -export_network_to_db(con, SCHEMA, mv_stations, "mv_stations", metadata_json) +export_network_to_db(con, SCHEMA, lines, "line", metadata_json) +# export_network_to_db(con, SCHEMA, mv_stations, "mv_stations", metadata_json) From 8323a2bc1f1cf6dc7ef0debd14d9cfdf724450a4 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 26 Sep 2018 16:25:52 +0200 Subject: [PATCH 090/215] #270 minor changes --- ding0/io/db_export.py | 34 ++++++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 156e5a74..0dd3d981 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -29,6 +29,7 @@ from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker + ##########SQLAlchemy and DB table################ engine2 = connection(section='oedb') session = sessionmaker(bind=engine2)() @@ -71,7 +72,7 @@ # run DING0 on selected MV Grid District nw.run_ding0(session=session, - mv_grid_districts_no=mv_grid_districts) + mv_grid_districts_no=mv_grid_districts) # return values from export_network() as tupels run_id, nw_metadata, \ @@ -88,7 +89,7 @@ # metadatastring file folder. #ToDO: Test if Path works on other os (Tested on Windows7) and Change to not static # Modify if folder name is different -> use: "/" -FOLDER = Path('/ego_grid_ding0_metadatastrings') +FOLDER = Path('/Users/Jonas.Huber/PycharmProjects/ding0_metadata/ding0/ego_grid_ding0_metadatastrings') def load_json_files(): """ @@ -387,12 +388,20 @@ def df_sql_write(engine, schema, db_table, dataframe): Sqlalchemy database engine schema: DB schema """ + # if 'geom' in dataframe.columns: + # sql_write_geom = dataframe.filter(items=['geom']) + # for i in sql_write_geom['geom']: + # + # # insert_geom = "UPDATE {} SET {}=ST_GeomFromText('{}') WHERE (id={}) ".format(db_table, "geom", i, "1") + # insert_geom = "INSERT INTO {} ({}) VALUES (ST_GeomFromText('{}'))".format(db_table, "geom", i) + # engine.execute(insert_geom) + if 'id' in dataframe.columns: dataframe.rename(columns={'id':'id_db'}, inplace=True) sql_write_df = dataframe.copy() sql_write_df.columns = sql_write_df.columns.map(str.lower) # sql_write_df = sql_write_df.set_index('id') - sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, ) else: sql_write_df = dataframe.copy() sql_write_df.columns = sql_write_df.columns.map(str.lower) @@ -535,6 +544,12 @@ def export_network_to_db(engine, schema, df, tabletype, srid=None): def drop_ding0_db_tables(engine, schema=SCHEMA): + """ + Instructions: In order to drop tables all tables need to be stored in metadata (create tables before dropping them) + :param engine: + :param schema: + :return: + """ tables = metadata.sorted_tables reversed_tables = reversed(tables) @@ -579,7 +594,9 @@ def db_tables_change_owner(engine, schema=SCHEMA): tables = metadata.sorted_tables def change_owner(engine, table, role, schema): - r"""Gives access to database users/ groups + """ + Gives access to database users/ groups + Parameters ---------- session : sqlalchemy session object @@ -606,14 +623,15 @@ def change_owner(engine, table, role, schema): engine.close() + def execute_export_network_to_db(con, schema=SCHEMA): """ - exportes all data frames to the db tables - + exports all data frames to the db 
tables + Parameters + ---------- :param con: :param schema: - :return: """ # 1 @@ -655,5 +673,5 @@ def execute_export_network_to_db(con, schema=SCHEMA): # ToDo: Insert line df: Geometry is wkb and fails to be inserted to db table, get tabletype? # parameter: export_network_to_db(engine, schema, df, tabletype, srid=None) -export_network_to_db(con, SCHEMA, lines, "line", metadata_json) +export_network_to_db(con, SCHEMA, lv_gen, "lv_gen", metadata_json) # export_network_to_db(con, SCHEMA, mv_stations, "mv_stations", metadata_json) From 5da4b4e08a5411b88ad029c0187d896376fe52d0 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Wed, 26 Sep 2018 16:44:33 +0200 Subject: [PATCH 091/215] Added an if __name__== "__main__" block for better testing and importing, some merge conflicts solved --- ding0/io/db_export.py | 89 ++++++++++++++++++++++++------------------- 1 file changed, 49 insertions(+), 40 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 0dd3d981..efddb8b4 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -60,37 +60,6 @@ 'mv_station': 'ego_ding0_mv_station', 'hvmv_transformer': 'ego_ding0_hvmv_transformer'} - -##########Ding0 Network and NW Metadata################ - -# create ding0 Network instance -nw = NetworkDing0(name='network') - - -# choose MV Grid Districts to import -mv_grid_districts = [3040] - -# run DING0 on selected MV Grid District -nw.run_ding0(session=session, - mv_grid_districts_no=mv_grid_districts) - -# return values from export_network() as tupels -run_id, nw_metadata, \ -lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ -mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ -lines, mvlv_mapping = export_network(nw) - -# ToDo: Include the metadata_json variable returned form fun. in export.py -# any list of NetworkDing0 also provides run_id -# nw_metadata = json.dumps(nw.metadata) -metadata_json = json.loads(nw_metadata) - -###################################################### - -# metadatastring file folder. 
#ToDO: Test if Path works on other os (Tested on Windows7) and Change to not static -# Modify if folder name is different -> use: "/" -FOLDER = Path('/Users/Jonas.Huber/PycharmProjects/ding0_metadata/ding0/ego_grid_ding0_metadatastrings') - def load_json_files(): """ Creats a list of all .json files in FOLDER @@ -182,6 +151,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb("ding0_line") ) + # ding0 lv_branchtee table ding0_lv_branchtee = Table(DING0_TABLES['lv_branchtee'], metadata, Column('id', Integer, primary_key=True), @@ -401,7 +371,7 @@ def df_sql_write(engine, schema, db_table, dataframe): sql_write_df = dataframe.copy() sql_write_df.columns = sql_write_df.columns.map(str.lower) # sql_write_df = sql_write_df.set_index('id') - sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, ) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) else: sql_write_df = dataframe.copy() sql_write_df.columns = sql_write_df.columns.map(str.lower) @@ -665,13 +635,52 @@ def execute_export_network_to_db(con, schema=SCHEMA): # 15 export_network_to_db(con, schema, mvlv_mapping, "mvlv_mapping", metadata_json) + +if __name__ == "__main__": + + ##########Ding0 Network and NW Metadata################ + + # create ding0 Network instance + nw = NetworkDing0(name='network') + + + # choose MV Grid Districts to import + mv_grid_districts = [3040] + + # run DING0 on selected MV Grid District + nw.run_ding0(session=session, + mv_grid_districts_no=mv_grid_districts) + + # return values from export_network() as tupels + run_id, nw_metadata, \ + lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ + mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ + lines, mvlv_mapping = export_network(nw) + + # ToDo: Include the metadata_json variable returned form fun. in export.py + # any list of NetworkDing0 also provides run_id + # nw_metadata = json.dumps(nw.metadata) + metadata_json = json.loads(nw_metadata) + + ###################################################### + + # metadatastring file folder. #ToDO: Test if Path works on other os (Tested on Windows7) and Change to not static + # Modify if folder name is different -> use: "/" + FOLDER = Path('/ego_grid_ding0_metadatastrings') + + # tested with reiners_db + create_ding0_sql_tables(con, "topology") + # drop_ding0_db_tables(con, "topology") + # db_tables_change_owner(con, "topology") + + + # tested with reiners_db + create_ding0_sql_tables(con, "topology") + # drop_ding0_db_tables(con) + # db_tables_change_owner(con, "topology") -# tested with reiners_db -create_ding0_sql_tables(con, "topology") -# drop_ding0_db_tables(con) -# db_tables_change_owner(con, "topology") + # ToDo: Insert line df: Geometry is wkb and fails to be inserted to db table, get tabletype? + # parameter: export_network_to_db(engine, schema, df, tabletype, srid=None) + export_network_to_db(con, SCHEMA, lv_gen, "lv_gen", metadata_json) + # export_network_to_db(con, SCHEMA, mv_stations, "mv_stations", metadata_json) -# ToDo: Insert line df: Geometry is wkb and fails to be inserted to db table, get tabletype? 
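# Condensed sketch of the versioning check that run_id_in_db() performs before
# any per-table insert: a run_id has to exist in ego_ding0_versioning first,
# because every other table references it through a foreign key. Variable names
# here are illustrative; the actual logic lives in run_id_in_db() and
# export_network_to_db().
db_versioning = pd.read_sql_table(DING0_TABLES['versioning'], con, schema=SCHEMA,
                                  columns=['run_id', 'description'])
if metadata_json['run_id'] not in set(db_versioning['run_id']):
    new_row = pd.DataFrame({'run_id': metadata_json['run_id'],
                            'description': str(metadata_json)}, index=[0])
    df_sql_write(con, SCHEMA, DING0_TABLES['versioning'], new_row)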
-# parameter: export_network_to_db(engine, schema, df, tabletype, srid=None) -export_network_to_db(con, SCHEMA, lv_gen, "lv_gen", metadata_json) -# export_network_to_db(con, SCHEMA, mv_stations, "mv_stations", metadata_json) From 750bfcb454e69ee2b252b919600f699dab3752e4 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Wed, 26 Sep 2018 18:18:20 +0200 Subject: [PATCH 092/215] Changed location and renamed metadatastring json files --- .../io/metadatastrings/hvmv_transformer.json | 0 .../io/metadatastrings/line.json | 0 .../io/metadatastrings/lv_branchtee.json | 0 .../io/metadatastrings/lv_generator.json | 0 .../io/metadatastrings/lv_grid.json | 0 .../io/metadatastrings/lv_load.json | 0 .../io/metadatastrings/lv_station.json | 0 .../io/metadatastrings/mv_branchtee.json | 0 .../io/metadatastrings/mv_circuitbreaker.json | 0 .../io/metadatastrings/mv_generator.json | 0 .../io/metadatastrings/mv_grid.json | 0 .../io/metadatastrings/mv_load.json | 0 .../io/metadatastrings/mv_station.json | 0 .../io/metadatastrings/mvlv_mapping.json | 0 .../io/metadatastrings/mvlv_transformer.json | 0 .../io/metadatastrings/versioning.json | 0 16 files changed, 0 insertions(+), 0 deletions(-) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json => ding0/io/metadatastrings/hvmv_transformer.json (100%) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json => ding0/io/metadatastrings/line.json (100%) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json => ding0/io/metadatastrings/lv_branchtee.json (100%) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json => ding0/io/metadatastrings/lv_generator.json (100%) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json => ding0/io/metadatastrings/lv_grid.json (100%) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json => ding0/io/metadatastrings/lv_load.json (100%) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json => ding0/io/metadatastrings/lv_station.json (100%) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json => ding0/io/metadatastrings/mv_branchtee.json (100%) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json => ding0/io/metadatastrings/mv_circuitbreaker.json (100%) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json => ding0/io/metadatastrings/mv_generator.json (100%) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json => ding0/io/metadatastrings/mv_grid.json (100%) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json => ding0/io/metadatastrings/mv_load.json (100%) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json => ding0/io/metadatastrings/mv_station.json (100%) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json => ding0/io/metadatastrings/mvlv_mapping.json (100%) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json => ding0/io/metadatastrings/mvlv_transformer.json (100%) rename ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json => ding0/io/metadatastrings/versioning.json (100%) diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json b/ding0/io/metadatastrings/hvmv_transformer.json similarity index 100% rename from 
ego_grid_ding0_metadatastrings/ego_grid_ding0_hvmv_transformer_metadata_v1.3.json rename to ding0/io/metadatastrings/hvmv_transformer.json diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json b/ding0/io/metadatastrings/line.json similarity index 100% rename from ego_grid_ding0_metadatastrings/ego_grid_ding0_line_metadata_v1.3.json rename to ding0/io/metadatastrings/line.json diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json b/ding0/io/metadatastrings/lv_branchtee.json similarity index 100% rename from ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_branchtee_metadata_v1.3.json rename to ding0/io/metadatastrings/lv_branchtee.json diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json b/ding0/io/metadatastrings/lv_generator.json similarity index 100% rename from ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_generator_metadata_v1.3.json rename to ding0/io/metadatastrings/lv_generator.json diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json b/ding0/io/metadatastrings/lv_grid.json similarity index 100% rename from ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_grid_metadata_v1.3.json rename to ding0/io/metadatastrings/lv_grid.json diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json b/ding0/io/metadatastrings/lv_load.json similarity index 100% rename from ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_load_metadata_v1.3.json rename to ding0/io/metadatastrings/lv_load.json diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json b/ding0/io/metadatastrings/lv_station.json similarity index 100% rename from ego_grid_ding0_metadatastrings/ego_grid_ding0_lv_station_metadata_v1.3.json rename to ding0/io/metadatastrings/lv_station.json diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json b/ding0/io/metadatastrings/mv_branchtee.json similarity index 100% rename from ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_branchtee_metadata_v1.3.json rename to ding0/io/metadatastrings/mv_branchtee.json diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json b/ding0/io/metadatastrings/mv_circuitbreaker.json similarity index 100% rename from ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_circuitbreaker_metadata_v1.3.json rename to ding0/io/metadatastrings/mv_circuitbreaker.json diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json b/ding0/io/metadatastrings/mv_generator.json similarity index 100% rename from ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_generator_metadata_v1.3.json rename to ding0/io/metadatastrings/mv_generator.json diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json b/ding0/io/metadatastrings/mv_grid.json similarity index 100% rename from ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_grid_metadata_v1.3.json rename to ding0/io/metadatastrings/mv_grid.json diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json b/ding0/io/metadatastrings/mv_load.json similarity index 100% rename from ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_load_metadata_v1.3.json rename to ding0/io/metadatastrings/mv_load.json diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json b/ding0/io/metadatastrings/mv_station.json similarity index 100% rename from 
ego_grid_ding0_metadatastrings/ego_grid_ding0_mv_station_metadata_v1.3.json rename to ding0/io/metadatastrings/mv_station.json diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json b/ding0/io/metadatastrings/mvlv_mapping.json similarity index 100% rename from ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_mapping_metadata_v1.3.json rename to ding0/io/metadatastrings/mvlv_mapping.json diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json b/ding0/io/metadatastrings/mvlv_transformer.json similarity index 100% rename from ego_grid_ding0_metadatastrings/ego_grid_ding0_mvlv_transformer_metadata_v1.3.json rename to ding0/io/metadatastrings/mvlv_transformer.json diff --git a/ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json b/ding0/io/metadatastrings/versioning.json similarity index 100% rename from ego_grid_ding0_metadatastrings/ego_grid_ding0_versioning_metadata_v1.3.json rename to ding0/io/metadatastrings/versioning.json From 1bf7c7a02c9fa6c8523de01f02e36fae635165bc Mon Sep 17 00:00:00 2001 From: boltbeard Date: Wed, 26 Sep 2018 18:18:55 +0200 Subject: [PATCH 093/215] included metadata version in config files --- ding0/config/config_files.cfg | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ding0/config/config_files.cfg b/ding0/config/config_files.cfg index 79c474d2..cee97f2f 100644 --- a/ding0/config/config_files.cfg +++ b/ding0/config/config_files.cfg @@ -26,4 +26,7 @@ model_grids_lv_apartment_string = model_grids-lv_relation_apartment_string.csv animation_file_prefix = mv-routing_ani_ nd_pickle = ding0_grids_{}.pkl edges_stats = mvgd_edges_stats_{}.csv -nodes_stats = mvgd_nodes_stats_{}.csv \ No newline at end of file +nodes_stats = mvgd_nodes_stats_{}.csv + +[metadata_strings] +version = 1.3 \ No newline at end of file From f4ec1f682c9946cf73db2721b482d8fe03012797 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Wed, 26 Sep 2018 18:36:48 +0200 Subject: [PATCH 094/215] modified to use internal ding0 path to get the metadata strings from the new locations --- ding0/io/db_export.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index efddb8b4..ba94e4c6 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -21,6 +21,8 @@ import re from egoio.tools.db import connection + +import ding0 from ding0.io.export import export_network from ding0.core import NetworkDing0 @@ -42,6 +44,8 @@ # Set the Database schema which you want to add the tables to SCHEMA = "topology" +METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'config', 'metadatastrings') + DING0_TABLES = {'versioning': 'ego_ding0_versioning', 'line': 'ego_ding0_line', @@ -62,7 +66,7 @@ def load_json_files(): """ - Creats a list of all .json files in FOLDER + Creats a list of all .json files in METADATA_STRING_FOLDER Parameters ---------- @@ -70,7 +74,7 @@ def load_json_files(): contains all .json file names from the folder """ - full_dir = os.walk(FOLDER.parent / FOLDER.name) + full_dir = os.walk(METADATA_STRING_FOLDER) jsonmetadata = [] for jsonfiles in full_dir: @@ -96,13 +100,13 @@ def prepare_metadatastring_fordb(table): Contains the .json file as string """ - for file in load_json_files(): - JSONFILEPATH = FOLDER / file - with open(JSONFILEPATH, encoding='UTF-8') as f: - if table in file: + for json_file in load_json_files(): + json_file_path = os.path.join(METADATA_STRING_FOLDER, json_file) + with 
open(json_file_path, encoding='UTF-8') as jf: + if table in json_file: # included for testing / or logging - # print("Comment on table: " + table + "\nusing this metadata file: " + file + "\n") - mds = json.load(f) + # print("Comment on table: " + table + "\nusing this METADATA file: " + file + "\n") + mds = json.load(jf) mdsstring = json.dumps(mds, indent=4, ensure_ascii=False) return mdsstring From 3e649d2ae8eb112cea6eaface975e38d89db4366 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Wed, 26 Sep 2018 18:38:34 +0200 Subject: [PATCH 095/215] Changed Base and metadata to GLOBAL PEP8 as BASE and METADATA --- ding0/io/db_export.py | 358 +++++++++++++++++++++--------------------- 1 file changed, 175 insertions(+), 183 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index ba94e4c6..3036d641 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -32,14 +32,8 @@ from sqlalchemy.orm import sessionmaker -##########SQLAlchemy and DB table################ -engine2 = connection(section='oedb') -session = sessionmaker(bind=engine2)() - -con = connection() - -Base = declarative_base() -metadata = Base.metadata +DECLARATIVE_BASE = declarative_base() +METADATA = DECLARATIVE_BASE.metadata # Set the Database schema which you want to add the tables to SCHEMA = "topology" @@ -126,214 +120,212 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): """ # versioning table - versioning = Table(DING0_TABLES['versioning'], metadata, + versioning = Table(DING0_TABLES['versioning'], METADATA, Column('run_id', BigInteger, primary_key=True, autoincrement=False, nullable=False), Column('description', String(6000)), schema=ding0_schema, - comment=prepare_metadatastring_fordb("versioning") + comment=prepare_metadatastring_fordb('versioning') ) # ding0 lines table - ding0_line = Table(DING0_TABLES['line'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('edge_name', String(100)), - Column('grid_name', String(100)), - Column('node1', String(100)), - Column('node2', String(100)), - Column('type_kind', String(100)), - Column('type_name', String(100)), - Column('length', Float(10)), - Column('u_n', Float(10)), - Column('c', Float(10)), - Column('l', Float(10)), - Column('r', Float(10)), - Column('i_max_th', Float(10)), - Column('geom', Geometry('LINESTRING', 4326)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("ding0_line") - ) + ding0_line = Table(DING0_TABLES['line'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('edge_name', String(100)), + Column('grid_name', String(100)), + Column('node1', String(100)), + Column('node2', String(100)), + Column('type_kind', String(100)), + Column('type_name', String(100)), + Column('length', Float(10)), + Column('u_n', Float(10)), + Column('c', Float(10)), + Column('l', Float(10)), + Column('r', Float(10)), + Column('i_max_th', Float(10)), + Column('geom', Geometry('LINESTRING', 4326)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('line') + ) # ding0 lv_branchtee table - ding0_lv_branchtee = Table(DING0_TABLES['lv_branchtee'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('name', 
String(100)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("ding0_lv_branchtee") - ) + ding0_lv_branchtee = Table(DING0_TABLES['lv_branchtee'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('lv_branchtee') + ) # ding0 lv_generator table - ding0_lv_generator = Table(DING0_TABLES['lv_generator'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('la_id', BigInteger), - Column('name', String(100)), - Column('lv_grid_id', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('type', String(22)), - Column('subtype', String(22)), - Column('v_level', Integer), - Column('nominal_capacity', Float(10)), - Column('weather_cell_id', BigInteger), - Column('is_aggregated', Boolean), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("ding0_lv_generator") - ) + ding0_lv_generator = Table(DING0_TABLES['lv_generator'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('la_id', BigInteger), + Column('name', String(100)), + Column('lv_grid_id', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('type', String(22)), + Column('subtype', String(22)), + Column('v_level', Integer), + Column('nominal_capacity', Float(10)), + Column('weather_cell_id', BigInteger), + Column('is_aggregated', Boolean), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('lv_generator') + ) # ding0 lv_load table - ding0_lv_load = Table(DING0_TABLES['lv_load'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('name', String(100)), - Column('lv_grid_id', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('consumption', String(100)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("ding0_lv_load") - ) + ding0_lv_load = Table(DING0_TABLES['lv_load'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('name', String(100)), + Column('lv_grid_id', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('consumption', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('lv_load') + ) # ding0 lv_station table - ding0_lv_station = Table(DING0_TABLES['lv_station'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('name', String(100)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("ding0_lv_station") - ) + ding0_lv_station = Table(DING0_TABLES['lv_station'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('lv_station') 
+ ) # ding0 mvlv_transformer table - ding0_mvlv_transformer = Table(DING0_TABLES['mvlv_transformer'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('name', String(100)), - Column('voltage_op', Float(10)), - Column('s_nom', Float(10)), - Column('x', Float(10)), - Column('r', Float(10)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("ding0_mvlv_transformer") - ) + ding0_mvlv_transformer = Table(DING0_TABLES['mvlv_transformer'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + Column('voltage_op', Float(10)), + Column('s_nom', Float(10)), + Column('x', Float(10)), + Column('r', Float(10)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mvlv_transformer") + ) # ding0 mvlv_mapping table - ding0_mvlv_mapping = Table(DING0_TABLES['mvlv_mapping'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('lv_grid_id', BigInteger), - Column('lv_grid_name', String(100)), - Column('mv_grid_id', BigInteger), - Column('mv_grid_name', String(100)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("ding0_mvlv_mapping") - ) + ding0_mvlv_mapping = Table(DING0_TABLES['mvlv_mapping'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('lv_grid_id', BigInteger), + Column('lv_grid_name', String(100)), + Column('mv_grid_id', BigInteger), + Column('mv_grid_name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mvlv_mapping") + ) # ding0 mv_branchtee table - ding0_mv_branchtee = Table(DING0_TABLES['mv_branchtee'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('name', String(100)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("ding0_mv_branchtee") - ) + ding0_mv_branchtee = Table(DING0_TABLES['mv_branchtee'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_branchtee") + ) # ding0 mv_circuitbreaker table - ding0_mv_circuitbreaker = Table(DING0_TABLES['mv_circuitbreaker'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('name', String(100)), - Column('status', String(10)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("ding0_mv_circuitbreaker") - ) + ding0_mv_circuitbreaker = Table(DING0_TABLES['mv_circuitbreaker'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', 
String(100)), + Column('status', String(10)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_circuitbreaker") + ) # ding0 mv_generator table - ding0_mv_generator = Table(DING0_TABLES['mv_generator'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('name', String(100)), - Column('geom', Geometry('POINT', 4326)), - Column('type', String(22)), - Column('subtype', String(22)), - Column('v_level', Integer), - Column('nominal_capacity', Float(10)), - Column('weather_cell_id', BigInteger), - Column('is_aggregated', Boolean), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("ding0_mv_generator") - ) + ding0_mv_generator = Table(DING0_TABLES['mv_generator'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('name', String(100)), + Column('geom', Geometry('POINT', 4326)), + Column('type', String(22)), + Column('subtype', String(22)), + Column('v_level', Integer), + Column('nominal_capacity', Float(10)), + Column('weather_cell_id', BigInteger), + Column('is_aggregated', Boolean), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_generator") + ) # ding0 mv_load table - ding0_mv_load = Table(DING0_TABLES['mv_load'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('name', String(100)), - Column('geom', Geometry('LINESTRING', 4326)), - Column('is_aggregated', Boolean), - Column('consumption', String(100)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("ding0_mv_load") - ) + ding0_mv_load = Table(DING0_TABLES['mv_load'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('name', String(100)), + Column('geom', Geometry('LINESTRING', 4326)), + Column('is_aggregated', Boolean), + Column('consumption', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_load") + ) # ding0 mv_grid table - ding0_mv_grid = Table(DING0_TABLES['mv_grid'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('MULTIPOLYGON', 4326)), - Column('name', String(100)), - Column('population', BigInteger), - Column('voltage_nom', Float(10)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("ding0_mv_grid") - ) - + ding0_mv_grid = Table(DING0_TABLES['mv_grid'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('MULTIPOLYGON', 4326)), + Column('name', String(100)), + Column('population', BigInteger), + Column('voltage_nom', Float(10)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_grid") + ) # ding0 mv_station table - ding0_mv_station = Table(DING0_TABLES['mv_station'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('name', String(100)), - 
schema=ding0_schema, - comment=prepare_metadatastring_fordb("ding0_mv_station") - ) + ding0_mv_station = Table(DING0_TABLES['mv_station'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_station") + ) # ding0 hvmv_transformer table - ding0_hvmv_transformer = Table(DING0_TABLES['hvmv_transformer'], metadata, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('name', String(100)), - Column('voltage_op', Float(10)), - Column('s_nom', Float(10)), - Column('x', Float(10)), - Column('r', Float(10)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("ding0_hvmv_transformer") - ) - + ding0_hvmv_transformer = Table(DING0_TABLES['hvmv_transformer'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + Column('voltage_op', Float(10)), + Column('s_nom', Float(10)), + Column('x', Float(10)), + Column('r', Float(10)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("hvmv_transformer") + ) # create all the tables - metadata.create_all(engine, checkfirst=True) + METADATA.create_all(engine, checkfirst=True) def df_sql_write(engine, schema, db_table, dataframe): @@ -519,12 +511,12 @@ def export_network_to_db(engine, schema, df, tabletype, srid=None): def drop_ding0_db_tables(engine, schema=SCHEMA): """ - Instructions: In order to drop tables all tables need to be stored in metadata (create tables before dropping them) + Instructions: In order to drop tables all tables need to be stored in METADATA (create tables before dropping them) :param engine: :param schema: :return: """ - tables = metadata.sorted_tables + tables = METADATA.sorted_tables reversed_tables = reversed(tables) print("Please confirm that you would like to drop the following tables:") @@ -565,7 +557,7 @@ def drop_ding0_db_tables(engine, schema=SCHEMA): def db_tables_change_owner(engine, schema=SCHEMA): - tables = metadata.sorted_tables + tables = METADATA.sorted_tables def change_owner(engine, table, role, schema): """ From 77fc5ed44b8ac219a5217f4c8d47961a7da8ff76 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Wed, 26 Sep 2018 18:43:31 +0200 Subject: [PATCH 096/215] minor bug fixes, spelling mistakes, PEP8 fixes --- ding0/io/db_export.py | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 3036d641..4ceec533 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -99,7 +99,7 @@ def prepare_metadatastring_fordb(table): with open(json_file_path, encoding='UTF-8') as jf: if table in json_file: # included for testing / or logging - # print("Comment on table: " + table + "\nusing this METADATA file: " + file + "\n") + # print("Comment on table: " + table + "\nusing this metadata string file: " + file + "\n") mds = json.load(jf) mdsstring = json.dumps(mds, indent=4, ensure_ascii=False) return mdsstring @@ -475,7 +475,7 @@ def run_id_in_db(engine, schema, df, db_versioning, tabletype): metadata_df = 
pd.DataFrame({'run_id': i, 'description': str(metadata_json)}, index=[0]) # create the new run_id from df in db table - df_sql_write(con, SCHEMA, "ego_ding0_versioning", metadata_df) + df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) db_0temp.append(i) # insert df with the new run_id @@ -634,6 +634,10 @@ def execute_export_network_to_db(con, schema=SCHEMA): if __name__ == "__main__": + ##########SQLAlchemy and DB table################ + engine = connection(section='oedb') + session = sessionmaker(bind=engine)() + ##########Ding0 Network and NW Metadata################ # create ding0 Network instance @@ -655,28 +659,26 @@ def execute_export_network_to_db(con, schema=SCHEMA): # ToDo: Include the metadata_json variable returned form fun. in export.py # any list of NetworkDing0 also provides run_id - # nw_metadata = json.dumps(nw.metadata) + # nw_metadata = json.dumps(nw_metadata) metadata_json = json.loads(nw_metadata) ###################################################### - # metadatastring file folder. #ToDO: Test if Path works on other os (Tested on Windows7) and Change to not static - # Modify if folder name is different -> use: "/" - FOLDER = Path('/ego_grid_ding0_metadatastrings') + # tested with reiners_db - create_ding0_sql_tables(con, "topology") - # drop_ding0_db_tables(con, "topology") - # db_tables_change_owner(con, "topology") + create_ding0_sql_tables(engine, SCHEMA) + # drop_ding0_db_tables(engine, SCHEMA) + # db_tables_change_owner(engine, SCHEMA) # tested with reiners_db - create_ding0_sql_tables(con, "topology") - # drop_ding0_db_tables(con) - # db_tables_change_owner(con, "topology") + create_ding0_sql_tables(engine, SCHEMA) + # drop_ding0_db_tables(engine) + # db_tables_change_owner(engine, SCHEMA) # ToDo: Insert line df: Geometry is wkb and fails to be inserted to db table, get tabletype? # parameter: export_network_to_db(engine, schema, df, tabletype, srid=None) - export_network_to_db(con, SCHEMA, lv_gen, "lv_gen", metadata_json) - # export_network_to_db(con, SCHEMA, mv_stations, "mv_stations", metadata_json) + export_network_to_db(engine, SCHEMA, lv_gen, "lv_gen", metadata_json) + # export_network_to_db(CONNECTION, SCHEMA, mv_stations, "mv_stations", metadata_json) From be3aaacde827a88e8aff2002182c68c738215b1b Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 27 Sep 2018 16:12:15 +0200 Subject: [PATCH 097/215] #270 fixed func. load_json_files(), added missing table definition, restructured skriped and dropped 2 func. 
run_id_in_db() and export_network_to_db() --- ding0/io/db_export.py | 242 ++++++++++++++++++------------------------ 1 file changed, 102 insertions(+), 140 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 4ceec533..17ff4891 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -14,9 +14,9 @@ import numpy as np import pandas as pd -from pathlib import Path import json import os +from pathlib import Path import re @@ -38,8 +38,8 @@ # Set the Database schema which you want to add the tables to SCHEMA = "topology" -METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'config', 'metadatastrings') - +METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'io', 'metadatastrings') +CLEANED_METADATA_STRING_FOLDER_PATH = Path(METADATA_STRING_FOLDER) DING0_TABLES = {'versioning': 'ego_ding0_versioning', 'line': 'ego_ding0_line', @@ -58,6 +58,7 @@ 'mv_station': 'ego_ding0_mv_station', 'hvmv_transformer': 'ego_ding0_hvmv_transformer'} + def load_json_files(): """ Creats a list of all .json files in METADATA_STRING_FOLDER @@ -68,7 +69,7 @@ def load_json_files(): contains all .json file names from the folder """ - full_dir = os.walk(METADATA_STRING_FOLDER) + full_dir = os.walk(str(CLEANED_METADATA_STRING_FOLDER_PATH)) jsonmetadata = [] for jsonfiles in full_dir: @@ -95,7 +96,7 @@ def prepare_metadatastring_fordb(table): """ for json_file in load_json_files(): - json_file_path = os.path.join(METADATA_STRING_FOLDER, json_file) + json_file_path = os.path.join(CLEANED_METADATA_STRING_FOLDER_PATH, json_file) with open(json_file_path, encoding='UTF-8') as jf: if table in json_file: # included for testing / or logging @@ -119,7 +120,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): Default: None """ - # versioning table + # 1 versioning table versioning = Table(DING0_TABLES['versioning'], METADATA, Column('run_id', BigInteger, primary_key=True, autoincrement=False, nullable=False), Column('description', String(6000)), @@ -127,7 +128,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb('versioning') ) - # ding0 lines table + # 2 ding0 lines table ding0_line = Table(DING0_TABLES['line'], METADATA, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), @@ -150,7 +151,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): ) - # ding0 lv_branchtee table + # 3 ding0 lv_branchtee table ding0_lv_branchtee = Table(DING0_TABLES['lv_branchtee'], METADATA, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), @@ -161,7 +162,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb('lv_branchtee') ) - # ding0 lv_generator table + # 4 ding0 lv_generator table ding0_lv_generator = Table(DING0_TABLES['lv_generator'], METADATA, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), @@ -180,7 +181,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb('lv_generator') ) - # ding0 lv_load table + # 5 ding0 lv_load table ding0_lv_load = Table(DING0_TABLES['lv_load'], METADATA, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), @@ -193,7 +194,20 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): 
comment=prepare_metadatastring_fordb('lv_load') ) - # ding0 lv_station table + # 6 + ding0_lv_grid = Table(DING0_TABLES['lv_grid'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('name', String(100)), + Column('geom', Geometry('MULTIPOLYGON', 4326)), + Column('population', BigInteger), + Column('voltage_nom', Float(10)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('lv_grid') + ) + + # 7 ding0 lv_station table ding0_lv_station = Table(DING0_TABLES['lv_station'], METADATA, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), @@ -204,7 +218,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb('lv_station') ) - # ding0 mvlv_transformer table + # 8 ding0 mvlv_transformer table ding0_mvlv_transformer = Table(DING0_TABLES['mvlv_transformer'], METADATA, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), @@ -219,7 +233,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb("mvlv_transformer") ) - # ding0 mvlv_mapping table + # 9 ding0 mvlv_mapping table ding0_mvlv_mapping = Table(DING0_TABLES['mvlv_mapping'], METADATA, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), @@ -231,7 +245,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb("mvlv_mapping") ) - # ding0 mv_branchtee table + # 10 ding0 mv_branchtee table ding0_mv_branchtee = Table(DING0_TABLES['mv_branchtee'], METADATA, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), @@ -242,7 +256,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb("mv_branchtee") ) - # ding0 mv_circuitbreaker table + # 11 ding0 mv_circuitbreaker table ding0_mv_circuitbreaker = Table(DING0_TABLES['mv_circuitbreaker'], METADATA, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), @@ -254,7 +268,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb("mv_circuitbreaker") ) - # ding0 mv_generator table + # 12 ding0 mv_generator table ding0_mv_generator = Table(DING0_TABLES['mv_generator'], METADATA, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), @@ -271,7 +285,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb("mv_generator") ) - # ding0 mv_load table + # 13 ding0 mv_load table ding0_mv_load = Table(DING0_TABLES['mv_load'], METADATA, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), @@ -284,7 +298,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb("mv_load") ) - # ding0 mv_grid table + # 14 ding0 mv_grid table ding0_mv_grid = Table(DING0_TABLES['mv_grid'], METADATA, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), @@ -298,7 +312,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): ) - # ding0 
mv_station table + # 15 ding0 mv_station table ding0_mv_station = Table(DING0_TABLES['mv_station'], METADATA, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), @@ -309,7 +323,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb("mv_station") ) - # ding0 hvmv_transformer table + # 16 ding0 hvmv_transformer table ding0_hvmv_transformer = Table(DING0_TABLES['hvmv_transformer'], METADATA, Column('id', Integer, primary_key=True), Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), @@ -393,6 +407,7 @@ def export_df_to_db(engine, schema, df, tabletype): elif tabletype == 'lv_cd': df = df.drop(['lv_grid_id'], axis=1) + df['geom'].apply() df_sql_write(engine, schema, DING0_TABLES['lv_branchtee'], df) elif tabletype == 'lv_gen': @@ -435,81 +450,7 @@ def export_df_to_db(engine, schema, df, tabletype): df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df) -def run_id_in_db(engine, schema, df, db_versioning, tabletype): - """ - Check if the run_id values from the new data frames are available in the DB. - Creates new run_id in the db if not exist. - Filter data frame for column=run_id and compares the values with existing run_id. - db_run_id values are from the DB table: ego_ding0_versioning. - - Parameters - ---------- - :param engine: DB connection - :param schema: DB schema - :param df: pandas data frame -> gets inserted to the db - :param db_versioning: pandas df created from the versioning table in the DB - :param tabletype: select the db table "value relevant in export_df_to_db()" - """ - - db_run_id = db_versioning.filter(items=['run_id']) - df_run_id = df.filter(items=['run_id']) - - # temp stores all run_id values that are available in the DB - db_0temp = [] - for j in db_run_id["run_id"]: - db_0temp.append(j) - - # temp stores run_id value from data frame - df_1temp = [] - for i in df_run_id["run_id"]: - if i in db_0temp: - if i not in df_1temp: - # the run_id value needs to be only present in the run_id column else the filter - # might not work correctly - df_by_run_id = df[df["run_id"] == i] - export_df_to_db(engine, schema, df_by_run_id, tabletype) - # stores the run_id(i) from the df in order to compare with the next loop iteration run_id(i) -> - # df with multiple rows which include the same run_id will not be inserted n times to the db - df_1temp.append(i) - elif i not in db_0temp: - metadata_df = pd.DataFrame({'run_id': i, - 'description': str(metadata_json)}, index=[0]) - # create the new run_id from df in db table - df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) - db_0temp.append(i) - - # insert df with the new run_id - df_by_run_id = df[df["run_id"] == i] - export_df_to_db(engine, schema, df_by_run_id, tabletype) - - -def export_network_to_db(engine, schema, df, tabletype, srid=None): - """ - Exports pre created Pands data frames to a connected database schema. - Creates new entry in ego_ding0_versioning if the table is empty. - Checks if the given pandas data frame "run_id" is available in the DB table. 
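The run-id bookkeeping described in the removed docstrings boils down to one check against the ego_ding0_versioning table: register the run_id (with the metadata JSON as description) if it is not present yet, otherwise just append the grid data. A minimal sketch of that idea, assuming an open SQLAlchemy engine and the table name used in these patches (the helper name ensure_run_id is illustrative, not part of the module):

import json
import pandas as pd

def ensure_run_id(engine, schema, run_id, metadata_json):
    # Run ids already registered in the versioning table.
    versioning = pd.read_sql_table('ego_ding0_versioning', engine, schema,
                                   columns=['run_id', 'description'])
    if run_id not in set(versioning['run_id']):
        # Register the run before any table referencing it via FK is filled.
        row = pd.DataFrame({'run_id': run_id,
                            'description': json.dumps(metadata_json)}, index=[0])
        row.to_sql('ego_ding0_versioning', con=engine, schema=schema,
                   if_exists='append', index=False)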
- - Parameters - ---------- - :param engine: - :param schema: - :param df: - :param tabletype: - :param srid: - """ - - db_versioning = pd.read_sql_table(DING0_TABLES['versioning'], engine, schema, - columns=['run_id', 'description']) - - if engine.dialect.has_table(engine, DING0_TABLES["versioning"]): - - run_id_in_db(engine, schema, df, db_versioning, tabletype) - - else: - print("There is no " + DING0_TABLES["versioning"] + " table in the schema: " + SCHEMA) - - -def drop_ding0_db_tables(engine, schema=SCHEMA): +def drop_ding0_db_tables(engine, schema): """ Instructions: In order to drop tables all tables need to be stored in METADATA (create tables before dropping them) :param engine: @@ -556,7 +497,7 @@ def drop_ding0_db_tables(engine, schema=SCHEMA): print("Confirmation unclear, no action taken") -def db_tables_change_owner(engine, schema=SCHEMA): +def db_tables_change_owner(engine, schema): tables = METADATA.sorted_tables def change_owner(engine, table, role, schema): @@ -573,6 +514,7 @@ def change_owner(engine, table, role, schema): database role that access is granted to """ tablename = table + # ToDo: Still using globally def. variable "SCHEMA" inside this func. schema = SCHEMA grant_str = """ALTER TABLE {schema}.{table} @@ -590,9 +532,9 @@ def change_owner(engine, table, role, schema): engine.close() -def execute_export_network_to_db(con, schema=SCHEMA): +def export_all_dataframes_to_db(engine, schema, srid=None): """ - exports all data frames to the db tables + exports all data frames from func. export_network() to the db tables Parameters ---------- @@ -600,43 +542,67 @@ def execute_export_network_to_db(con, schema=SCHEMA): :param schema: """ - # 1 - export_network_to_db(con, schema, lines, "line", metadata_json) - # 2 - export_network_to_db(con, schema, lv_cd, "lv_cd", metadata_json) - # 3 - export_network_to_db(con, schema, lv_gen, "lv_gen", metadata_json) - # 4 - export_network_to_db(con, schema, lv_stations, "lv_station", metadata_json) - # 5 - export_network_to_db(con, schema, lv_loads, "lv_load", metadata_json) - # 6 - export_network_to_db(con, schema, lv_grid, "lv_grid", metadata_json) - # 7 - export_network_to_db(con, schema, mv_cb, "mv_cb", metadata_json) - # 8 - export_network_to_db(con, schema, mv_cd, "mv_cd", metadata_json) - # 9 - export_network_to_db(con, schema, mv_gen, "mv_gen", metadata_json) - # 10 - export_network_to_db(con, schema, mv_stations, "mv_station", metadata_json) - # 11 - export_network_to_db(con, schema, mv_loads, "mv_load", metadata_json) - # 12 - export_network_to_db(con, schema, mv_grid, "mv_grid", metadata_json) - # 13 - export_network_to_db(con, schema, mvlv_trafos, "mvlv_trafo", metadata_json) - # 14 - export_network_to_db(con, schema, hvmv_trafos, "hvmv_trafo", metadata_json) - # 15 - export_network_to_db(con, schema, mvlv_mapping, "mvlv_mapping", metadata_json) - - + if engine.dialect.has_table(engine, DING0_TABLES["versioning"]): + + db_versioning = pd.read_sql_table(DING0_TABLES['versioning'], engine, schema, + columns=['run_id', 'description']) + + if db_versioning.empty: + + metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], + 'description': str(metadata_json)}, index=[0]) + + df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) + + # ToDo: UseCase done: 1. run_id from metadata_json to db 2. Insert all data frames + # ToDo: UseCase maybe?: 1. There are run_id in the db 2. Insert all data frames (might never be the case?) 
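For reference, db_tables_change_owner() and its inner change_owner() in the hunk above reduce to one ALTER TABLE statement per table registered in METADATA. A condensed sketch of that pattern (the role name 'oeuser' comes from the diff; the function name and signature here are illustrative):

def change_tables_owner(engine, metadata, schema, role='oeuser'):
    # Hand every table registered in this MetaData over to the given role.
    for table in metadata.sorted_tables:
        stmt = 'ALTER TABLE {schema}.{table} OWNER TO {role};'.format(
            schema=schema, table=table.name, role=role)
        engine.execution_options(autocommit=True).execute(stmt)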
+ # 1 + export_df_to_db(engine, schema, lines, "line") + # 2 + # export_df_to_db(engine, schema, lv_cd, "lv_cd") + # # 3 + # export_df_to_db(engine, schema, lv_gen, "lv_gen") + # # 4 + # export_df_to_db(engine, schema, lv_stations, "lv_station") + # # 5 + # export_df_to_db(engine, schema, lv_loads, "lv_load") + # # 6 + # export_df_to_db(engine, schema, lv_grid, "lv_grid") + # # 7 + # export_df_to_db(engine, schema, mv_cb, "mv_cb") + # # 8 + # export_df_to_db(engine, schema, mv_cd, "mv_cd") + # # 9 + # export_df_to_db(engine, schema, mv_gen, "mv_gen") + # # 10 + # export_df_to_db(engine, schema, mv_stations, "mv_station") + # # 11 + # export_df_to_db(engine, schema, mv_loads, "mv_load") + # # 12 + # export_df_to_db(engine, schema, mv_grid, "mv_grid") + # # 13 + # export_df_to_db(engine, schema, mvlv_trafos, "mvlv_trafo") + # # 14 + # export_df_to_db(engine, schema, hvmv_trafos, "hvmv_trafo") + # # 15 + # export_df_to_db(engine, schema, mvlv_mapping, "mvlv_mapping") + + else: + raise KeyError("run_id already present! No tables are input!") + + else: + print("There is no " + DING0_TABLES["versioning"] + " table in the schema: " + SCHEMA) + + + if __name__ == "__main__": ##########SQLAlchemy and DB table################ - engine = connection(section='oedb') - session = sessionmaker(bind=engine)() + oedb_engine = connection(section='oedb') + session = sessionmaker(bind=oedb_engine)() + + # Testing Database + reiners_engine = connection(section='reiners_db') ##########Ding0 Network and NW Metadata################ @@ -667,18 +633,14 @@ def execute_export_network_to_db(con, schema=SCHEMA): # tested with reiners_db - create_ding0_sql_tables(engine, SCHEMA) - # drop_ding0_db_tables(engine, SCHEMA) + create_ding0_sql_tables(reiners_engine, SCHEMA) + # drop_ding0_db_tables(reiners_engine, SCHEMA) # db_tables_change_owner(engine, SCHEMA) - # tested with reiners_db - create_ding0_sql_tables(engine, SCHEMA) - # drop_ding0_db_tables(engine) - # db_tables_change_owner(engine, SCHEMA) - # ToDo: Insert line df: Geometry is wkb and fails to be inserted to db table, get tabletype? # parameter: export_network_to_db(engine, schema, df, tabletype, srid=None) - export_network_to_db(engine, SCHEMA, lv_gen, "lv_gen", metadata_json) + # export_network_to_db(reiners_engine, SCHEMA, lv_gen, "lv_gen", metadata_json) # export_network_to_db(CONNECTION, SCHEMA, mv_stations, "mv_stations", metadata_json) + export_all_dataframes_to_db(reiners_engine, SCHEMA) From 3bdbcdfb493bdbd2a2923befa49f0c21e9c02bcb Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 11 Oct 2018 13:10:53 +0200 Subject: [PATCH 098/215] #270 included GeoAlchemy2.WKTElement, fixed metadatastring import, new func. 
create_wkt_element (also for wkb), modifiyed df_sql_write to import the geometry to db given by export.py and handle column naming also added parameter geom_type, modified export.py geom column so they returns a wkt geometry except for linestrings --- ding0/io/db_export.py | 135 +++++++++++++++++++++++++++--------------- ding0/io/export.py | 22 +++---- 2 files changed, 98 insertions(+), 59 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 17ff4891..fe5e161c 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -16,7 +16,6 @@ import pandas as pd import json import os -from pathlib import Path import re @@ -25,9 +24,10 @@ import ding0 from ding0.io.export import export_network from ding0.core import NetworkDing0 +from ding0.tools.results import save_nd_to_pickle, load_nd_from_pickle from sqlalchemy import MetaData, ARRAY, BigInteger, Boolean, CheckConstraint, Column, Date, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, Integer, JSON, Numeric, SmallInteger, String, Table, Text, UniqueConstraint, text -from geoalchemy2.types import Geometry, Raster +from geoalchemy2.types import Geometry, Raster, WKTElement, WKBElement from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker @@ -36,10 +36,9 @@ METADATA = DECLARATIVE_BASE.metadata # Set the Database schema which you want to add the tables to -SCHEMA = "topology" +SCHEMA = "model_draft" METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'io', 'metadatastrings') -CLEANED_METADATA_STRING_FOLDER_PATH = Path(METADATA_STRING_FOLDER) DING0_TABLES = {'versioning': 'ego_ding0_versioning', 'line': 'ego_ding0_line', @@ -69,7 +68,7 @@ def load_json_files(): contains all .json file names from the folder """ - full_dir = os.walk(str(CLEANED_METADATA_STRING_FOLDER_PATH)) + full_dir = os.walk(str(METADATA_STRING_FOLDER)) jsonmetadata = [] for jsonfiles in full_dir: @@ -96,7 +95,7 @@ def prepare_metadatastring_fordb(table): """ for json_file in load_json_files(): - json_file_path = os.path.join(CLEANED_METADATA_STRING_FOLDER_PATH, json_file) + json_file_path = os.path.join(METADATA_STRING_FOLDER, json_file) with open(json_file_path, encoding='UTF-8') as jf: if table in json_file: # included for testing / or logging @@ -180,7 +179,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): schema=ding0_schema, comment=prepare_metadatastring_fordb('lv_generator') ) - + # ToDo: Check if right geom type # 5 ding0 lv_load table ding0_lv_load = Table(DING0_TABLES['lv_load'], METADATA, Column('id', Integer, primary_key=True), @@ -291,6 +290,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), Column('name', String(100)), + # ToDo: Check geometry type Column('geom', Geometry('LINESTRING', 4326)), Column('is_aggregated', Boolean), Column('consumption', String(100)), @@ -342,7 +342,13 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): METADATA.create_all(engine, checkfirst=True) -def df_sql_write(engine, schema, db_table, dataframe): +def create_wkt_element(geom): + return WKTElement(geom, srid=int(SRID)) + +def create_wkb_element(geom): + return WKBElement(geom, srid=int(SRID)) + +def df_sql_write(engine, schema, db_table, dataframe, geom_type=None): """ Convert dataframes such that their column names are made small and the index is renamed 'id' so as to @@ -368,25 +374,58 @@ def df_sql_write(engine, schema, db_table, dataframe): Sqlalchemy 
database engine schema: DB schema """ - # if 'geom' in dataframe.columns: - # sql_write_geom = dataframe.filter(items=['geom']) - # for i in sql_write_geom['geom']: - # - # # insert_geom = "UPDATE {} SET {}=ST_GeomFromText('{}') WHERE (id={}) ".format(db_table, "geom", i, "1") - # insert_geom = "INSERT INTO {} ({}) VALUES (ST_GeomFromText('{}'))".format(db_table, "geom", i) - # engine.execute(insert_geom) + # rename dataframe column DB like if 'id' in dataframe.columns: dataframe.rename(columns={'id':'id_db'}, inplace=True) sql_write_df = dataframe.copy() sql_write_df.columns = sql_write_df.columns.map(str.lower) - # sql_write_df = sql_write_df.set_index('id') - sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) + # sql_write_df = sql_write_df.set_index('id_db') + + # Insert pd Dataframe with geom column + if 'geom' in dataframe.columns: + if geom_type == 'POINT': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('POINT', srid=int(SRID))}) + + elif geom_type == 'MULTIPOLYGON': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('MULTIPOLYGON', srid=int(SRID))}) + + elif geom_type == 'LINESTRING': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkb_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('LINESTRING', srid=int(SRID))}) + else: + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) + # If the Dataframe does not contain id named column (like already named id_db) else: sql_write_df = dataframe.copy() sql_write_df.columns = sql_write_df.columns.map(str.lower) # sql_write_df = sql_write_df.set_index('id') - sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) + + if 'geom' in dataframe.columns: + # Insert pd Dataframe with geom column + if geom_type == 'POINT': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('POINT', srid=int(SRID))}) + + elif geom_type == 'MULTIPOLYGON': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('POINT', srid=int(SRID))}) + + elif geom_type == 'LINESTRING': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('POINT', srid=int(SRID))}) + else: + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) def export_df_to_db(engine, schema, df, tabletype): @@ -403,51 +442,50 @@ def export_df_to_db(engine, schema, df, tabletype): """ print("Exporting table type : {}".format(tabletype)) if tabletype == 'line': - df_sql_write(engine, schema, DING0_TABLES['line'], df) + df_sql_write(engine, schema, DING0_TABLES['line'], df, 'LINESTRING') elif tabletype == 'lv_cd': df = df.drop(['lv_grid_id'], axis=1) - df['geom'].apply() - df_sql_write(engine, schema, DING0_TABLES['lv_branchtee'], df) + df_sql_write(engine, schema, DING0_TABLES['lv_branchtee'], df, 'POINT') 
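Each geometry branch above follows the same pattern: wrap the WKT representation of the geometry in a GeoAlchemy2 WKTElement that carries the SRID, then hand a matching Geometry dtype to DataFrame.to_sql. A small stand-alone sketch of that pattern, with an illustrative table name and SRID (in the patches the SRID comes from the network config and the geometries arrive already serialised by export.py):

import pandas as pd
from geoalchemy2 import Geometry, WKTElement
from shapely.geometry import Point

SRID = 4326

def to_wkt_element(geom):
    # Keep missing geometries as NULL instead of serialising them.
    return WKTElement(geom.wkt, srid=SRID) if geom is not None else None

df = pd.DataFrame({'id_db': [1], 'geom': [Point(10.0, 52.5)]})
df['geom'] = df['geom'].apply(to_wkt_element)

# With a PostGIS-enabled engine the frame can then be appended like this:
# df.to_sql('example_points', con=engine, schema='topology', if_exists='append',
#           index=False, dtype={'geom': Geometry('POINT', srid=SRID)})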
elif tabletype == 'lv_gen': - df_sql_write(engine, schema, DING0_TABLES['lv_generator'], df) + df_sql_write(engine, schema, DING0_TABLES['lv_generator'], df, 'POINT') elif tabletype == 'lv_load': - df_sql_write(engine, schema, DING0_TABLES['lv_load'], df) + df_sql_write(engine, schema, DING0_TABLES['lv_load'], df, 'POINT') elif tabletype == 'lv_grid': - df_sql_write(engine, schema, DING0_TABLES['lv_grid'], df) + df_sql_write(engine, schema, DING0_TABLES['lv_grid'], df, 'MULTIPOLYGON') elif tabletype == 'lv_station': - df_sql_write(engine, schema, DING0_TABLES['lv_station'], df) + df_sql_write(engine, schema, DING0_TABLES['lv_station'], df, 'POINT') elif tabletype == 'mvlv_trafo': - df_sql_write(engine, schema, DING0_TABLES['mvlv_transformer'], df) + df_sql_write(engine, schema, DING0_TABLES['mvlv_transformer'], df, 'POINT') elif tabletype == 'mvlv_mapping': df_sql_write(engine, schema, DING0_TABLES['mvlv_mapping'], df) elif tabletype == 'mv_cd': - df_sql_write(engine, schema, DING0_TABLES['mv_branchtee'], df) + df_sql_write(engine, schema, DING0_TABLES['mv_branchtee'], df, 'POINT') elif tabletype == 'mv_cb': - df_sql_write(engine, schema, DING0_TABLES['mv_circuitbreaker'], df) + df_sql_write(engine, schema, DING0_TABLES['mv_circuitbreaker'], df, 'POINT') elif tabletype == 'mv_gen': - df_sql_write(engine, schema, DING0_TABLES['mv_generator'], df) + df_sql_write(engine, schema, DING0_TABLES['mv_generator'], df, 'POINT') elif tabletype == 'mv_load': - df_sql_write(engine, schema, DING0_TABLES['mv_load'], df) + df_sql_write(engine, schema, DING0_TABLES['mv_load'], df, 'LINESTRING') elif tabletype == 'mv_grid': - df_sql_write(engine, schema, DING0_TABLES['mv_grid'], df) + df_sql_write(engine, schema, DING0_TABLES['mv_grid'], df, 'MULTIPOLYGON') elif tabletype == 'mv_station': - df_sql_write(engine, schema, DING0_TABLES['mv_station'], df) + df_sql_write(engine, schema, DING0_TABLES['mv_station'], df, 'POINT') elif tabletype == 'hvmv_trafo': - df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df) + df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df, 'POINT') def drop_ding0_db_tables(engine, schema): @@ -514,11 +552,9 @@ def change_owner(engine, table, role, schema): database role that access is granted to """ tablename = table - # ToDo: Still using globally def. variable "SCHEMA" inside this func. - schema = SCHEMA grant_str = """ALTER TABLE {schema}.{table} - OWNER TO {role};""".format(schema=schema, table=tablename, + OWNER TO {role};""".format(schema=schema, table=tablename.name, role=role) # engine.execute(grant_str) @@ -532,7 +568,7 @@ def change_owner(engine, table, role, schema): engine.close() -def export_all_dataframes_to_db(engine, schema, srid=None): +def export_all_dataframes_to_db(engine, schema): """ exports all data frames from func. export_network() to the db tables @@ -556,9 +592,9 @@ def export_all_dataframes_to_db(engine, schema, srid=None): # ToDo: UseCase done: 1. run_id from metadata_json to db 2. Insert all data frames # ToDo: UseCase maybe?: 1. There are run_id in the db 2. Insert all data frames (might never be the case?) 
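The drop helper above only works because every table was declared through the shared METADATA object; dropping in reverse dependency order keeps the foreign keys on run_id (which point at ego_ding0_versioning) from blocking the drop. Stripped of the interactive confirmation, the core is roughly this sketch:

def drop_registered_tables(engine, metadata):
    # Drop children first and ego_ding0_versioning last so FK constraints hold.
    for table in reversed(metadata.sorted_tables):
        table.drop(engine, checkfirst=True)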
- # 1 - export_df_to_db(engine, schema, lines, "line") - # 2 + # # 1 + # export_df_to_db(engine, schema, lines, "line") + # # 2 # export_df_to_db(engine, schema, lv_cd, "lv_cd") # # 3 # export_df_to_db(engine, schema, lv_gen, "lv_gen") @@ -578,7 +614,7 @@ def export_all_dataframes_to_db(engine, schema, srid=None): # export_df_to_db(engine, schema, mv_stations, "mv_station") # # 11 # export_df_to_db(engine, schema, mv_loads, "mv_load") - # # 12 + # 12 # export_df_to_db(engine, schema, mv_grid, "mv_grid") # # 13 # export_df_to_db(engine, schema, mvlv_trafos, "mvlv_trafo") @@ -608,8 +644,12 @@ def export_all_dataframes_to_db(engine, schema, srid=None): # create ding0 Network instance nw = NetworkDing0(name='network') + # nw = load_nd_from_pickle(filename='ding0_grids_example.pkl', path='ding0\ding0\examples\ding0_grids_example.pkl') - + #srid + #ToDo: Check why converted to int and string + SRID = str(int(nw.config['geo']['srid'])) + # SRID = 4326 # choose MV Grid Districts to import mv_grid_districts = [3040] @@ -623,7 +663,6 @@ def export_all_dataframes_to_db(engine, schema, srid=None): mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ lines, mvlv_mapping = export_network(nw) - # ToDo: Include the metadata_json variable returned form fun. in export.py # any list of NetworkDing0 also provides run_id # nw_metadata = json.dumps(nw_metadata) metadata_json = json.loads(nw_metadata) @@ -632,15 +671,15 @@ def export_all_dataframes_to_db(engine, schema, srid=None): - # tested with reiners_db - create_ding0_sql_tables(reiners_engine, SCHEMA) - # drop_ding0_db_tables(reiners_engine, SCHEMA) - # db_tables_change_owner(engine, SCHEMA) + # tested with reiners_db and oedb + create_ding0_sql_tables(oedb_engine, SCHEMA) + # drop_ding0_db_tables(oedb_engine, SCHEMA) + # db_tables_change_owner(oedb_engine, SCHEMA) # ToDo: Insert line df: Geometry is wkb and fails to be inserted to db table, get tabletype? 
# parameter: export_network_to_db(engine, schema, df, tabletype, srid=None) # export_network_to_db(reiners_engine, SCHEMA, lv_gen, "lv_gen", metadata_json) # export_network_to_db(CONNECTION, SCHEMA, mv_stations, "mv_stations", metadata_json) - export_all_dataframes_to_db(reiners_engine, SCHEMA) + export_all_dataframes_to_db(oedb_engine, SCHEMA) diff --git a/ding0/io/export.py b/ding0/io/export.py index 41056fbe..4eaede49 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -26,9 +26,8 @@ from ding0.core.network.loads import LVLoadDing0, MVLoadDing0 from ding0.core import LVLoadAreaCentreDing0 -from geoalchemy2.shape import from_shape -from shapely.geometry import Point, MultiPoint, MultiLineString, LineString -from shapely.geometry import shape, mapping +from geoalchemy2.shape import from_shape, to_shape +from shapely.geometry import Point, MultiPoint, MultiLineString, LineString, MultiPolygon, shape, mapping def export_network(nw, mode='', run_id=None): @@ -184,7 +183,7 @@ def aggregate_loads(la_center, aggr): mv_stations_dict[0] = { 'id': mv_district.mv_grid.id_db, 'name': mv_station_name, - 'geom': mv_station.geo_data, + 'geom': wkt_dumps(mv_station.geo_data), 'run_id': run_id} # Trafos MV @@ -192,7 +191,7 @@ def aggregate_loads(la_center, aggr): mvtrafos_idx += 1 hvmv_trafos_dict[mvtrafos_idx] = { 'id': mv_station.id_db, - 'geom': mv_station.geo_data, + 'geom': wkt_dumps(mv_station.geo_data), 'name': '_'.join( ['MVTransformerDing0', 'MV', str(mv_station.id_db), str(mv_station.id_db)]), @@ -321,7 +320,7 @@ def aggregate_loads(la_center, aggr): # Collect metadata of aggregated load areas aggr['aggregates'] = { 'population': node.lv_load_area.zensus_sum, - 'geom': node.lv_load_area.geo_area} + 'geom': wkt_dumps(node.lv_load_area.geo_area)} aggr_line_type = nw._static_data['MV_cables'].iloc[ nw._static_data['MV_cables']['I_max_th'].idxmax()] geom = wkt_dumps(node.geo_data) @@ -394,10 +393,11 @@ def aggregate_loads(la_center, aggr): # MVedges for branch in mv_district.mv_grid.graph_edges(): - # geom = wkt_dumps(node.geo_data) - geom = from_shape(LineString([branch['adj_nodes'][0].geo_data, - branch['adj_nodes'][1].geo_data]), - srid=srid) + # geom_string = from_shape(LineString([branch['adj_nodes'][0].geo_data, + # branch['adj_nodes'][1].geo_data]), + # srid=srid) + # geom = wkt_dumps(geom_string) + if not any([isinstance(branch['adj_nodes'][0], LVLoadAreaCentreDing0), isinstance(branch['adj_nodes'][1], @@ -608,4 +608,4 @@ def aggregate_loads(la_center, aggr): return run_id, metadata_json, \ lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ - lines, mvlv_mapping \ No newline at end of file + lines, mvlv_mapping From 66f7703caf35b4afeb081818efe3de0e71c74ee4 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 11 Oct 2018 18:52:56 +0200 Subject: [PATCH 099/215] #270 changed mv_load table geom type to GEOMETRY, modified func. create_wkt_element to handel geometrys with value=none, modified func. df_sql_write to handle geometries with type GEOMETRY, Tested func. 
export_all_dataframes_to_db except for the mv_load export --- ding0/io/db_export.py | 62 +++++++++++++++++++++++++------------------ ding0/io/export.py | 9 ++----- 2 files changed, 38 insertions(+), 33 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index fe5e161c..3b1d4b20 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -27,7 +27,7 @@ from ding0.tools.results import save_nd_to_pickle, load_nd_from_pickle from sqlalchemy import MetaData, ARRAY, BigInteger, Boolean, CheckConstraint, Column, Date, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, Integer, JSON, Numeric, SmallInteger, String, Table, Text, UniqueConstraint, text -from geoalchemy2.types import Geometry, Raster, WKTElement, WKBElement +from geoalchemy2.types import Geometry, Raster, WKTElement from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker @@ -291,7 +291,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): Column('id_db', BigInteger), Column('name', String(100)), # ToDo: Check geometry type - Column('geom', Geometry('LINESTRING', 4326)), + Column('geom', Geometry('GEOMETRY', 4326)), Column('is_aggregated', Boolean), Column('consumption', String(100)), schema=ding0_schema, @@ -342,11 +342,13 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): METADATA.create_all(engine, checkfirst=True) +# Use GeoAlchemy's WKTElement to create a geom with SRID def create_wkt_element(geom): - return WKTElement(geom, srid=int(SRID)) + if geom is not None: + return WKTElement(geom, srid=int(SRID), extended=True) + else: + return None -def create_wkb_element(geom): - return WKBElement(geom, srid=int(SRID)) def df_sql_write(engine, schema, db_table, dataframe, geom_type=None): """ @@ -396,7 +398,7 @@ def df_sql_write(engine, schema, db_table, dataframe, geom_type=None): dtype={'geom': Geometry('MULTIPOLYGON', srid=int(SRID))}) elif geom_type == 'LINESTRING': - sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkb_element) + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('LINESTRING', srid=int(SRID))}) else: @@ -418,12 +420,19 @@ def df_sql_write(engine, schema, db_table, dataframe, geom_type=None): elif geom_type == 'MULTIPOLYGON': sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, - dtype={'geom': Geometry('POINT', srid=int(SRID))}) + dtype={'geom': Geometry('MULTIPOLYGON', srid=int(SRID))}) elif geom_type == 'LINESTRING': sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, - dtype={'geom': Geometry('POINT', srid=int(SRID))}) + dtype={'geom': Geometry('LINESTRING', srid=int(SRID))}) + + elif geom_type == 'GEOMETRY': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('GEOMETRY', srid=int(SRID))}) + + else: sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) @@ -475,6 +484,7 @@ def export_df_to_db(engine, schema, df, tabletype): elif tabletype == 'mv_gen': df_sql_write(engine, schema, DING0_TABLES['mv_generator'], df, 'POINT') + # ToDo: Check the geom_type elif tabletype == 'mv_load': df_sql_write(engine, schema, 
DING0_TABLES['mv_load'], df, 'LINESTRING') @@ -488,6 +498,7 @@ def export_df_to_db(engine, schema, df, tabletype): df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df, 'POINT') +# ToDo: function works but throws unexpected error (versioning tbl dosent exists def drop_ding0_db_tables(engine, schema): """ Instructions: In order to drop tables all tables need to be stored in METADATA (create tables before dropping them) @@ -535,6 +546,7 @@ def drop_ding0_db_tables(engine, schema): print("Confirmation unclear, no action taken") +# ToDo: Functions works but engine.close() is not tested def db_tables_change_owner(engine, schema): tables = METADATA.sorted_tables @@ -590,38 +602,36 @@ def export_all_dataframes_to_db(engine, schema): df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) - # ToDo: UseCase done: 1. run_id from metadata_json to db 2. Insert all data frames - # ToDo: UseCase maybe?: 1. There are run_id in the db 2. Insert all data frames (might never be the case?) # # 1 - # export_df_to_db(engine, schema, lines, "line") + export_df_to_db(engine, schema, lines, "line") # # 2 - # export_df_to_db(engine, schema, lv_cd, "lv_cd") + export_df_to_db(engine, schema, lv_cd, "lv_cd") # # 3 - # export_df_to_db(engine, schema, lv_gen, "lv_gen") + export_df_to_db(engine, schema, lv_gen, "lv_gen") # # 4 - # export_df_to_db(engine, schema, lv_stations, "lv_station") + export_df_to_db(engine, schema, lv_stations, "lv_station") # # 5 - # export_df_to_db(engine, schema, lv_loads, "lv_load") + export_df_to_db(engine, schema, lv_loads, "lv_load") # # 6 - # export_df_to_db(engine, schema, lv_grid, "lv_grid") + export_df_to_db(engine, schema, lv_grid, "lv_grid") # # 7 - # export_df_to_db(engine, schema, mv_cb, "mv_cb") + export_df_to_db(engine, schema, mv_cb, "mv_cb") # # 8 - # export_df_to_db(engine, schema, mv_cd, "mv_cd") + export_df_to_db(engine, schema, mv_cd, "mv_cd") # # 9 - # export_df_to_db(engine, schema, mv_gen, "mv_gen") + export_df_to_db(engine, schema, mv_gen, "mv_gen") # # 10 - # export_df_to_db(engine, schema, mv_stations, "mv_station") + export_df_to_db(engine, schema, mv_stations, "mv_station") # # 11 - # export_df_to_db(engine, schema, mv_loads, "mv_load") + export_df_to_db(engine, schema, mv_loads, "mv_load") # 12 - # export_df_to_db(engine, schema, mv_grid, "mv_grid") + export_df_to_db(engine, schema, mv_grid, "mv_grid") # # 13 - # export_df_to_db(engine, schema, mvlv_trafos, "mvlv_trafo") + export_df_to_db(engine, schema, mvlv_trafos, "mvlv_trafo") # # 14 - # export_df_to_db(engine, schema, hvmv_trafos, "hvmv_trafo") + export_df_to_db(engine, schema, hvmv_trafos, "hvmv_trafo") # # 15 - # export_df_to_db(engine, schema, mvlv_mapping, "mvlv_mapping") + export_df_to_db(engine, schema, mvlv_mapping, "mvlv_mapping") else: raise KeyError("run_id already present! 
No tables are input!") @@ -649,7 +659,7 @@ def export_all_dataframes_to_db(engine, schema): #srid #ToDo: Check why converted to int and string SRID = str(int(nw.config['geo']['srid'])) - # SRID = 4326 + # choose MV Grid Districts to import mv_grid_districts = [3040] diff --git a/ding0/io/export.py b/ding0/io/export.py index 4eaede49..91df7e1b 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -17,7 +17,6 @@ import json - from ding0.core import NetworkDing0 from ding0.core import GeneratorDing0, GeneratorFluctuatingDing0 from ding0.core import LVCableDistributorDing0, MVCableDistributorDing0 @@ -371,9 +370,7 @@ def aggregate_loads(la_center, aggr): 'node1': mv_aggr_load_name, 'node2': mv_station_name, 'run_id': run_id, - 'geom': from_shape(LineString([mv_station.geo_data, - mv_station.geo_data]), - srid=srid) + 'geom': LineString([mv_station.geo_data, mv_station.geo_data]) } # TODO: eventually remove export of DisconnectingPoints from export @@ -426,9 +423,7 @@ def aggregate_loads(la_center, aggr): 'MV', str(mv_grid_id), str(branch['adj_nodes'][1].id_db)]), 'run_id': run_id, - 'geom': from_shape(LineString([branch['adj_nodes'][0].geo_data, - branch['adj_nodes'][1].geo_data]), - srid=srid), + 'geom': LineString([branch['adj_nodes'][0].geo_data, branch['adj_nodes'][1].geo_data]) } if lv_info: From 99a3a1bad1b42bfd720df379c3774a9d576748fd Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 12 Oct 2018 20:30:01 +0200 Subject: [PATCH 100/215] #270 handled geom:GEOMETRY --- ding0/io/db_export.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 3b1d4b20..12052fea 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -401,6 +401,12 @@ def df_sql_write(engine, schema, db_table, dataframe, geom_type=None): sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('LINESTRING', srid=int(SRID))}) + + elif geom_type == 'GEOMETRY': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('GEOMETRY', srid=int(SRID))}) + else: sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) # If the Dataframe does not contain id named column (like already named id_db) @@ -486,7 +492,7 @@ def export_df_to_db(engine, schema, df, tabletype): # ToDo: Check the geom_type elif tabletype == 'mv_load': - df_sql_write(engine, schema, DING0_TABLES['mv_load'], df, 'LINESTRING') + df_sql_write(engine, schema, DING0_TABLES['mv_load'], df, 'GEOMETRY') elif tabletype == 'mv_grid': df_sql_write(engine, schema, DING0_TABLES['mv_grid'], df, 'MULTIPOLYGON') From 9c6f972a21f93906f815b2633e245b90ac22de77 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 12 Oct 2018 21:16:19 +0200 Subject: [PATCH 101/215] #270 added some docstring information, cleaned code --- ding0/io/db_export.py | 55 +++++++++++++++++++++++++------------------ 1 file changed, 32 insertions(+), 23 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 12052fea..20f879da 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -38,8 +38,10 @@ # Set the Database schema which you want to add the tables to SCHEMA = "model_draft" +# Metadata folder Path METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'io', 'metadatastrings') +# set your Table names 
DING0_TABLES = {'versioning': 'ego_ding0_versioning', 'line': 'ego_ding0_line', 'lv_branchtee': 'ego_ding0_lv_branchtee', @@ -90,7 +92,7 @@ def prepare_metadatastring_fordb(table): table: str table name of the sqlAlchemy table - return: mdsstring:str + :return: mdsstring:str Contains the .json file as string """ @@ -107,7 +109,7 @@ def prepare_metadatastring_fordb(table): def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): """ - Create the ding0 tables + Create the 16 ding0 tables Parameters ---------- @@ -149,7 +151,6 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb('line') ) - # 3 ding0 lv_branchtee table ding0_lv_branchtee = Table(DING0_TABLES['lv_branchtee'], METADATA, Column('id', Integer, primary_key=True), @@ -311,7 +312,6 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): comment=prepare_metadatastring_fordb("mv_grid") ) - # 15 ding0 mv_station table ding0_mv_station = Table(DING0_TABLES['mv_station'], METADATA, Column('id', Integer, primary_key=True), @@ -342,8 +342,13 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): METADATA.create_all(engine, checkfirst=True) -# Use GeoAlchemy's WKTElement to create a geom with SRID def create_wkt_element(geom): + """ + Use GeoAlchemy's WKTElement to create a geom with SRID + + :param geom: Shaply geometry from script export.py + :return: GeoAlchemy2 WKTElement (PostGis func:ST_GeomFromText) + """ if geom is not None: return WKTElement(geom, srid=int(SRID), extended=True) else: @@ -352,13 +357,14 @@ def create_wkt_element(geom): def df_sql_write(engine, schema, db_table, dataframe, geom_type=None): """ - Convert dataframes such that their column names - are made small and the index is renamed 'id' so as to - correctly load its data to its appropriate sql table. + Convert data frames such that their column names + are made small and the index is renamed 'id_db' so as to + correctly load its data to its appropriate sql table. Also handles the + upload to a DB data frames with different geometry types. .. ToDo: need to check for id_db instead of only 'id' in index label names - NOTE: This function does not check if the dataframe columns + NOTE: This function does not check if the data frame columns matches the db_table fields, if they do not then no warning is given. 
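Together, load_json_files() and prepare_metadatastring_fordb() do one thing: locate the metadata string .json file whose name contains the table name and return its content as a pretty-printed string for the table comment. A compact equivalent, assuming the same folder layout as METADATA_STRING_FOLDER (the helper name is illustrative):

import json
import os

def metadata_comment_for(table, folder):
    # Return the first matching metadata string as an indented JSON string.
    for filename in os.listdir(folder):
        if filename.endswith('.json') and table in filename:
            with open(os.path.join(folder, filename), encoding='UTF-8') as jf:
                return json.dumps(json.load(jf), indent=4, ensure_ascii=False)
    return None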
@@ -374,17 +380,21 @@ def df_sql_write(engine, schema, db_table, dataframe, geom_type=None): engine: :py:mod:`sqlalchemy.engine.base.Engine` Sqlalchemy database engine + schema: DB schema + + geom_type: Prameter for handling data frames with + different geometry types """ - # rename dataframe column DB like + # rename data frame column DB like if 'id' in dataframe.columns: dataframe.rename(columns={'id':'id_db'}, inplace=True) sql_write_df = dataframe.copy() sql_write_df.columns = sql_write_df.columns.map(str.lower) # sql_write_df = sql_write_df.set_index('id_db') - # Insert pd Dataframe with geom column + # Insert pd data frame with geom column if 'geom' in dataframe.columns: if geom_type == 'POINT': sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) @@ -438,7 +448,6 @@ def df_sql_write(engine, schema, db_table, dataframe, geom_type=None): sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('GEOMETRY', srid=int(SRID))}) - else: sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) @@ -450,10 +459,12 @@ def export_df_to_db(engine, schema, df, tabletype): Parameters ---------- - :param engine: + :param engine: sqlalchemy.engine.base.Engine` + Sqlalchemy database engine :param schema: :param df: - :param tabletype: + :param tabletype: Set the destination table where the pd data frame will + be stored in """ print("Exporting table type : {}".format(tabletype)) if tabletype == 'line': @@ -504,11 +515,12 @@ def export_df_to_db(engine, schema, df, tabletype): df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df, 'POINT') -# ToDo: function works but throws unexpected error (versioning tbl dosent exists +# ToDo: function works but throws unexpected error (versioning tbl dosent exists) def drop_ding0_db_tables(engine, schema): """ Instructions: In order to drop tables all tables need to be stored in METADATA (create tables before dropping them) - :param engine: + :param engine: sqlalchemy.engine.base.Engine` + Sqlalchemy database engine :param schema: :return: """ @@ -562,8 +574,9 @@ def change_owner(engine, table, role, schema): Parameters ---------- - session : sqlalchemy session object + engine: sqlalchemy session object A valid connection to a database + schema: table : sqlalchmy Table class definition The database table role : str @@ -592,7 +605,8 @@ def export_all_dataframes_to_db(engine, schema): Parameters ---------- - :param con: + :param engine:sqlalchemy.engine.base.Engine` + Sqlalchemy database engine :param schema: """ @@ -646,7 +660,6 @@ def export_all_dataframes_to_db(engine, schema): print("There is no " + DING0_TABLES["versioning"] + " table in the schema: " + SCHEMA) - if __name__ == "__main__": ##########SQLAlchemy and DB table################ @@ -685,15 +698,11 @@ def export_all_dataframes_to_db(engine, schema): ###################################################### - - # tested with reiners_db and oedb create_ding0_sql_tables(oedb_engine, SCHEMA) # drop_ding0_db_tables(oedb_engine, SCHEMA) # db_tables_change_owner(oedb_engine, SCHEMA) - - # ToDo: Insert line df: Geometry is wkb and fails to be inserted to db table, get tabletype? 
# parameter: export_network_to_db(engine, schema, df, tabletype, srid=None) # export_network_to_db(reiners_engine, SCHEMA, lv_gen, "lv_gen", metadata_json) # export_network_to_db(CONNECTION, SCHEMA, mv_stations, "mv_stations", metadata_json) From 635b8770d0adcc79c618a5839dc444b54e73857d Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 16 Oct 2018 17:48:58 +0200 Subject: [PATCH 102/215] #270 included a second grid_district to test the import functions, finished import successful with two grid_districts with same run_id --- ding0/io/db_export.py | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 20f879da..9a2373aa 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -17,6 +17,8 @@ import json import os +from datetime import datetime + import re from egoio.tools.db import connection @@ -586,7 +588,7 @@ def change_owner(engine, table, role, schema): grant_str = """ALTER TABLE {schema}.{table} OWNER TO {role};""".format(schema=schema, table=tablename.name, - role=role) + role=role) # engine.execute(grant_str) engine.execution_options(autocommit=True).execute(grant_str) @@ -614,9 +616,11 @@ def export_all_dataframes_to_db(engine, schema): db_versioning = pd.read_sql_table(DING0_TABLES['versioning'], engine, schema, columns=['run_id', 'description']) - + # Use for another run with different run_id + # if metadata_json['run_id'] not in db_versioning['run_id']: + # Use if just one run_id should be present to the DB table if db_versioning.empty: - + # this leads to wrong run_id if run_id is SET in __main__ -> 'run_id': metadata_json['run_id'] metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], 'description': str(metadata_json)}, index=[0]) @@ -638,7 +642,7 @@ def export_all_dataframes_to_db(engine, schema): export_df_to_db(engine, schema, mv_cb, "mv_cb") # # 8 export_df_to_db(engine, schema, mv_cd, "mv_cd") - # # 9 + # 9 export_df_to_db(engine, schema, mv_gen, "mv_gen") # # 10 export_df_to_db(engine, schema, mv_stations, "mv_station") @@ -654,7 +658,7 @@ def export_all_dataframes_to_db(engine, schema): export_df_to_db(engine, schema, mvlv_mapping, "mvlv_mapping") else: - raise KeyError("run_id already present! No tables are input!") + raise KeyError("a run_id already present! 
No tables are input!") else: print("There is no " + DING0_TABLES["versioning"] + " table in the schema: " + SCHEMA) @@ -662,25 +666,29 @@ def export_all_dataframes_to_db(engine, schema): if __name__ == "__main__": - ##########SQLAlchemy and DB table################ + # #########SQLAlchemy and DB table################ oedb_engine = connection(section='oedb') session = sessionmaker(bind=oedb_engine)() # Testing Database reiners_engine = connection(section='reiners_db') - ##########Ding0 Network and NW Metadata################ + # #########Ding0 Network and NW Metadata################ # create ding0 Network instance nw = NetworkDing0(name='network') # nw = load_nd_from_pickle(filename='ding0_grids_example.pkl', path='ding0\ding0\examples\ding0_grids_example.pkl') - #srid - #ToDo: Check why converted to int and string + # srid + # ToDo: Check why converted to int and string SRID = str(int(nw.config['geo']['srid'])) + # provide run_id, note that the run_id is unique to the DB table + # if not set it will be set + # RUN_ID = datetime.now().strftime("%Y%m%d%H%M%S") + # choose MV Grid Districts to import - mv_grid_districts = [3040] + mv_grid_districts = [3040, 3045] # run DING0 on selected MV Grid District nw.run_ding0(session=session, From 8dbd0744e186a0ee6c37362ca41fb817fa976cd5 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 16 Oct 2018 17:51:31 +0200 Subject: [PATCH 103/215] #270 included a second grid_district to test the import functions, finished import successful with two grid_districts with same run_id --- ding0/io/db_export.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 9a2373aa..17eeff70 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -690,12 +690,13 @@ def export_all_dataframes_to_db(engine, schema): # choose MV Grid Districts to import mv_grid_districts = [3040, 3045] + # for grid in mv_grid_districts: # run DING0 on selected MV Grid District nw.run_ding0(session=session, mv_grid_districts_no=mv_grid_districts) # return values from export_network() as tupels - run_id, nw_metadata, \ + run_id , nw_metadata, \ lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ lines, mvlv_mapping = export_network(nw) From 45b36d4dde82f338ffb8aa4d746d4bd77252f9eb Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 16 Oct 2018 17:52:13 +0200 Subject: [PATCH 104/215] Revert "#270 included a second grid_district to test the import functions, finished import successful with two grid_districts with same run_id" This reverts commit 8dbd074 --- ding0/io/db_export.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 17eeff70..9a2373aa 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -690,13 +690,12 @@ def export_all_dataframes_to_db(engine, schema): # choose MV Grid Districts to import mv_grid_districts = [3040, 3045] - # for grid in mv_grid_districts: # run DING0 on selected MV Grid District nw.run_ding0(session=session, mv_grid_districts_no=mv_grid_districts) # return values from export_network() as tupels - run_id , nw_metadata, \ + run_id, nw_metadata, \ lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ lines, mvlv_mapping = export_network(nw) From 112e79e88683c22412da379e7919abca1226b275 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 16 Oct 2018 17:53:20 +0200 Subject: 
[PATCH 105/215] #270 included a second grid_district to test the import functions, finished import successful with two grid_districts with same run_id --- ding0/io/db_export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 9a2373aa..8dbfef80 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -704,7 +704,7 @@ def export_all_dataframes_to_db(engine, schema): # nw_metadata = json.dumps(nw_metadata) metadata_json = json.loads(nw_metadata) - ###################################################### + ##################################################### # tested with reiners_db and oedb create_ding0_sql_tables(oedb_engine, SCHEMA) From c1021e3a59e126bf4815650aefb56629e45ff742 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 17 Oct 2018 13:33:03 +0200 Subject: [PATCH 106/215] #270 updated docstring with instructions on how tho use the script --- ding0/io/db_export.py | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 8dbfef80..4de1e6b8 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -566,7 +566,6 @@ def drop_ding0_db_tables(engine, schema): print("Confirmation unclear, no action taken") -# ToDo: Functions works but engine.close() is not tested def db_tables_change_owner(engine, schema): tables = METADATA.sorted_tables @@ -598,13 +597,34 @@ def change_owner(engine, table, role, schema): for tab in tables: change_owner(engine, tab, 'oeuser', schema) - engine.close() - def export_all_dataframes_to_db(engine, schema): """ exports all data frames from func. export_network() to the db tables + Instructions: + 1. Create a database connection to the "OEDB" for example use the "from egoio.tools.db import connection" function + 2. Create a SA session: session = sessionmaker(bind=oedb_engine)() + 3. Create a ding0 network instance: nw = NetworkDing0(name='network') + 4. SET the srid from network config: SRID = str(int(nw.config['geo']['srid'])) + 5. Choose the grid_districts for the ding0 run (nothing chosen all grid_districts will be imported) + mv_grid_districts = [3040, 3045] + 6. run ding0 on selected mv_grid_district + 7. call function export_network from export.py -> this provides the run_id, network metadata as json + and all ding0 result data as pandas data frames + 8. json.loads the metadata, it is needed to provide the values for the + versioning table + 9. Create a database connection to your database for example use the "from egoio.tools.db import connection" function + 10. SET the SCHEMA you want to use within the connected database + 11. Create the ding0 sql tables: create_ding0_sql_tables(engine, SCHEMA) + 12. Call the function: export_all_dataframes_to_db(engine, SCHEMA) with your destination database and SCHEMA + additionally: + 13. If you used the "OEDB" as destination database change the table owner using the function: + db_tables_change_owner(engine, schema) + 14. 
If you need to drop the table call the function drop_ding0_db_tables(engine, schema) immediately after + the called create function: create_ding0_sql_tables(oedb_engine, SCHEMA) + drop_ding0_db_tables(oedb_engine, SCHEMA) + Parameters ---------- :param engine:sqlalchemy.engine.base.Engine` From 51de9cb5e250f9b424d5974a17db1107acd9c404 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 17 Oct 2018 13:54:05 +0200 Subject: [PATCH 107/215] #270 updated docstring --- ding0/io/db_export.py | 68 +++++++++++++++++++++++++++---------------- ding0/io/export.py | 1 - 2 files changed, 43 insertions(+), 26 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 4de1e6b8..b1a00d26 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -68,8 +68,11 @@ def load_json_files(): Parameters ---------- - :return: dict: jsonmetadata - contains all .json file names from the folder + + Returns + ------- + jsonmetadata : dict + contains all .json file names from the folder """ full_dir = os.walk(str(METADATA_STRING_FOLDER)) @@ -94,7 +97,9 @@ def prepare_metadatastring_fordb(table): table: str table name of the sqlAlchemy table - :return: mdsstring:str + Returns + ------- + mdsstring:str Contains the .json file as string """ @@ -118,9 +123,9 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): engine: :py:mod:`sqlalchemy.engine.base.Engine` Sqlalchemy database engine - ding0_schema: :obj:`str` + ding0_schema : str The schema in which the tables are to be created - Default: None + Default: static SCHEMA """ # 1 versioning table @@ -347,9 +352,16 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): def create_wkt_element(geom): """ Use GeoAlchemy's WKTElement to create a geom with SRID + GeoAlchemy2 WKTElement (PostGis func:ST_GeomFromText) - :param geom: Shaply geometry from script export.py - :return: GeoAlchemy2 WKTElement (PostGis func:ST_GeomFromText) + Parameters + ---------- + geom: Shaply geometry from script export.py + + Returns + ------- + None : None + Returns None if the data frame does not contain any geometry """ if geom is not None: return WKTElement(geom, srid=int(SRID), extended=True) @@ -372,9 +384,9 @@ def df_sql_write(engine, schema, db_table, dataframe, geom_type=None): Parameters ---------- - dataframe: :pandas:`DataFrame` - The pandas dataframe to be transferred to its - apprpritate db_table + pandas.DataFrame + dataframe : The pandas dataframe to be transferred to its + apprpritate db_table db_table: :py:mod:`sqlalchemy.sql.schema.Table` A table instance definition from sqlalchemy. 
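The docstring changes above describe how create_wkt_element() and df_sql_write() hand a geometry column to the database; the following is a minimal sketch of that pattern, assuming a dataframe whose 'geom' column holds WKT strings, an SRID of 4326 and purely illustrative helper and table names:

    import pandas as pd
    from geoalchemy2 import Geometry, WKTElement

    SRID = 4326  # assumed here; ding0 reads it from nw.config['geo']['srid']

    def wkt_with_srid(geom):
        # roughly like create_wkt_element(): attach the SRID, keep missing geometries as None
        return WKTElement(geom, srid=SRID) if geom is not None else None

    def write_point_table(engine, schema, table_name, dataframe):
        # roughly like df_sql_write() for point geometries: convert the geom column,
        # then append the frame to the target table via GeoAlchemy2
        df = dataframe.copy()
        df['geom'] = df['geom'].apply(wkt_with_srid)
        df.to_sql(table_name, con=engine, schema=schema, if_exists='append',
                  index=False, dtype={'geom': Geometry('POINT', srid=SRID)})

df_sql_write() itself switches the geometry type per exported table (e.g. 'POINT' or 'GEOMETRY'), which is what the geom_type parameter documented above controls.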
@@ -383,10 +395,11 @@ def df_sql_write(engine, schema, db_table, dataframe, geom_type=None): engine: :py:mod:`sqlalchemy.engine.base.Engine` Sqlalchemy database engine - schema: DB schema + schema: str + The schema in which the tables are to be created - geom_type: Prameter for handling data frames with - different geometry types + geom_type: str + Prameter for handling data frames with different geometry types """ # rename data frame column DB like @@ -461,12 +474,14 @@ def export_df_to_db(engine, schema, df, tabletype): Parameters ---------- - :param engine: sqlalchemy.engine.base.Engine` + engine: sqlalchemy.engine.base.Engine` Sqlalchemy database engine - :param schema: - :param df: - :param tabletype: Set the destination table where the pd data frame will - be stored in + schema : str + The schema in which the tables are to be created + pandas.DataFrame + df : pandas data frame + tabletype : str + Set the destination table where the pd data frame will be stored in """ print("Exporting table type : {}".format(tabletype)) if tabletype == 'line': @@ -518,13 +533,14 @@ def export_df_to_db(engine, schema, df, tabletype): # ToDo: function works but throws unexpected error (versioning tbl dosent exists) -def drop_ding0_db_tables(engine, schema): +def drop_ding0_db_tables(engine): """ Instructions: In order to drop tables all tables need to be stored in METADATA (create tables before dropping them) - :param engine: sqlalchemy.engine.base.Engine` + + Parameters + ---------- + engine: sqlalchemy.engine.base.Engine` Sqlalchemy database engine - :param schema: - :return: """ tables = METADATA.sorted_tables reversed_tables = reversed(tables) @@ -577,7 +593,7 @@ def change_owner(engine, table, role, schema): ---------- engine: sqlalchemy session object A valid connection to a database - schema: + schema: The schema in which the tables are to be created table : sqlalchmy Table class definition The database table role : str @@ -624,12 +640,14 @@ def export_all_dataframes_to_db(engine, schema): 14. If you need to drop the table call the function drop_ding0_db_tables(engine, schema) immediately after the called create function: create_ding0_sql_tables(oedb_engine, SCHEMA) drop_ding0_db_tables(oedb_engine, SCHEMA) + 15. 
Check if all metadata strings are present to the current folder and added as SQL comment on table Parameters ---------- - :param engine:sqlalchemy.engine.base.Engine` + engine : sqlalchemy.engine.base.Engine Sqlalchemy database engine - :param schema: + schema : str + The schema in which the tables are to be created """ if engine.dialect.has_table(engine, DING0_TABLES["versioning"]): diff --git a/ding0/io/export.py b/ding0/io/export.py index 91df7e1b..e716e766 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -25,7 +25,6 @@ from ding0.core.network.loads import LVLoadDing0, MVLoadDing0 from ding0.core import LVLoadAreaCentreDing0 -from geoalchemy2.shape import from_shape, to_shape from shapely.geometry import Point, MultiPoint, MultiLineString, LineString, MultiPolygon, shape, mapping From c04e8eb07c5bc8d49d68a6a0a97c757640ee4619 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 17 Oct 2018 13:56:39 +0200 Subject: [PATCH 108/215] =?UTF-8?q?#270=20removed=20done=20ToDo=C2=B4s?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ding0/io/db_export.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index b1a00d26..ea4322d3 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -187,7 +187,6 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): schema=ding0_schema, comment=prepare_metadatastring_fordb('lv_generator') ) - # ToDo: Check if right geom type # 5 ding0 lv_load table ding0_lv_load = Table(DING0_TABLES['lv_load'], METADATA, Column('id', Integer, primary_key=True), @@ -298,7 +297,6 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), Column('name', String(100)), - # ToDo: Check geometry type Column('geom', Geometry('GEOMETRY', 4326)), Column('is_aggregated', Boolean), Column('consumption', String(100)), @@ -518,7 +516,6 @@ def export_df_to_db(engine, schema, df, tabletype): elif tabletype == 'mv_gen': df_sql_write(engine, schema, DING0_TABLES['mv_generator'], df, 'POINT') - # ToDo: Check the geom_type elif tabletype == 'mv_load': df_sql_write(engine, schema, DING0_TABLES['mv_load'], df, 'GEOMETRY') From 4b2e2ef7f03283ac4b3fb635385823720e1024f8 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 17 Oct 2018 16:43:26 +0200 Subject: [PATCH 109/215] #275 added the SCHEMA to __main__ and changed the parameter ding0_schema in create_ding0_sql_tables() and schema in export_all_dataframes_to_db(), already tested --- ding0/io/db_export.py | 38 ++++++++++++++++++++------------------ 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index ea4322d3..871ddf94 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -38,7 +38,7 @@ METADATA = DECLARATIVE_BASE.metadata # Set the Database schema which you want to add the tables to -SCHEMA = "model_draft" +# SCHEMA = "model_draft" # Metadata folder Path METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'io', 'metadatastrings') @@ -114,7 +114,7 @@ def prepare_metadatastring_fordb(table): return mdsstring -def create_ding0_sql_tables(engine, ding0_schema=SCHEMA): +def create_ding0_sql_tables(engine, ding0_schema): """ Create the 16 ding0 tables @@ -530,7 +530,7 @@ def export_df_to_db(engine, schema, df, tabletype): # ToDo: function works but throws unexpected error (versioning tbl dosent exists) -def drop_ding0_db_tables(engine): +def 
drop_ding0_db_tables(engine, schema): """ Instructions: In order to drop tables all tables need to be stored in METADATA (create tables before dropping them) @@ -661,42 +661,42 @@ def export_all_dataframes_to_db(engine, schema): df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) - # # 1 + # 1 export_df_to_db(engine, schema, lines, "line") - # # 2 + # 2 export_df_to_db(engine, schema, lv_cd, "lv_cd") - # # 3 + # 3 export_df_to_db(engine, schema, lv_gen, "lv_gen") - # # 4 + # 4 export_df_to_db(engine, schema, lv_stations, "lv_station") - # # 5 + # 5 export_df_to_db(engine, schema, lv_loads, "lv_load") - # # 6 + # 6 export_df_to_db(engine, schema, lv_grid, "lv_grid") - # # 7 + # 7 export_df_to_db(engine, schema, mv_cb, "mv_cb") - # # 8 + # 8 export_df_to_db(engine, schema, mv_cd, "mv_cd") # 9 export_df_to_db(engine, schema, mv_gen, "mv_gen") - # # 10 + # 10 export_df_to_db(engine, schema, mv_stations, "mv_station") - # # 11 + # 11 export_df_to_db(engine, schema, mv_loads, "mv_load") # 12 export_df_to_db(engine, schema, mv_grid, "mv_grid") - # # 13 + # 13 export_df_to_db(engine, schema, mvlv_trafos, "mvlv_trafo") - # # 14 + # 14 export_df_to_db(engine, schema, hvmv_trafos, "hvmv_trafo") - # # 15 + # 15 export_df_to_db(engine, schema, mvlv_mapping, "mvlv_mapping") else: raise KeyError("a run_id already present! No tables are input!") else: - print("There is no " + DING0_TABLES["versioning"] + " table in the schema: " + SCHEMA) + print("There is no " + DING0_TABLES["versioning"] + " table in the schema: " + schema) if __name__ == "__main__": @@ -708,6 +708,9 @@ def export_all_dataframes_to_db(engine, schema): # Testing Database reiners_engine = connection(section='reiners_db') + # Set the Database schema which you want to add the tables to + SCHEMA = "model_draft" + # #########Ding0 Network and NW Metadata################ # create ding0 Network instance @@ -741,7 +744,6 @@ def export_all_dataframes_to_db(engine, schema): ##################################################### - # tested with reiners_db and oedb create_ding0_sql_tables(oedb_engine, SCHEMA) # drop_ding0_db_tables(oedb_engine, SCHEMA) # db_tables_change_owner(oedb_engine, SCHEMA) From 75fe933ea10f67b216579aab6cc8564bf827a67b Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 17 Oct 2018 18:34:52 +0200 Subject: [PATCH 110/215] #275 SA Table definition: changed length of column "subtype" for the tables ding0_mv_generator and ding0_lv_generator --- ding0/io/db_export.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 871ddf94..c76cc36b 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -179,7 +179,7 @@ def create_ding0_sql_tables(engine, ding0_schema): Column('lv_grid_id', BigInteger), Column('geom', Geometry('POINT', 4326)), Column('type', String(22)), - Column('subtype', String(22)), + Column('subtype', String(30)), Column('v_level', Integer), Column('nominal_capacity', Float(10)), Column('weather_cell_id', BigInteger), @@ -282,7 +282,7 @@ def create_ding0_sql_tables(engine, ding0_schema): Column('name', String(100)), Column('geom', Geometry('POINT', 4326)), Column('type', String(22)), - Column('subtype', String(22)), + Column('subtype', String(30)), Column('v_level', Integer), Column('nominal_capacity', Float(10)), Column('weather_cell_id', BigInteger), From 8cdaad5ba9be2bf97cdc4c54b09662b0cf72c212 Mon Sep 17 00:00:00 2001 From: boltbeard Date: Wed, 17 Oct 2018 20:25:28 +0200 Subject: [PATCH 111/215] Changed names of tables to 
reflect appropriate naming in oedb --- ding0/io/db_export.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index b1a00d26..81509ffc 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -44,22 +44,22 @@ METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'io', 'metadatastrings') # set your Table names -DING0_TABLES = {'versioning': 'ego_ding0_versioning', - 'line': 'ego_ding0_line', - 'lv_branchtee': 'ego_ding0_lv_branchtee', - 'lv_generator': 'ego_ding0_lv_generator', - 'lv_load': 'ego_ding0_lv_load', - 'lv_grid': 'ego_ding0_lv_grid', - 'lv_station': 'ego_ding0_lv_station', - 'mvlv_transformer': 'ego_ding0_mvlv_transformer', - 'mvlv_mapping': 'ego_ding0_mvlv_mapping', - 'mv_branchtee': 'ego_ding0_mv_branchtee', - 'mv_circuitbreaker': 'ego_ding0_mv_circuitbreaker', - 'mv_generator': 'ego_ding0_mv_generator', - 'mv_load': 'ego_ding0_mv_load', - 'mv_grid': 'ego_ding0_mv_grid', - 'mv_station': 'ego_ding0_mv_station', - 'hvmv_transformer': 'ego_ding0_hvmv_transformer'} +DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning', + 'line': 'ego_grid_ding0_line', + 'lv_branchtee': 'ego_grid_ding0_lv_branchtee', + 'lv_generator': 'ego_grid_ding0_lv_generator', + 'lv_load': 'ego_grid_ding0_lv_load', + 'lv_grid': 'ego_grid_ding0_lv_grid', + 'lv_station': 'ego_grid_ding0_lv_station', + 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer', + 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping', + 'mv_branchtee': 'ego_grid_ding0_mv_branchtee', + 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker', + 'mv_generator': 'ego_grid_ding0_mv_generator', + 'mv_load': 'ego_grid_ding0_mv_load', + 'mv_grid': 'ego_grid_ding0_mv_grid', + 'mv_station': 'ego_grid_ding0_mv_station', + 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer'} def load_json_files(): From ca6ca4e8ef86686430963dee460588f2769371de Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Fri, 19 Oct 2018 11:27:08 +0200 Subject: [PATCH 112/215] #275 changed schema in metadata_string.json files, added script for versioning the data --- ding0/io/ego_dp_versioning.py | 155 ++++++++++++++++++ .../io/metadatastrings/hvmv_transformer.json | 2 +- ding0/io/metadatastrings/line.json | 2 +- ding0/io/metadatastrings/lv_branchtee.json | 2 +- ding0/io/metadatastrings/lv_generator.json | 2 +- ding0/io/metadatastrings/lv_grid.json | 2 +- ding0/io/metadatastrings/lv_load.json | 2 +- ding0/io/metadatastrings/lv_station.json | 2 +- ding0/io/metadatastrings/mv_branchtee.json | 2 +- .../io/metadatastrings/mv_circuitbreaker.json | 2 +- ding0/io/metadatastrings/mv_generator.json | 2 +- ding0/io/metadatastrings/mv_grid.json | 2 +- ding0/io/metadatastrings/mv_load.json | 2 +- ding0/io/metadatastrings/mv_station.json | 2 +- ding0/io/metadatastrings/mvlv_mapping.json | 2 +- .../io/metadatastrings/mvlv_transformer.json | 2 +- ding0/io/metadatastrings/versioning.json | 2 +- 17 files changed, 171 insertions(+), 16 deletions(-) create mode 100644 ding0/io/ego_dp_versioning.py diff --git a/ding0/io/ego_dp_versioning.py b/ding0/io/ego_dp_versioning.py new file mode 100644 index 00000000..8a90131e --- /dev/null +++ b/ding0/io/ego_dp_versioning.py @@ -0,0 +1,155 @@ +# """This file is part of DINGO, the DIstribution Network GeneratOr. +# DINGO is a tool to generate synthetic medium and low voltage power +# distribution grids based on open data. 
+# +# It is developed in the project open_eGo: https://openegoproject.wordpress.com +# +# DING0 lives at github: https://github.com/openego/ding0/ +# The documentation is available on RTD: http://ding0.readthedocs.io""" +# +# __copyright__ = "Reiner Lemoine Institut gGmbH" +# __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +# __url__ = "https://github.com/openego/ding0/blob/master/LICENSE" +# __author__ = "jh-RLI" +# +# +# import json +# import os +# +# from egoio.tools.db import connection +# +# from sqlalchemy import MetaData +# +# # set your Table names +# DING0_TABLES = {'versioning': 'ego_ding0_versioning', +# 'line': 'ego_ding0_line', +# 'lv_branchtee': 'ego_ding0_lv_branchtee', +# 'lv_generator': 'ego_ding0_lv_generator', +# 'lv_load': 'ego_ding0_lv_load', +# 'lv_grid': 'ego_ding0_lv_grid', +# 'lv_station': 'ego_ding0_lv_station', +# 'mvlv_transformer': 'ego_ding0_mvlv_transformer', +# 'mvlv_mapping': 'ego_ding0_mvlv_mapping', +# 'mv_branchtee': 'ego_ding0_mv_branchtee', +# 'mv_circuitbreaker': 'ego_ding0_mv_circuitbreaker', +# 'mv_generator': 'ego_ding0_mv_generator', +# 'mv_load': 'ego_ding0_mv_load', +# 'mv_grid': 'ego_ding0_mv_grid', +# 'mv_station': 'ego_ding0_mv_station', +# 'hvmv_transformer': 'ego_ding0_hvmv_transformer'} +# +# # #########SQLAlchemy and DB table################ +# #source +# oedb_engine = connection(section='oedb') +# # Testing Database -> destination +# reiners_engine = connection(section='reiners_db') +# +# REFLICTED_SCHEMA = "model_draft" +# VERSIONING_SCHEMA = "grid" +# +# META = MetaData() +# META.reflect(bind=reiners_engine, schema=REFLICTED_SCHEMA, only=DING0_TABLES['versioning', 'line', 'lv_branchtee', +# 'lv_generator', 'lv_load', 'lv_grid', +# 'lv_station', 'mvlv_transformer', +# 'mvlv_mapping', 'mv_branchtee', +# 'mv_circuitbreaker', 'mv_generator', +# 'mv_load', 'mv_grid', 'mv_station', +# 'hvmv_transformer']) +# # ################################################ +# +# tables = META.metadata.tables +# for tbl in tables: +# print ('##################################') +# print (tbl) +# print ( tables[tbl].select()) +# data = oedb_engine.execute(tables[tbl].select()).fetchall() +# for a in data: print(a) +# if data: +# print (tables[tbl].insert()) +# reiners_engine.execute( tables[tbl].insert(), data) + +#!/usr/bin/env python + +import sys +from sqlalchemy import create_engine, MetaData, Table +from sqlalchemy.orm import sessionmaker +from sqlalchemy.ext.declarative import declarative_base + +from egoio.tools.db import connection + + +# set your Table names +# DING0_TABLES = {'versioning': 'ego_ding0_versioning', +# 'line': 'ego_ding0_line', +# 'lv_branchtee': 'ego_ding0_lv_branchtee', +# 'lv_generator': 'ego_ding0_lv_generator', +# 'lv_load': 'ego_ding0_lv_load', +# 'lv_grid': 'ego_ding0_lv_grid', +# 'lv_station': 'ego_ding0_lv_station', +# 'mvlv_transformer': 'ego_ding0_mvlv_transformer', +# 'mvlv_mapping': 'ego_ding0_mvlv_mapping', +# 'mv_branchtee': 'ego_ding0_mv_branchtee', +# 'mv_circuitbreaker': 'ego_ding0_mv_circuitbreaker', +# 'mv_generator': 'ego_ding0_mv_generator', +# 'mv_load': 'ego_ding0_mv_load', +# 'mv_grid': 'ego_ding0_mv_grid', +# 'mv_station': 'ego_ding0_mv_station', +# 'hvmv_transformer': 'ego_ding0_hvmv_transformer'} + +DING0_TABLES = {'mv_generator': 'ego_ding0_mv_generator'} + + +def get_table_names(t): + tables = [] + for k, v in t.items(): + tables.append(v) + return tables + + +def make_session(engine): + Session = sessionmaker(bind=engine) + return Session(), engine + + +def pull_data(from_db, 
s_schema, to_db, d_schema, tables): + source, sengine = make_session(from_db) + smeta = MetaData(bind=sengine, schema=s_schema) + destination, dengine = make_session(to_db) + + for table_name in get_table_names(DING0_TABLES): + print('Processing', table_name) + print('Pulling schema from source server') + table = Table(table_name, smeta, autoload=True) + table.schema = d_schema + print('Creating table on destination server') + table.metadata.create_all(dengine, checkfirst=True) + new_record = quick_mapper(table) + columns = table.columns.keys() + print('Transferring records') + for record in source.query(table).all(): + data = dict( + [(str(column), getattr(record, column)) for column in columns] + ) + destination.merge(new_record(**data)) + print('Committing changes') + destination.commit() + + +def quick_mapper(table): + Base = declarative_base() + class GenericMapper(Base): + __table__ = table + return GenericMapper + + +if __name__ == '__main__': + # source + oedb_engine = connection(section='oedb') + # # Testing Database -> destination + reiners_engine = connection(section='reiners_db') + + SOURCE_SCHEMA = 'model_draft' + DESTINATION_SCHEMA = 'grid' + tables = get_table_names(DING0_TABLES) + + pull_data(oedb_engine, SOURCE_SCHEMA, oedb_engine, DESTINATION_SCHEMA, tables) diff --git a/ding0/io/metadatastrings/hvmv_transformer.json b/ding0/io/metadatastrings/hvmv_transformer.json index a61c1ecc..e44eac67 100644 --- a/ding0/io/metadatastrings/hvmv_transformer.json +++ b/ding0/io/metadatastrings/hvmv_transformer.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": "model_draft.ego_grid_ding0_hvmv_transformer", + {"name": "grid.ego_grid_ding0_hvmv_transformer", "format": "PostgreSQL", "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, diff --git a/ding0/io/metadatastrings/line.json b/ding0/io/metadatastrings/line.json index a577a50c..1f2c8449 100644 --- a/ding0/io/metadatastrings/line.json +++ b/ding0/io/metadatastrings/line.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": "model_draft.ego_grid_ding0_line", + {"name": "grid.ego_grid_ding0_line", "format": "PostgreSQL", "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, diff --git a/ding0/io/metadatastrings/lv_branchtee.json b/ding0/io/metadatastrings/lv_branchtee.json index 4d9a17eb..3b29c6ca 100644 --- a/ding0/io/metadatastrings/lv_branchtee.json +++ b/ding0/io/metadatastrings/lv_branchtee.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": "model_draft.ego_grid_ding0_lv_branchtee", + {"name": "grid.ego_grid_ding0_lv_branchtee", "format": "PostgreSQL", "fields": [ {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, diff --git a/ding0/io/metadatastrings/lv_generator.json b/ding0/io/metadatastrings/lv_generator.json 
index 68ef3661..769164e2 100644 --- a/ding0/io/metadatastrings/lv_generator.json +++ b/ding0/io/metadatastrings/lv_generator.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": "model_draft.ego_grid_ding0_lv_generator", + {"name": "grid.ego_grid_ding0_lv_generator", "format": "PostgreSQL", "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, diff --git a/ding0/io/metadatastrings/lv_grid.json b/ding0/io/metadatastrings/lv_grid.json index 4ef26f8c..263d8a1d 100644 --- a/ding0/io/metadatastrings/lv_grid.json +++ b/ding0/io/metadatastrings/lv_grid.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": "model_draft.ego_grid_ding0_lv_grid", "format": "PostgreSQL", "fields": [ + {"name": "grid.ego_grid_ding0_lv_grid", "format": "PostgreSQL", "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, {"name": "id_db","description": "unambiguous number of LV-Grid","unit": "integer"}, diff --git a/ding0/io/metadatastrings/lv_load.json b/ding0/io/metadatastrings/lv_load.json index 66f3f992..7276db63 100644 --- a/ding0/io/metadatastrings/lv_load.json +++ b/ding0/io/metadatastrings/lv_load.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": "model_draft.ego_grid_ding0_lv_load", + {"name": "grid.ego_grid_ding0_lv_load", "format": "PostgreSQL", "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, diff --git a/ding0/io/metadatastrings/lv_station.json b/ding0/io/metadatastrings/lv_station.json index 6f83d319..b57b2bc3 100644 --- a/ding0/io/metadatastrings/lv_station.json +++ b/ding0/io/metadatastrings/lv_station.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": "model_draft.ego_grid_ding0_lv_station", + {"name": "grid.ego_grid_ding0_lv_station", "format": "PostgreSQL", "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, diff --git a/ding0/io/metadatastrings/mv_branchtee.json b/ding0/io/metadatastrings/mv_branchtee.json index 495b0f04..7663db04 100644 --- a/ding0/io/metadatastrings/mv_branchtee.json +++ b/ding0/io/metadatastrings/mv_branchtee.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": "model_draft.ego_grid_ding0_mv_branchtee", + {"name": 
"grid.ego_grid_ding0_mv_branchtee", "format": "PostgreSQL", "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, diff --git a/ding0/io/metadatastrings/mv_circuitbreaker.json b/ding0/io/metadatastrings/mv_circuitbreaker.json index 2cd2e00d..81b45a77 100644 --- a/ding0/io/metadatastrings/mv_circuitbreaker.json +++ b/ding0/io/metadatastrings/mv_circuitbreaker.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": "model_draft.ego_grid_ding0_mv_circuitbreaker", + {"name": "grid.ego_grid_ding0_mv_circuitbreaker", "format": "PostgreSQL", "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, diff --git a/ding0/io/metadatastrings/mv_generator.json b/ding0/io/metadatastrings/mv_generator.json index 4aa15423..a13ce158 100644 --- a/ding0/io/metadatastrings/mv_generator.json +++ b/ding0/io/metadatastrings/mv_generator.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": "model_draft.ego_grid_ding0_mv_generator", + {"name": "grid.ego_grid_ding0_mv_generator", "format": "PostgreSQL", "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, diff --git a/ding0/io/metadatastrings/mv_grid.json b/ding0/io/metadatastrings/mv_grid.json index 55dddaf0..e7632581 100644 --- a/ding0/io/metadatastrings/mv_grid.json +++ b/ding0/io/metadatastrings/mv_grid.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": "model_draft.ego_grid_ding0_mv_grid", + {"name": "grid.ego_grid_ding0_mv_grid", "format": "PostgreSQL", "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, diff --git a/ding0/io/metadatastrings/mv_load.json b/ding0/io/metadatastrings/mv_load.json index 64856203..5b246425 100644 --- a/ding0/io/metadatastrings/mv_load.json +++ b/ding0/io/metadatastrings/mv_load.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": "model_draft.ego_grid_ding0_mv_load", + {"name": "grid.ego_grid_ding0_mv_load", "format": "PostgreSQL", "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, diff --git a/ding0/io/metadatastrings/mv_station.json b/ding0/io/metadatastrings/mv_station.json index 5799d419..a28d6a3f 100644 --- a/ding0/io/metadatastrings/mv_station.json +++ b/ding0/io/metadatastrings/mv_station.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": 
"model_draft.ego_grid_ding0_mv_station", + {"name": "grid.ego_grid_ding0_mv_station", "format": "PostgreSQL", "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, diff --git a/ding0/io/metadatastrings/mvlv_mapping.json b/ding0/io/metadatastrings/mvlv_mapping.json index 48ce5032..831f4bb2 100644 --- a/ding0/io/metadatastrings/mvlv_mapping.json +++ b/ding0/io/metadatastrings/mvlv_mapping.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": "model_draft.ego_grid_ding0_mvlv_mapping", + {"name": "grid.ego_grid_ding0_mvlv_mapping", "format": "PostgreSQL", "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, diff --git a/ding0/io/metadatastrings/mvlv_transformer.json b/ding0/io/metadatastrings/mvlv_transformer.json index 290982c9..ff09a4fb 100644 --- a/ding0/io/metadatastrings/mvlv_transformer.json +++ b/ding0/io/metadatastrings/mvlv_transformer.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": "model_draft.ego_grid_ding0_mvlv_transformer", + {"name": "grid.ego_grid_ding0_mvlv_transformer", "format": "PostgreSQL", "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, diff --git a/ding0/io/metadatastrings/versioning.json b/ding0/io/metadatastrings/versioning.json index 51dec620..f67b5bcc 100644 --- a/ding0/io/metadatastrings/versioning.json +++ b/ding0/io/metadatastrings/versioning.json @@ -25,7 +25,7 @@ {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], "resources": [ - {"name": "model_draft.ego_grid_ding0_versioning", + {"name": "grid.ego_grid_ding0_versioning", "format": "PostgreSQL", "fields": [ {"name": "id","description": "unambiguous unique numer","unit": "integer"}, From 7c1552bb9e2a16f6fe1b2c2dc44076cfb5161133 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Fri, 19 Oct 2018 16:11:18 +0200 Subject: [PATCH 113/215] #270 added schema to has_table() --- ding0/io/db_export.py | 5 ++--- ding0/io/ego_dp_versioning.py | 40 +++++++++++++++++------------------ 2 files changed, 22 insertions(+), 23 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index bc8b5e70..b8163f4a 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -411,7 +411,6 @@ def df_sql_write(engine, schema, db_table, dataframe, geom_type=None): if 'geom' in dataframe.columns: if geom_type == 'POINT': sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) - sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('POINT', srid=int(SRID))}) @@ -647,7 +646,7 @@ def export_all_dataframes_to_db(engine, schema): The schema in which the tables are to be created """ - if engine.dialect.has_table(engine, DING0_TABLES["versioning"]): + if engine.dialect.has_table(engine, DING0_TABLES["versioning"], schema=schema): db_versioning = 
pd.read_sql_table(DING0_TABLES['versioning'], engine, schema, columns=['run_id', 'description']) @@ -706,7 +705,7 @@ def export_all_dataframes_to_db(engine, schema): session = sessionmaker(bind=oedb_engine)() # Testing Database - reiners_engine = connection(section='reiners_db') + # reiners_engine = connection(section='reiners_db') # Set the Database schema which you want to add the tables to SCHEMA = "model_draft" diff --git a/ding0/io/ego_dp_versioning.py b/ding0/io/ego_dp_versioning.py index 8a90131e..114f61b0 100644 --- a/ding0/io/ego_dp_versioning.py +++ b/ding0/io/ego_dp_versioning.py @@ -78,25 +78,25 @@ from egoio.tools.db import connection -# set your Table names -# DING0_TABLES = {'versioning': 'ego_ding0_versioning', -# 'line': 'ego_ding0_line', -# 'lv_branchtee': 'ego_ding0_lv_branchtee', -# 'lv_generator': 'ego_ding0_lv_generator', -# 'lv_load': 'ego_ding0_lv_load', -# 'lv_grid': 'ego_ding0_lv_grid', -# 'lv_station': 'ego_ding0_lv_station', -# 'mvlv_transformer': 'ego_ding0_mvlv_transformer', -# 'mvlv_mapping': 'ego_ding0_mvlv_mapping', -# 'mv_branchtee': 'ego_ding0_mv_branchtee', -# 'mv_circuitbreaker': 'ego_ding0_mv_circuitbreaker', -# 'mv_generator': 'ego_ding0_mv_generator', -# 'mv_load': 'ego_ding0_mv_load', -# 'mv_grid': 'ego_ding0_mv_grid', -# 'mv_station': 'ego_ding0_mv_station', -# 'hvmv_transformer': 'ego_ding0_hvmv_transformer'} - -DING0_TABLES = {'mv_generator': 'ego_ding0_mv_generator'} +# # set your Table names +# DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning', +# 'line': 'ego_grid_ding0_line', +# 'lv_branchtee': 'ego_grid_ding0_lv_branchtee', +# 'lv_generator': 'ego_grid_ding0_lv_generator', +# 'lv_load': 'ego_grid_ding0_lv_load', +# 'lv_grid': 'ego_grid_ding0_lv_grid', +# 'lv_station': 'ego_grid_ding0_lv_station', +# 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer', +# 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping', +# 'mv_branchtee': 'ego_grid_ding0_mv_branchtee', +# 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker', +# 'mv_generator': 'ego_grid_ding0_mv_generator', +# 'mv_load': 'ego_grid_ding0_mv_load', +# 'mv_grid': 'ego_grid_ding0_mv_grid', +# 'mv_station': 'ego_grid_ding0_mv_station', +# 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer'} + +DING0_TABLES = {'mv_generator': 'ego_grid_ding0_mv_generator'} def get_table_names(t): @@ -121,7 +121,7 @@ def pull_data(from_db, s_schema, to_db, d_schema, tables): print('Pulling schema from source server') table = Table(table_name, smeta, autoload=True) table.schema = d_schema - print('Creating table on destination server') + print('Creating table on destination server or schema') table.metadata.create_all(dengine, checkfirst=True) new_record = quick_mapper(table) columns = table.columns.keys() From cef0813574864efc919cbe10783b8b6a8457cbe2 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Fri, 19 Oct 2018 16:12:14 +0200 Subject: [PATCH 114/215] #270 changed grid --- ding0/io/db_export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index b8163f4a..27b4342e 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -725,7 +725,7 @@ def export_all_dataframes_to_db(engine, schema): # RUN_ID = datetime.now().strftime("%Y%m%d%H%M%S") # choose MV Grid Districts to import - mv_grid_districts = [3040, 3045] + mv_grid_districts = [3040, 3046] # run DING0 on selected MV Grid District nw.run_ding0(session=session, From 3031d900ac40af88836b4eb1a6295a3e39aed057 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 22 Oct 2018 19:40:07 +0200 
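The schema argument added to has_table() in the hunk above matters because the whole export is guarded by the versioning table: a run_id may only be written to a schema once. A condensed, hypothetical helper showing that guard (the real check lives inline in export_all_dataframes_to_db() and raises a KeyError instead of returning a flag):

    import pandas as pd

    def run_id_can_be_written(engine, schema, versioning_table, run_id):
        # the versioning table must already exist in the target schema ...
        if not engine.dialect.has_table(engine, versioning_table, schema=schema):
            print("WARNING: no " + versioning_table + " table in schema " + schema)
            return False
        # ... and must not yet contain this run_id
        existing = pd.read_sql_table(versioning_table, engine, schema,
                                     columns=['run_id', 'description'])
        return run_id not in set(existing['run_id'])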
Subject: [PATCH 115/215] #270 added new script in order to insert values to the scenario_log --- ding0/io/ego_scenario_log.py | 84 ++++++++++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 ding0/io/ego_scenario_log.py diff --git a/ding0/io/ego_scenario_log.py b/ding0/io/ego_scenario_log.py new file mode 100644 index 00000000..3905b429 --- /dev/null +++ b/ding0/io/ego_scenario_log.py @@ -0,0 +1,84 @@ +""" +Write entry into scenario log table +""" + +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +__url__ = "https://github.com/openego/data_processing/blob/master/LICENSE" +__author__ = "nesnoj, Ludee" + + +from datetime import datetime +from sqlalchemy.orm import sessionmaker +from egoio.db_tables.model_draft import ScenarioLog as orm_scenario_log + + +def write_scenario_log(conn, project, version, io, schema, table, + script, entries=None, comment=None, metadata=None): + """ + Write entry into scenario log table + + Parameters + ---------- + conn: SQLAlchemy connection object + project: str + Project name + version: str + Version number + io: str + IO-type (input, output, temp) + schema: str + Database schema + table: str + Database table + script: str + Script name + entries: int + Number of entries + comment: str + Comment + metadata: str + Meta data + + Example + ------- + write_scenario_log(conn=conn, + project='eGoDP' + version='v0.3.0', + io='output', + schema='model_draft', + table='ego_demand_loadarea_peak_load', + script='peak_load_per_load_area.py', + entries=1000) + """ + + Session = sessionmaker(bind=conn) + session = Session() + + # extract user from connection details + # is there a better way? + try: + conn_details = conn.connection.connection.dsn + for entry in conn_details.split(' '): + if entry.split('=')[0] == 'user': + user = entry.split('=')[1] + break + except: + user = 'unknown' + + # Add data to orm object + log_entry = orm_scenario_log(project=project, + version=version, + io=io, + schema_name=schema, + table_name=table, + script_name=script, + entries=entries, + comment=comment, + user_name=user, + timestamp=datetime.now(), + meta_data=metadata) + + # Commit to DB + session.add(log_entry) + session.commit() From ea1355679344e071b980ad667c4325dbafc321b1 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 22 Oct 2018 19:46:10 +0200 Subject: [PATCH 116/215] #270 to ego_dp_versioning.py added func. for migrate_tables_to_destination which creates the tables from modele draft in schema grid and maps the records to the created tables also creates entry in the schenario_log for every versioned table, added func. 
change owner, added variable RUN_ID in order to insert in sceanrio_log, added METADATA_STRING_FOLDER in order to add the metadata.json to the scenario_log --- ding0/io/ego_dp_versioning.py | 177 +++++++++++++++++++++------------- 1 file changed, 108 insertions(+), 69 deletions(-) diff --git a/ding0/io/ego_dp_versioning.py b/ding0/io/ego_dp_versioning.py index 114f61b0..af520edc 100644 --- a/ding0/io/ego_dp_versioning.py +++ b/ding0/io/ego_dp_versioning.py @@ -11,72 +11,20 @@ # __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" # __url__ = "https://github.com/openego/ding0/blob/master/LICENSE" # __author__ = "jh-RLI" -# -# -# import json -# import os -# -# from egoio.tools.db import connection -# -# from sqlalchemy import MetaData -# -# # set your Table names -# DING0_TABLES = {'versioning': 'ego_ding0_versioning', -# 'line': 'ego_ding0_line', -# 'lv_branchtee': 'ego_ding0_lv_branchtee', -# 'lv_generator': 'ego_ding0_lv_generator', -# 'lv_load': 'ego_ding0_lv_load', -# 'lv_grid': 'ego_ding0_lv_grid', -# 'lv_station': 'ego_ding0_lv_station', -# 'mvlv_transformer': 'ego_ding0_mvlv_transformer', -# 'mvlv_mapping': 'ego_ding0_mvlv_mapping', -# 'mv_branchtee': 'ego_ding0_mv_branchtee', -# 'mv_circuitbreaker': 'ego_ding0_mv_circuitbreaker', -# 'mv_generator': 'ego_ding0_mv_generator', -# 'mv_load': 'ego_ding0_mv_load', -# 'mv_grid': 'ego_ding0_mv_grid', -# 'mv_station': 'ego_ding0_mv_station', -# 'hvmv_transformer': 'ego_ding0_hvmv_transformer'} -# -# # #########SQLAlchemy and DB table################ -# #source -# oedb_engine = connection(section='oedb') -# # Testing Database -> destination -# reiners_engine = connection(section='reiners_db') -# -# REFLICTED_SCHEMA = "model_draft" -# VERSIONING_SCHEMA = "grid" -# -# META = MetaData() -# META.reflect(bind=reiners_engine, schema=REFLICTED_SCHEMA, only=DING0_TABLES['versioning', 'line', 'lv_branchtee', -# 'lv_generator', 'lv_load', 'lv_grid', -# 'lv_station', 'mvlv_transformer', -# 'mvlv_mapping', 'mv_branchtee', -# 'mv_circuitbreaker', 'mv_generator', -# 'mv_load', 'mv_grid', 'mv_station', -# 'hvmv_transformer']) -# # ################################################ -# -# tables = META.metadata.tables -# for tbl in tables: -# print ('##################################') -# print (tbl) -# print ( tables[tbl].select()) -# data = oedb_engine.execute(tables[tbl].select()).fetchall() -# for a in data: print(a) -# if data: -# print (tables[tbl].insert()) -# reiners_engine.execute( tables[tbl].insert(), data) - -#!/usr/bin/env python - -import sys -from sqlalchemy import create_engine, MetaData, Table + +import os + +from sqlalchemy import create_engine, MetaData, Table, exc from sqlalchemy.orm import sessionmaker +from sqlalchemy.ext.automap import automap_base from sqlalchemy.ext.declarative import declarative_base +from geoalchemy2.types import Geometry, WKTElement from egoio.tools.db import connection +import ding0 +from ding0.io.db_export import prepare_metadatastring_fordb +from ding0.io.ego_scenario_log import write_scenario_log # # set your Table names # DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning', @@ -96,8 +44,27 @@ # 'mv_station': 'ego_grid_ding0_mv_station', # 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer'} -DING0_TABLES = {'mv_generator': 'ego_grid_ding0_mv_generator'} - +# set your Table names +DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning_test', + 'line': 'ego_grid_ding0_line_test', + 'lv_branchtee': 'ego_grid_ding0_lv_branchtee_test', + 'lv_generator': 'ego_grid_ding0_lv_generator_test', 
+ 'lv_load': 'ego_grid_ding0_lv_load_test', + 'lv_grid': 'ego_grid_ding0_lv_grid_test', + 'lv_station': 'ego_grid_ding0_lv_station_test', + 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer_test', + 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping_test', + 'mv_branchtee': 'ego_grid_ding0_mv_branchtee_test', + 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker_test', + 'mv_generator': 'ego_grid_ding0_mv_generator_test', + 'mv_load': 'ego_grid_ding0_mv_load_test', + 'mv_grid': 'ego_grid_ding0_mv_grid_test', + 'mv_station': 'ego_grid_ding0_mv_station_test', + 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer_test'} + +# DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning_test', +# 'line': 'ego_grid_ding0_line_test', +# 'lv_branchtee': 'ego_grid_ding0_lv_branchtee_test',} def get_table_names(t): tables = [] @@ -111,7 +78,21 @@ def make_session(engine): return Session(), engine -def pull_data(from_db, s_schema, to_db, d_schema, tables): +def migrate_tables_to_destination(from_db, s_schema, to_db, d_schema, runid=None): + """ + Note: This function will throw a exception caused by the already existing index. + Functionality is still given. + + Copys the table from source to the destination database.schema + + Parameters + ---------- + from_db: + s_schema: + to_db: + d_schema: + runid: + """ source, sengine = make_session(from_db) smeta = MetaData(bind=sengine, schema=s_schema) destination, dengine = make_session(to_db) @@ -120,9 +101,13 @@ def pull_data(from_db, s_schema, to_db, d_schema, tables): print('Processing', table_name) print('Pulling schema from source server') table = Table(table_name, smeta, autoload=True) - table.schema = d_schema print('Creating table on destination server or schema') - table.metadata.create_all(dengine, checkfirst=True) + try: + table.schema = d_schema + table.metadata.create_all(dengine, checkfirst=True) + except exc.ProgrammingError: + print("WARNING: The Index already exists, warning can be ignored.") + table.schema = s_schema new_record = quick_mapper(table) columns = table.columns.keys() print('Transferring records') @@ -130,18 +115,65 @@ def pull_data(from_db, s_schema, to_db, d_schema, tables): data = dict( [(str(column), getattr(record, column)) for column in columns] ) + table.schema = d_schema destination.merge(new_record(**data)) - print('Committing changes') - destination.commit() + + print('Committing changes') + # destination.commit() + + rows = destination.query(table_name).count() + json_tbl_name = [] + for k,v in DING0_TABLES.items(): + if v == table_name: + json_tbl_name.append(k) + metadata_string_json = prepare_metadatastring_fordb(json_tbl_name[0]) + write_scenario_log(oedb_engine, 'open_eGo', runid, 'output', s_schema, table_name, 'db_export.py', + entries=rows, comment='versioning', metadata=metadata_string_json) def quick_mapper(table): Base = declarative_base() + class GenericMapper(Base): __table__ = table return GenericMapper +def db_tables_change_owner(engine, schema): + DECLARATIVE_BASE = declarative_base() + METADATA = DECLARATIVE_BASE.metadata + + tables = METADATA.sorted_tables + + def change_owner(engine, table, role, schema): + """ + Gives access to database users/ groups + + Parameters + ---------- + engine: sqlalchemy session object + A valid connection to a database + schema: The schema in which the tables are to be created + table : sqlalchmy Table class definition + The database table + role : str + database role that access is granted to + """ + tablename = table + + grant_str = """ALTER TABLE {schema}.{table} + 
OWNER TO {role};""".format(schema=schema, table=tablename.name, + role=role) + + # engine.execute(grant_str) + engine.execution_options(autocommit=True).execute(grant_str) + + # engine.echo=True + + for tab in tables: + change_owner(engine, tab, 'oeuser', schema) + + if __name__ == '__main__': # source oedb_engine = connection(section='oedb') @@ -152,4 +184,11 @@ class GenericMapper(Base): DESTINATION_SCHEMA = 'grid' tables = get_table_names(DING0_TABLES) - pull_data(oedb_engine, SOURCE_SCHEMA, oedb_engine, DESTINATION_SCHEMA, tables) + # Enter the current run_id, Inserted in scenario_log + RUN_ID = '20181022161343' + + # Metadata folder Path + METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'io', 'metadatastrings') + + migrate_tables_to_destination(oedb_engine, SOURCE_SCHEMA, oedb_engine, DESTINATION_SCHEMA, RUN_ID) + # db_tables_change_owner(oedb_engine, DESTINATION_SCHEMA) From acc666c82c1e251c1d303f26db979cb740827f27 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 24 Oct 2018 11:58:33 +0200 Subject: [PATCH 117/215] #270 added further description, cleaned script from testing code --- ding0/io/ego_dp_versioning.py | 75 ++++++++++++++++------------------- 1 file changed, 34 insertions(+), 41 deletions(-) diff --git a/ding0/io/ego_dp_versioning.py b/ding0/io/ego_dp_versioning.py index af520edc..c838d84a 100644 --- a/ding0/io/ego_dp_versioning.py +++ b/ding0/io/ego_dp_versioning.py @@ -26,45 +26,24 @@ from ding0.io.db_export import prepare_metadatastring_fordb from ding0.io.ego_scenario_log import write_scenario_log -# # set your Table names -# DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning', -# 'line': 'ego_grid_ding0_line', -# 'lv_branchtee': 'ego_grid_ding0_lv_branchtee', -# 'lv_generator': 'ego_grid_ding0_lv_generator', -# 'lv_load': 'ego_grid_ding0_lv_load', -# 'lv_grid': 'ego_grid_ding0_lv_grid', -# 'lv_station': 'ego_grid_ding0_lv_station', -# 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer', -# 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping', -# 'mv_branchtee': 'ego_grid_ding0_mv_branchtee', -# 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker', -# 'mv_generator': 'ego_grid_ding0_mv_generator', -# 'mv_load': 'ego_grid_ding0_mv_load', -# 'mv_grid': 'ego_grid_ding0_mv_grid', -# 'mv_station': 'ego_grid_ding0_mv_station', -# 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer'} - # set your Table names -DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning_test', - 'line': 'ego_grid_ding0_line_test', - 'lv_branchtee': 'ego_grid_ding0_lv_branchtee_test', - 'lv_generator': 'ego_grid_ding0_lv_generator_test', - 'lv_load': 'ego_grid_ding0_lv_load_test', - 'lv_grid': 'ego_grid_ding0_lv_grid_test', - 'lv_station': 'ego_grid_ding0_lv_station_test', - 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer_test', - 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping_test', - 'mv_branchtee': 'ego_grid_ding0_mv_branchtee_test', - 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker_test', - 'mv_generator': 'ego_grid_ding0_mv_generator_test', - 'mv_load': 'ego_grid_ding0_mv_load_test', - 'mv_grid': 'ego_grid_ding0_mv_grid_test', - 'mv_station': 'ego_grid_ding0_mv_station_test', - 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer_test'} - -# DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning_test', -# 'line': 'ego_grid_ding0_line_test', -# 'lv_branchtee': 'ego_grid_ding0_lv_branchtee_test',} +DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning', + 'line': 'ego_grid_ding0_line', + 'lv_branchtee': 'ego_grid_ding0_lv_branchtee', + 'lv_generator': 
'ego_grid_ding0_lv_generator', + 'lv_load': 'ego_grid_ding0_lv_load', + 'lv_grid': 'ego_grid_ding0_lv_grid', + 'lv_station': 'ego_grid_ding0_lv_station', + 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer', + 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping', + 'mv_branchtee': 'ego_grid_ding0_mv_branchtee', + 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker', + 'mv_generator': 'ego_grid_ding0_mv_generator', + 'mv_load': 'ego_grid_ding0_mv_load', + 'mv_grid': 'ego_grid_ding0_mv_grid', + 'mv_station': 'ego_grid_ding0_mv_station', + 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer'} + def get_table_names(t): tables = [] @@ -85,6 +64,20 @@ def migrate_tables_to_destination(from_db, s_schema, to_db, d_schema, runid=None Copys the table from source to the destination database.schema + Step-by-Step: + 1. Set up the connection using the egoio.tools.db -> connection() function + 2. SET the SOURCE_SCHEMA and DESTINATION_SCHEMA + 3. Insert your table (key: names) to dict like DING0_TABLES + 4. Call the function get_table_names() with your Table dictionary as parameter save the result in + variable "tables = get_table_names(dict)" + 5. For ding0 data set the RUN_ID + 6. Save the dynamic path to the metadata_string.json in METADATA_STRING_FOLDER´ + Note: Metadata_string file names need to contain the the table name See: + https://github.com/openego/ding0/tree/features/stats-export/ding0/io/metadatastrings + 7. Call the function with parameters like: + migrate_tables_to_destination(oedb_engine, SOURCE_SCHEMA, oedb_engine, DESTINATION_SCHEMA, RUN_ID) + 8. In function migrate_tables_to_destination() check the function write_scenario_log() + 9. Check if the tables in your source schema exist and named eually to the table dict like in DING0_TABLES{} Parameters ---------- from_db: @@ -119,9 +112,9 @@ def migrate_tables_to_destination(from_db, s_schema, to_db, d_schema, runid=None destination.merge(new_record(**data)) print('Committing changes') - # destination.commit() + destination.commit() - rows = destination.query(table_name).count() + rows = destination.query(table.c.run_id).count() json_tbl_name = [] for k,v in DING0_TABLES.items(): if v == table_name: @@ -185,7 +178,7 @@ def change_owner(engine, table, role, schema): tables = get_table_names(DING0_TABLES) # Enter the current run_id, Inserted in scenario_log - RUN_ID = '20181022161343' + RUN_ID = '20181022185643' # Metadata folder Path METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'io', 'metadatastrings') From fb0bf58edb1c849fa2798f1b401fa8350e170e11 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 6 Feb 2019 17:59:14 +0100 Subject: [PATCH 118/215] added conda env .yml, added description how to setup the conda env --- ding0/io/env.txt | 6 ++++++ ding0/io/skeleton.yml | 35 +++++++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+) create mode 100644 ding0/io/env.txt create mode 100644 ding0/io/skeleton.yml diff --git a/ding0/io/env.txt b/ding0/io/env.txt new file mode 100644 index 00000000..1793dbc5 --- /dev/null +++ b/ding0/io/env.txt @@ -0,0 +1,6 @@ +This is the conda env that was used to develope the export funktionality +- open it and change the "name" to whatever you would like to call your environment +- use the command "conda env create -f skeleton.yml" +- run the command "pip install -U -e ego.io\" +- run the command "pip install -U -e ding0\" +- run the command "pip install -U -e eDisGo\" \ No newline at end of file diff --git a/ding0/io/skeleton.yml b/ding0/io/skeleton.yml new file mode 100644 index 
00000000..cc2cadc1 --- /dev/null +++ b/ding0/io/skeleton.yml @@ -0,0 +1,35 @@ +name: skeleton +channels: + - conda-forge + - defaults +dependencies: + - geoalchemy2=0.4.1 + - geopy=1.11.0 + - jupyter + - keyring + - matplotlib=1.5.3 + - multiprocess + - nb_conda + - networkx=1.11 + - numpy=1.11.3 + - openpyxl + - pandas=0.20.3 + - psycopg2 + - pyomo=5.5.0 + - pyproj=1.9.5.1 + - python=3.6.5 + - seaborn + - shapely=1.6.3 + - sphinx + - sphinx_rtd_theme + - sqlite + - xlrd + - sqlalchemy=1.2.0 + - unittest2 + - pip: + - demandlib + - keyrings.alt + - oedialect + - pypsa==0.11.0 + - workalendar + From 628f4b50b036c3a65617c2bd7634ba0d97d86805 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 7 Feb 2019 18:31:18 +0100 Subject: [PATCH 119/215] fixex typo, minor changes caused by inputs added for testing, added file to upload ding0 data from pickel files --- ding0/io/db_export.py | 35 +++++++++++++++------ ding0/io/ding0_pkl2db.py | 57 +++++++++++++++++++++++++++++++++++ ding0/io/ego_dp_versioning.py | 4 +-- 3 files changed, 83 insertions(+), 13 deletions(-) create mode 100644 ding0/io/ding0_pkl2db.py diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 27b4342e..db9ed2b7 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -61,6 +61,25 @@ 'mv_station': 'ego_grid_ding0_mv_station', 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer'} +# # set your Table names +# DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning_test', +# 'line': 'ego_grid_ding0_line_test', +# 'lv_branchtee': 'ego_grid_ding0_lv_branchtee_test', +# 'lv_generator': 'ego_grid_ding0_lv_generator_test', +# 'lv_load': 'ego_grid_ding0_lv_load_test', +# 'lv_grid': 'ego_grid_ding0_lv_grid_test', +# 'lv_station': 'ego_grid_ding0_lv_station_test', +# 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer_test', +# 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping_test', +# 'mv_branchtee': 'ego_grid_ding0_mv_branchtee_test', +# 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker_test', +# 'mv_generator': 'ego_grid_ding0_mv_generator_test', +# 'mv_load': 'ego_grid_ding0_mv_load_test', +# 'mv_grid': 'ego_grid_ding0_mv_grid_test', +# 'mv_station': 'ego_grid_ding0_mv_station_test', +# 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer_test'} + + def load_json_files(): """ @@ -374,8 +393,6 @@ def df_sql_write(engine, schema, db_table, dataframe, geom_type=None): correctly load its data to its appropriate sql table. Also handles the upload to a DB data frames with different geometry types. - .. ToDo: need to check for id_db instead of only 'id' in index label names - NOTE: This function does not check if the data frame columns matches the db_table fields, if they do not then no warning is given. @@ -529,10 +546,10 @@ def export_df_to_db(engine, schema, df, tabletype): # ToDo: function works but throws unexpected error (versioning tbl dosent exists) -def drop_ding0_db_tables(engine, schema): +def drop_ding0_db_tables(engine): """ Instructions: In order to drop tables all tables need to be stored in METADATA (create tables before dropping them) - + Drops the tables in the schema where they have been created. Parameters ---------- engine: sqlalchemy.engine.base.Engine` @@ -695,7 +712,7 @@ def export_all_dataframes_to_db(engine, schema): raise KeyError("a run_id already present! 
No tables are input!") else: - print("There is no " + DING0_TABLES["versioning"] + " table in the schema: " + schema) + print("WARNING: There is no " + DING0_TABLES["versioning"] + " table in the schema: " + schema) if __name__ == "__main__": @@ -709,6 +726,7 @@ def export_all_dataframes_to_db(engine, schema): # Set the Database schema which you want to add the tables to SCHEMA = "model_draft" + # SCHEMA = "public" # #########Ding0 Network and NW Metadata################ @@ -725,7 +743,7 @@ def export_all_dataframes_to_db(engine, schema): # RUN_ID = datetime.now().strftime("%Y%m%d%H%M%S") # choose MV Grid Districts to import - mv_grid_districts = [3040, 3046] + mv_grid_districts = [1, 2, 3, 4, 5] # run DING0 on selected MV Grid District nw.run_ding0(session=session, @@ -744,11 +762,8 @@ def export_all_dataframes_to_db(engine, schema): ##################################################### create_ding0_sql_tables(oedb_engine, SCHEMA) - # drop_ding0_db_tables(oedb_engine, SCHEMA) + # drop_ding0_db_tables(oedb_engine) # db_tables_change_owner(oedb_engine, SCHEMA) - # parameter: export_network_to_db(engine, schema, df, tabletype, srid=None) - # export_network_to_db(reiners_engine, SCHEMA, lv_gen, "lv_gen", metadata_json) - # export_network_to_db(CONNECTION, SCHEMA, mv_stations, "mv_stations", metadata_json) export_all_dataframes_to_db(oedb_engine, SCHEMA) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py new file mode 100644 index 00000000..0b320fc3 --- /dev/null +++ b/ding0/io/ding0_pkl2db.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +import os + +import numpy as np +import json + +from egoio.tools import db + +# import required modules of DING0 +from ding0.tools.logger import setup_logger +from ding0.tools.results import load_nd_from_pickle +from ding0.io.export import export_network +from ding0.io.db_export import METADATA, create_ding0_sql_tables, \ + export_all_dataframes_to_db, db_tables_change_owner, drop_ding0_db_tables +from sqlalchemy.orm import sessionmaker + + +# define logger +logger = setup_logger() + +# database connection/ session +oedb_engine = db.connection(section='oedb') +session = sessionmaker(bind=oedb_engine)() + +SCHEMA = "model_draft" + +create_ding0_sql_tables(oedb_engine, SCHEMA) +db_tables_change_owner(oedb_engine, SCHEMA) +# drop_ding0_db_tables(oedb_engine) + +# pickle file locations path to RLI_Daten_Flex01 mount +pkl_filepath = "/home/local/RL-INSTITUT/bharadwaj.narasimhan/rli_daten_flex/Ding0/20180823154014/" + + +# choose MV Grid Districts to import +grids = np.arange(1, 5) + +# generate all the grids and push them to oedb +for grid_no in grids: + + nw = load_nd_from_pickle(os.path.join(pkl_filepath, + 'ding0_grids__{}.pkl'.format(grid_no))) + + # Extract data from network and put it to DataFrames for csv and for oedb + run_id, nw_metadata, \ + lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ + mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ + lines, mvlv_mapping = export_network(nw) + + # Send data to OEDB + srid = str(int(nw.config['geo']['srid'])) + metadata_json = json.loads(nw_metadata) + + export_all_dataframes_to_db(oedb_engine, SCHEMA) + +#db_tables_change_owner(oedb_engine, SCHEMA) \ No newline at end of file diff --git a/ding0/io/ego_dp_versioning.py b/ding0/io/ego_dp_versioning.py index c838d84a..d2e006fc 100644 --- a/ding0/io/ego_dp_versioning.py +++ b/ding0/io/ego_dp_versioning.py @@ -16,9 +16,7 @@ from sqlalchemy import create_engine, MetaData, Table, exc from sqlalchemy.orm import sessionmaker -from 
sqlalchemy.ext.automap import automap_base from sqlalchemy.ext.declarative import declarative_base -from geoalchemy2.types import Geometry, WKTElement from egoio.tools.db import connection @@ -99,7 +97,7 @@ def migrate_tables_to_destination(from_db, s_schema, to_db, d_schema, runid=None table.schema = d_schema table.metadata.create_all(dengine, checkfirst=True) except exc.ProgrammingError: - print("WARNING: The Index already exists, warning can be ignored.") + print("WARNING: The Index on the table already exists, warning can be ignored.") table.schema = s_schema new_record = quick_mapper(table) columns = table.columns.keys() From 3d5edd4db46c4e307246c3d92cbb052b1d49b55a Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 14 Feb 2019 14:45:15 +0100 Subject: [PATCH 120/215] extending funktionality in order to integrate df export form pickel files, added paramters to func export_all_dataframes_to_db it now gets a list with the dataframes created for the network --- .../examples/example_single_grid_district.py | 2 +- ding0/io/db_export.py | 86 ++++++++++--------- ding0/io/ding0_pkl2db.py | 14 +-- ding0/io/skeleton.yml | 2 +- 4 files changed, 56 insertions(+), 48 deletions(-) diff --git a/ding0/examples/example_single_grid_district.py b/ding0/examples/example_single_grid_district.py index f3907100..ca94f672 100644 --- a/ding0/examples/example_single_grid_district.py +++ b/ding0/examples/example_single_grid_district.py @@ -33,7 +33,7 @@ # ===== MAIN ===== # database connection/ session -engine = db.connection(section='oedb') +engine = db.connection(section='oedb_vpn') session = sessionmaker(bind=engine)() # instantiate new ding0 network object diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index db9ed2b7..69ff5ca7 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -627,7 +627,7 @@ def change_owner(engine, table, role, schema): change_owner(engine, tab, 'oeuser', schema) -def export_all_dataframes_to_db(engine, schema): +def export_all_dataframes_to_db(engine, schema, md=None, all_df_list=None): """ exports all data frames from func. 
export_network() to the db tables @@ -672,41 +672,47 @@ def export_all_dataframes_to_db(engine, schema): # Use if just one run_id should be present to the DB table if db_versioning.empty: # this leads to wrong run_id if run_id is SET in __main__ -> 'run_id': metadata_json['run_id'] - metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], - 'description': str(metadata_json)}, index=[0]) - - df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) - - # 1 - export_df_to_db(engine, schema, lines, "line") - # 2 - export_df_to_db(engine, schema, lv_cd, "lv_cd") - # 3 - export_df_to_db(engine, schema, lv_gen, "lv_gen") - # 4 - export_df_to_db(engine, schema, lv_stations, "lv_station") - # 5 - export_df_to_db(engine, schema, lv_loads, "lv_load") - # 6 - export_df_to_db(engine, schema, lv_grid, "lv_grid") - # 7 - export_df_to_db(engine, schema, mv_cb, "mv_cb") - # 8 - export_df_to_db(engine, schema, mv_cd, "mv_cd") - # 9 - export_df_to_db(engine, schema, mv_gen, "mv_gen") - # 10 - export_df_to_db(engine, schema, mv_stations, "mv_station") - # 11 - export_df_to_db(engine, schema, mv_loads, "mv_load") - # 12 - export_df_to_db(engine, schema, mv_grid, "mv_grid") - # 13 - export_df_to_db(engine, schema, mvlv_trafos, "mvlv_trafo") - # 14 - export_df_to_db(engine, schema, hvmv_trafos, "hvmv_trafo") - # 15 - export_df_to_db(engine, schema, mvlv_mapping, "mvlv_mapping") + try: + metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], + 'description': str(metadata_json)}, index=[0]) + df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) + except: + print(md['run_id']) + metadata_df = pd.DataFrame({'run_id': md['run_id'], + 'description': str(md)}, index=[0]) + df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) + + for df in all_df_list: + # 1 + export_df_to_db(engine, schema, lines, "line") + # 2 + export_df_to_db(engine, schema, lv_cd, "lv_cd") + # 3 + export_df_to_db(engine, schema, lv_gen, "lv_gen") + # 4 + export_df_to_db(engine, schema, lv_stations, "lv_station") + # 5 + export_df_to_db(engine, schema, lv_loads, "lv_load") + # 6 + export_df_to_db(engine, schema, lv_grid, "lv_grid") + # 7 + export_df_to_db(engine, schema, mv_cb, "mv_cb") + # 8 + export_df_to_db(engine, schema, mv_cd, "mv_cd") + # 9 + export_df_to_db(engine, schema, mv_gen, "mv_gen") + # 10 + export_df_to_db(engine, schema, mv_stations, "mv_station") + # 11 + export_df_to_db(engine, schema, mv_loads, "mv_load") + # 12 + export_df_to_db(engine, schema, mv_grid, "mv_grid") + # 13 + export_df_to_db(engine, schema, mvlv_trafos, "mvlv_trafo") + # 14 + export_df_to_db(engine, schema, hvmv_trafos, "hvmv_trafo") + # 15 + export_df_to_db(engine, schema, mvlv_mapping, "mvlv_mapping") else: raise KeyError("a run_id already present! 
No tables are input!") @@ -743,7 +749,8 @@ def export_all_dataframes_to_db(engine, schema): # RUN_ID = datetime.now().strftime("%Y%m%d%H%M%S") # choose MV Grid Districts to import - mv_grid_districts = [1, 2, 3, 4, 5] + # needs to be a list with Integers + mv_grid_districts = list(range(1, 1001)) # run DING0 on selected MV Grid District nw.run_ding0(session=session, @@ -762,8 +769,5 @@ def export_all_dataframes_to_db(engine, schema): ##################################################### create_ding0_sql_tables(oedb_engine, SCHEMA) - # drop_ding0_db_tables(oedb_engine) + drop_ding0_db_tables(oedb_engine) # db_tables_change_owner(oedb_engine, SCHEMA) - - export_all_dataframes_to_db(oedb_engine, SCHEMA) - diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 0b320fc3..f8287670 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -30,17 +30,17 @@ # drop_ding0_db_tables(oedb_engine) # pickle file locations path to RLI_Daten_Flex01 mount -pkl_filepath = "/home/local/RL-INSTITUT/bharadwaj.narasimhan/rli_daten_flex/Ding0/20180823154014/" +pkl_filepath = "/home/local/RL-INSTITUT/jonas.huber/rli/Daten_flexibel_01/Ding0/20180823154014" # choose MV Grid Districts to import -grids = np.arange(1, 5) +grids = list(range(1, 6)) # generate all the grids and push them to oedb for grid_no in grids: nw = load_nd_from_pickle(os.path.join(pkl_filepath, - 'ding0_grids__{}.pkl'.format(grid_no))) + 'ding0_grids__{}.pkl'.format(grid_no))) # Extract data from network and put it to DataFrames for csv and for oedb run_id, nw_metadata, \ @@ -48,10 +48,14 @@ mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ lines, mvlv_mapping = export_network(nw) + df_list = [lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, + mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, + lines, mvlv_mapping] + # Send data to OEDB srid = str(int(nw.config['geo']['srid'])) metadata_json = json.loads(nw_metadata) - export_all_dataframes_to_db(oedb_engine, SCHEMA) + export_all_dataframes_to_db(oedb_engine, SCHEMA, nw_metadata, df_list) -#db_tables_change_owner(oedb_engine, SCHEMA) \ No newline at end of file +# db_tables_change_owner(oedb_engine, SCHEMA) \ No newline at end of file diff --git a/ding0/io/skeleton.yml b/ding0/io/skeleton.yml index cc2cadc1..59ce7bde 100644 --- a/ding0/io/skeleton.yml +++ b/ding0/io/skeleton.yml @@ -1,4 +1,4 @@ -name: skeleton +name: exporter channels: - conda-forge - defaults From f92263cdf15993d97dbecb63f5e52e2dc3e051aa Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 14 Feb 2019 17:19:34 +0100 Subject: [PATCH 121/215] added nametupel to export_network func now the pandas dataframes can be used/passed on by the variable network and a df can be accessed like this network.df --- ding0/io/db_export.py | 59 ++++++++++++++++++++++------------------ ding0/io/ding0_pkl2db.py | 21 ++++++++------ ding0/io/export.py | 17 +++++++++--- 3 files changed, 57 insertions(+), 40 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 69ff5ca7..1c1e731c 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -627,7 +627,7 @@ def change_owner(engine, table, role, schema): change_owner(engine, tab, 'oeuser', schema) -def export_all_dataframes_to_db(engine, schema, md=None, all_df_list=None): +def export_all_dataframes_to_db(engine, schema, network): """ exports all data frames from func. 
export_network() to the db tables @@ -677,42 +677,42 @@ def export_all_dataframes_to_db(engine, schema, md=None, all_df_list=None): 'description': str(metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) except: - print(md['run_id']) - metadata_df = pd.DataFrame({'run_id': md['run_id'], - 'description': str(md)}, index=[0]) + print(network.metadata['run_id']) + metadata_df = pd.DataFrame({'run_id': network.metadata['run_id'], + 'description': str(network.metadata)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) - for df in all_df_list: + # 1 - export_df_to_db(engine, schema, lines, "line") + export_df_to_db(engine, schema, network.lines, "line") # 2 - export_df_to_db(engine, schema, lv_cd, "lv_cd") + export_df_to_db(engine, schema, network.lv_cd, "lv_cd") # 3 - export_df_to_db(engine, schema, lv_gen, "lv_gen") + export_df_to_db(engine, schema, network.lv_gen, "lv_gen") # 4 - export_df_to_db(engine, schema, lv_stations, "lv_station") + export_df_to_db(engine, schema, network.lv_stations, "lv_station") # 5 - export_df_to_db(engine, schema, lv_loads, "lv_load") + export_df_to_db(engine, schema, network.lv_loads, "lv_load") # 6 - export_df_to_db(engine, schema, lv_grid, "lv_grid") + export_df_to_db(engine, schema, network.lv_grid, "lv_grid") # 7 - export_df_to_db(engine, schema, mv_cb, "mv_cb") + export_df_to_db(engine, schema, network.mv_cb, "mv_cb") # 8 - export_df_to_db(engine, schema, mv_cd, "mv_cd") + export_df_to_db(engine, schema, network.mv_cd, "mv_cd") # 9 - export_df_to_db(engine, schema, mv_gen, "mv_gen") + export_df_to_db(engine, schema, network.mv_gen, "mv_gen") # 10 - export_df_to_db(engine, schema, mv_stations, "mv_station") + export_df_to_db(engine, schema, network.mv_stations, "mv_station") # 11 - export_df_to_db(engine, schema, mv_loads, "mv_load") + export_df_to_db(engine, schema, network.mv_loads, "mv_load") # 12 - export_df_to_db(engine, schema, mv_grid, "mv_grid") + export_df_to_db(engine, schema, network.mv_grid, "mv_grid") # 13 - export_df_to_db(engine, schema, mvlv_trafos, "mvlv_trafo") + export_df_to_db(engine, schema, network.mvlv_trafos, "mvlv_trafo") # 14 - export_df_to_db(engine, schema, hvmv_trafos, "hvmv_trafo") + export_df_to_db(engine, schema, network.hvmv_trafos, "hvmv_trafo") # 15 - export_df_to_db(engine, schema, mvlv_mapping, "mvlv_mapping") + export_df_to_db(engine, schema, network.mvlv_mapping, "mvlv_mapping") else: raise KeyError("a run_id already present! 
No tables are input!") @@ -750,24 +750,29 @@ def export_all_dataframes_to_db(engine, schema, md=None, all_df_list=None): # choose MV Grid Districts to import # needs to be a list with Integers - mv_grid_districts = list(range(1, 1001)) + mv_grid_districts = list(range(1, 5)) + # run DING0 on selected MV Grid District nw.run_ding0(session=session, mv_grid_districts_no=mv_grid_districts) # return values from export_network() as tupels - run_id, nw_metadata, \ - lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ - mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ - lines, mvlv_mapping = export_network(nw) + network = export_network(nw) + + # df_list = [lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, + # mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, + # lines, mvlv_mapping] # any list of NetworkDing0 also provides run_id # nw_metadata = json.dumps(nw_metadata) - metadata_json = json.loads(nw_metadata) + metadata_json = json.loads(network.nw_metadata) ##################################################### - + # Creates all defined tables create_ding0_sql_tables(oedb_engine, SCHEMA) drop_ding0_db_tables(oedb_engine) # db_tables_change_owner(oedb_engine, SCHEMA) + + # Export all Dataframes returned form export_network(nw) to DB + export_all_dataframes_to_db(oedb_engine, SCHEMA) \ No newline at end of file diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index f8287670..cd19a724 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -43,19 +43,22 @@ 'ding0_grids__{}.pkl'.format(grid_no))) # Extract data from network and put it to DataFrames for csv and for oedb - run_id, nw_metadata, \ - lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ - mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ - lines, mvlv_mapping = export_network(nw) + network = export_network(nw) + + # ToDo:How TO pass the dataframes to export_all_df func???????????????? 
+ # df_dict = {"lv_grid":lv_grid, "lv_gen":lv_gen, "":lv_cd, "":lv_stations, "":mvlv_trafos, "":lv_loads, + # "": mv_grid, mv_gen, "": mv_cb, "": mv_cd, "":mv_stations, "":hvmv_trafos, "":mv_loads, + # "":lines, "":mvlv_mapping} + # + # df_dict = [lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, + # mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, + # lines, mvlv_mapping] - df_list = [lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, - mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, - lines, mvlv_mapping] # Send data to OEDB srid = str(int(nw.config['geo']['srid'])) - metadata_json = json.loads(nw_metadata) + metadata_json = json.loads(network.nw_metadata) - export_all_dataframes_to_db(oedb_engine, SCHEMA, nw_metadata, df_list) + export_all_dataframes_to_db(oedb_engine, SCHEMA, network) # db_tables_change_owner(oedb_engine, SCHEMA) \ No newline at end of file diff --git a/ding0/io/export.py b/ding0/io/export.py index e716e766..7372c3fc 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -14,6 +14,7 @@ import numpy as np import pandas as pd +from collections import namedtuple import json @@ -27,6 +28,14 @@ from shapely.geometry import Point, MultiPoint, MultiLineString, LineString, MultiPolygon, shape, mapping +Network = namedtuple( + 'Network', + [ + 'run_id', 'metadata_json', 'lv_grid', 'lv_gen', 'lv_cd', 'lv_stations', 'mvlv_trafos', 'lv_loads', + 'mv_grid', 'mv_gen', 'mv_cb', 'mv_cd', 'mv_stations', 'hvmv_trafos', 'mv_loads', 'lines', 'mvlv_mapping' + ] +) + def export_network(nw, mode='', run_id=None): """ @@ -599,7 +608,7 @@ def aggregate_loads(la_center, aggr): lines = lines[sorted(lines.columns.tolist())] - return run_id, metadata_json, \ - lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \ - mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \ - lines, mvlv_mapping + return Network( + run_id, metadata_json, lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, mv_grid, mv_gen, mv_cb, + mv_cd, mv_stations, hvmv_trafos, mv_loads, lines, mvlv_mapping + ) From 40de1d0cbacaae30c68afc1b284c7273d74c9e8c Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 14 Feb 2019 17:44:34 +0100 Subject: [PATCH 122/215] fixed wrong useage of metadata_json --- ding0/io/db_export.py | 14 +++++--------- ding0/io/ding0_pkl2db.py | 3 ++- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 1c1e731c..542b8503 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -673,16 +673,15 @@ def export_all_dataframes_to_db(engine, schema, network): if db_versioning.empty: # this leads to wrong run_id if run_id is SET in __main__ -> 'run_id': metadata_json['run_id'] try: - metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], - 'description': str(metadata_json)}, index=[0]) + metadata_df = pd.DataFrame({'run_id': network.metadata_json['run_id'], + 'description': str(network.metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) except: print(network.metadata['run_id']) - metadata_df = pd.DataFrame({'run_id': network.metadata['run_id'], - 'description': str(network.metadata)}, index=[0]) + metadata_df = pd.DataFrame({'run_id': network.metadata_json['run_id'], + 'description': str(network.metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) - # 1 export_df_to_db(engine, schema, network.lines, "line") # 2 @@ -760,13 +759,10 @@ def 
export_all_dataframes_to_db(engine, schema, network): # return values from export_network() as tupels network = export_network(nw) - # df_list = [lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, - # mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, - # lines, mvlv_mapping] # any list of NetworkDing0 also provides run_id # nw_metadata = json.dumps(nw_metadata) - metadata_json = json.loads(network.nw_metadata) + metadata_json = json.loads(network.metadata_json) ##################################################### # Creates all defined tables diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index cd19a724..0c988787 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -57,7 +57,8 @@ # Send data to OEDB srid = str(int(nw.config['geo']['srid'])) - metadata_json = json.loads(network.nw_metadata) + # ToDo:might not be necessary to use this metadata + # metadata_json = json.loads(network.metadata_json) export_all_dataframes_to_db(oedb_engine, SCHEMA, network) From 139db15ae0b260e7a5514d35c660320c65f9b398 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 14 Feb 2019 17:48:55 +0100 Subject: [PATCH 123/215] fixed typo metadata to metadata_json --- ding0/io/db_export.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 542b8503..a61d2c8b 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -677,7 +677,7 @@ def export_all_dataframes_to_db(engine, schema, network): 'description': str(network.metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) except: - print(network.metadata['run_id']) + print(network.metadata_json['run_id']) metadata_df = pd.DataFrame({'run_id': network.metadata_json['run_id'], 'description': str(network.metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) @@ -762,7 +762,8 @@ def export_all_dataframes_to_db(engine, schema, network): # any list of NetworkDing0 also provides run_id # nw_metadata = json.dumps(nw_metadata) - metadata_json = json.loads(network.metadata_json) + # ToDo:might not be necessary to use this metadata + # metadata_json = json.loads(network.metadata_json) ##################################################### # Creates all defined tables From b3c8cfaeb187a50594e358a24aee90af63f739d2 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 14 Feb 2019 17:56:56 +0100 Subject: [PATCH 124/215] parse the run_id provided by network.metadata_json to integer --- ding0/io/db_export.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index a61d2c8b..cf7f3676 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -671,14 +671,16 @@ def export_all_dataframes_to_db(engine, schema, network): # if metadata_json['run_id'] not in db_versioning['run_id']: # Use if just one run_id should be present to the DB table if db_versioning.empty: + # parse run_id to integer + meta_run_id = int(network.metadata_json['run_id']) # this leads to wrong run_id if run_id is SET in __main__ -> 'run_id': metadata_json['run_id'] try: - metadata_df = pd.DataFrame({'run_id': network.metadata_json['run_id'], + metadata_df = pd.DataFrame({'run_id': meta_run_id, 'description': str(network.metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) except: print(network.metadata_json['run_id']) - metadata_df = pd.DataFrame({'run_id': network.metadata_json['run_id'], + 
metadata_df = pd.DataFrame({'run_id': meta_run_id, 'description': str(network.metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) From 5739bb88618d6747cd6862533de96f460b2dc80d Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 14 Feb 2019 18:03:12 +0100 Subject: [PATCH 125/215] load the metadatajson for further usage --- ding0/io/db_export.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index cf7f3676..e5fbb211 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -671,17 +671,18 @@ def export_all_dataframes_to_db(engine, schema, network): # if metadata_json['run_id'] not in db_versioning['run_id']: # Use if just one run_id should be present to the DB table if db_versioning.empty: - # parse run_id to integer + # json.load the metadata_json + metadata_json = json.loads(network.metadata_json) meta_run_id = int(network.metadata_json['run_id']) # this leads to wrong run_id if run_id is SET in __main__ -> 'run_id': metadata_json['run_id'] try: - metadata_df = pd.DataFrame({'run_id': meta_run_id, - 'description': str(network.metadata_json)}, index=[0]) + metadata_df = pd.DataFrame({'run_id': metadata_json, + 'description': str(metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) except: print(network.metadata_json['run_id']) - metadata_df = pd.DataFrame({'run_id': meta_run_id, - 'description': str(network.metadata_json)}, index=[0]) + metadata_df = pd.DataFrame({'run_id': metadata_json, + 'description': str(metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) # 1 From 6ff90c5c93a7826cfe4eab319bb4c649ea96fd44 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 14 Feb 2019 18:08:06 +0100 Subject: [PATCH 126/215] fixed missing key for run id in metadata_json, deleted unused variables --- ding0/io/db_export.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index e5fbb211..ea28d493 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -673,15 +673,14 @@ def export_all_dataframes_to_db(engine, schema, network): if db_versioning.empty: # json.load the metadata_json metadata_json = json.loads(network.metadata_json) - meta_run_id = int(network.metadata_json['run_id']) # this leads to wrong run_id if run_id is SET in __main__ -> 'run_id': metadata_json['run_id'] try: - metadata_df = pd.DataFrame({'run_id': metadata_json, + metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], 'description': str(metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) except: - print(network.metadata_json['run_id']) - metadata_df = pd.DataFrame({'run_id': metadata_json, + print(metadata_json['run_id']) + metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], 'description': str(metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) From db0515e0eb44e78ba19a3ec24cedba66c4613c19 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 15 Feb 2019 12:24:11 +0100 Subject: [PATCH 127/215] hard set srid in func create_wkt_element, added parameter srid to pass on from pickel file --- ding0/io/db_export.py | 80 ++++++++++++++++++++-------------------- ding0/io/ding0_pkl2db.py | 12 +----- 2 files changed, 42 insertions(+), 50 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index ea28d493..75353fbf 100644 --- 
a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -381,12 +381,14 @@ def create_wkt_element(geom): Returns None if the data frame does not contain any geometry """ if geom is not None: - return WKTElement(geom, srid=int(SRID), extended=True) + # return WKTElement(geom, srid=int(SRID), extended=True) + # ToDo: hardset srid find better approach to make it work for dingo run and dingo from pickel + return WKTElement(geom, srid=4326, extended=True) else: return None -def df_sql_write(engine, schema, db_table, dataframe, geom_type=None): +def df_sql_write(engine, schema, db_table, dataframe, geom_type, SRID): """ Convert data frames such that their column names are made small and the index is renamed 'id_db' so as to @@ -627,7 +629,7 @@ def change_owner(engine, table, role, schema): change_owner(engine, tab, 'oeuser', schema) -def export_all_dataframes_to_db(engine, schema, network): +def export_all_dataframes_to_db(engine, schema, network, srid): """ exports all data frames from func. export_network() to the db tables @@ -684,36 +686,36 @@ def export_all_dataframes_to_db(engine, schema, network): 'description': str(metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) - # 1 - export_df_to_db(engine, schema, network.lines, "line") - # 2 - export_df_to_db(engine, schema, network.lv_cd, "lv_cd") - # 3 - export_df_to_db(engine, schema, network.lv_gen, "lv_gen") - # 4 - export_df_to_db(engine, schema, network.lv_stations, "lv_station") - # 5 - export_df_to_db(engine, schema, network.lv_loads, "lv_load") - # 6 - export_df_to_db(engine, schema, network.lv_grid, "lv_grid") - # 7 - export_df_to_db(engine, schema, network.mv_cb, "mv_cb") - # 8 - export_df_to_db(engine, schema, network.mv_cd, "mv_cd") - # 9 - export_df_to_db(engine, schema, network.mv_gen, "mv_gen") - # 10 - export_df_to_db(engine, schema, network.mv_stations, "mv_station") - # 11 - export_df_to_db(engine, schema, network.mv_loads, "mv_load") - # 12 - export_df_to_db(engine, schema, network.mv_grid, "mv_grid") - # 13 - export_df_to_db(engine, schema, network.mvlv_trafos, "mvlv_trafo") - # 14 - export_df_to_db(engine, schema, network.hvmv_trafos, "hvmv_trafo") - # 15 - export_df_to_db(engine, schema, network.mvlv_mapping, "mvlv_mapping") + # 1 + export_df_to_db(engine, schema, network.lines, "line", srid) + # 2 + export_df_to_db(engine, schema, network.lv_cd, "lv_cd", srid) + # 3 + export_df_to_db(engine, schema, network.lv_gen, "lv_gen", srid) + # 4 + export_df_to_db(engine, schema, network.lv_stations, "lv_station", srid) + # 5 + export_df_to_db(engine, schema, network.lv_loads, "lv_load", srid) + # 6 + export_df_to_db(engine, schema, network.lv_grid, "lv_grid", srid) + # 7 + export_df_to_db(engine, schema, network.mv_cb, "mv_cb", srid) + # 8 + export_df_to_db(engine, schema, network.mv_cd, "mv_cd", srid) + # 9 + export_df_to_db(engine, schema, network.mv_gen, "mv_gen", srid) + # 10 + export_df_to_db(engine, schema, network.mv_stations, "mv_station", srid) + # 11 + export_df_to_db(engine, schema, network.mv_loads, "mv_load", srid) + # 12 + export_df_to_db(engine, schema, network.mv_grid, "mv_grid", srid) + # 13 + export_df_to_db(engine, schema, network.mvlv_trafos, "mvlv_trafo", srid) + # 14 + export_df_to_db(engine, schema, network.hvmv_trafos, "hvmv_trafo", srid) + # 15 + export_df_to_db(engine, schema, network.mvlv_mapping, "mvlv_mapping", srid) else: raise KeyError("a run_id already present! 
No tables are input!") @@ -738,12 +740,12 @@ def export_all_dataframes_to_db(engine, schema, network): # #########Ding0 Network and NW Metadata################ # create ding0 Network instance - nw = NetworkDing0(name='network') + # nw = NetworkDing0(name='network') # nw = load_nd_from_pickle(filename='ding0_grids_example.pkl', path='ding0\ding0\examples\ding0_grids_example.pkl') # srid # ToDo: Check why converted to int and string - SRID = str(int(nw.config['geo']['srid'])) + # SRID = str(int(nw.config['geo']['srid'])) # provide run_id, note that the run_id is unique to the DB table # if not set it will be set @@ -755,11 +757,11 @@ def export_all_dataframes_to_db(engine, schema, network): # run DING0 on selected MV Grid District - nw.run_ding0(session=session, - mv_grid_districts_no=mv_grid_districts) + # nw.run_ding0(session=session, + # mv_grid_districts_no=mv_grid_districts) # return values from export_network() as tupels - network = export_network(nw) + # network = export_network(nw) # any list of NetworkDing0 also provides run_id @@ -774,4 +776,4 @@ def export_all_dataframes_to_db(engine, schema, network): # db_tables_change_owner(oedb_engine, SCHEMA) # Export all Dataframes returned form export_network(nw) to DB - export_all_dataframes_to_db(oedb_engine, SCHEMA) \ No newline at end of file + # export_all_dataframes_to_db(oedb_engine, SCHEMA) \ No newline at end of file diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 0c988787..b20135a1 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -45,21 +45,11 @@ # Extract data from network and put it to DataFrames for csv and for oedb network = export_network(nw) - # ToDo:How TO pass the dataframes to export_all_df func???????????????? - # df_dict = {"lv_grid":lv_grid, "lv_gen":lv_gen, "":lv_cd, "":lv_stations, "":mvlv_trafos, "":lv_loads, - # "": mv_grid, mv_gen, "": mv_cb, "": mv_cd, "":mv_stations, "":hvmv_trafos, "":mv_loads, - # "":lines, "":mvlv_mapping} - # - # df_dict = [lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, - # mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, - # lines, mvlv_mapping] - - # Send data to OEDB srid = str(int(nw.config['geo']['srid'])) # ToDo:might not be necessary to use this metadata # metadata_json = json.loads(network.metadata_json) - export_all_dataframes_to_db(oedb_engine, SCHEMA, network) + export_all_dataframes_to_db(oedb_engine, SCHEMA, network, srid) # db_tables_change_owner(oedb_engine, SCHEMA) \ No newline at end of file From d07a7d1b6676dbf31b41a9d0eaed74e4ef379f69 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 15 Feb 2019 12:27:13 +0100 Subject: [PATCH 128/215] added missing srid parameter --- ding0/io/db_export.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 75353fbf..fbfa47d1 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -483,7 +483,7 @@ def df_sql_write(engine, schema, db_table, dataframe, geom_type, SRID): sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) -def export_df_to_db(engine, schema, df, tabletype): +def export_df_to_db(engine, schema, df, tabletype, srid): """ Writes values to the connected DB. Values from Pandas data frame. 
Decides which table by tabletype @@ -501,50 +501,50 @@ def export_df_to_db(engine, schema, df, tabletype): """ print("Exporting table type : {}".format(tabletype)) if tabletype == 'line': - df_sql_write(engine, schema, DING0_TABLES['line'], df, 'LINESTRING') + df_sql_write(engine, schema, DING0_TABLES['line'], df, 'LINESTRING', srid) elif tabletype == 'lv_cd': df = df.drop(['lv_grid_id'], axis=1) - df_sql_write(engine, schema, DING0_TABLES['lv_branchtee'], df, 'POINT') + df_sql_write(engine, schema, DING0_TABLES['lv_branchtee'], df, 'POINT', srid) elif tabletype == 'lv_gen': - df_sql_write(engine, schema, DING0_TABLES['lv_generator'], df, 'POINT') + df_sql_write(engine, schema, DING0_TABLES['lv_generator'], df, 'POINT', srid) elif tabletype == 'lv_load': - df_sql_write(engine, schema, DING0_TABLES['lv_load'], df, 'POINT') + df_sql_write(engine, schema, DING0_TABLES['lv_load'], df, 'POINT', srid) elif tabletype == 'lv_grid': - df_sql_write(engine, schema, DING0_TABLES['lv_grid'], df, 'MULTIPOLYGON') + df_sql_write(engine, schema, DING0_TABLES['lv_grid'], df, 'MULTIPOLYGON', srid) elif tabletype == 'lv_station': - df_sql_write(engine, schema, DING0_TABLES['lv_station'], df, 'POINT') + df_sql_write(engine, schema, DING0_TABLES['lv_station'], df, 'POINT', srid) elif tabletype == 'mvlv_trafo': - df_sql_write(engine, schema, DING0_TABLES['mvlv_transformer'], df, 'POINT') + df_sql_write(engine, schema, DING0_TABLES['mvlv_transformer'], df, 'POINT', srid) elif tabletype == 'mvlv_mapping': - df_sql_write(engine, schema, DING0_TABLES['mvlv_mapping'], df) + df_sql_write(engine, schema, DING0_TABLES['mvlv_mapping'], df, srid) elif tabletype == 'mv_cd': - df_sql_write(engine, schema, DING0_TABLES['mv_branchtee'], df, 'POINT') + df_sql_write(engine, schema, DING0_TABLES['mv_branchtee'], df, 'POINT', srid) elif tabletype == 'mv_cb': - df_sql_write(engine, schema, DING0_TABLES['mv_circuitbreaker'], df, 'POINT') + df_sql_write(engine, schema, DING0_TABLES['mv_circuitbreaker'], df, 'POINT', srid) elif tabletype == 'mv_gen': - df_sql_write(engine, schema, DING0_TABLES['mv_generator'], df, 'POINT') + df_sql_write(engine, schema, DING0_TABLES['mv_generator'], df, 'POINT', srid) elif tabletype == 'mv_load': - df_sql_write(engine, schema, DING0_TABLES['mv_load'], df, 'GEOMETRY') + df_sql_write(engine, schema, DING0_TABLES['mv_load'], df, 'GEOMETRY', srid) elif tabletype == 'mv_grid': - df_sql_write(engine, schema, DING0_TABLES['mv_grid'], df, 'MULTIPOLYGON') + df_sql_write(engine, schema, DING0_TABLES['mv_grid'], df, 'MULTIPOLYGON', srid) elif tabletype == 'mv_station': - df_sql_write(engine, schema, DING0_TABLES['mv_station'], df, 'POINT') + df_sql_write(engine, schema, DING0_TABLES['mv_station'], df, 'POINT', srid) elif tabletype == 'hvmv_trafo': - df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df, 'POINT') + df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df, 'POINT', srid) # ToDo: function works but throws unexpected error (versioning tbl dosent exists) From 182d81e9bc05c9373ac61a95264aa65dec4a7f55 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 15 Feb 2019 13:01:38 +0100 Subject: [PATCH 129/215] added func export_all_pkl_to_db() to handel the different requirements for the pickle export --- ding0/io/db_export.py | 130 ++++++++++++++++++++++++++++++++++++--- ding0/io/ding0_pkl2db.py | 7 +-- 2 files changed, 123 insertions(+), 14 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index fbfa47d1..8a9f5d4d 100644 --- a/ding0/io/db_export.py +++ 
b/ding0/io/db_export.py @@ -388,7 +388,7 @@ def create_wkt_element(geom): return None -def df_sql_write(engine, schema, db_table, dataframe, geom_type, SRID): +def df_sql_write(engine, schema, db_table, dataframe, geom_type=None, SRID=None): """ Convert data frames such that their column names are made small and the index is renamed 'id_db' so as to @@ -663,6 +663,10 @@ def export_all_dataframes_to_db(engine, schema, network, srid): Sqlalchemy database engine schema : str The schema in which the tables are to be created + network: namedtuple + All the return values(Data Frames) from export_network() + srid: int + The current srid provided by the ding0 network """ if engine.dialect.has_table(engine, DING0_TABLES["versioning"], schema=schema): @@ -676,15 +680,15 @@ def export_all_dataframes_to_db(engine, schema, network, srid): # json.load the metadata_json metadata_json = json.loads(network.metadata_json) # this leads to wrong run_id if run_id is SET in __main__ -> 'run_id': metadata_json['run_id'] - try: - metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], - 'description': str(metadata_json)}, index=[0]) - df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) - except: - print(metadata_json['run_id']) - metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], - 'description': str(metadata_json)}, index=[0]) - df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) + # try: + metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], + 'description': str(metadata_json)}, index=[0]) + df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) + # except: + # print(metadata_json['run_id']) + # metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], + # 'description': str(metadata_json)}, index=[0]) + # df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) # 1 export_df_to_db(engine, schema, network.lines, "line", srid) @@ -724,6 +728,112 @@ def export_all_dataframes_to_db(engine, schema, network, srid): print("WARNING: There is no " + DING0_TABLES["versioning"] + " table in the schema: " + schema) +def export_all_pkl_to_db(engine, schema, network, srid, grid_no): + """ + This function basicly works the same way export_all_dataframes_to_db() does. + It is implemented to handel the diffrent ways of executing the functions. + + If loaded form pickel files a for loop is included and every grid district will be uploaded one after another. + This chances the requirements for this function. 
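# Minimal sketch of the per-district flow described in this docstring, not the
# function itself: the versioning row is written only while the table is still
# empty; afterwards every pickled grid district simply appends its DataFrames
# under the same run_id. The helper name is an assumption; table and schema are
# whatever the caller passes in.
import json

import pandas as pd

def ensure_versioning_row(engine, schema, versioning_table, metadata_json_str):
    # read the existing versioning entries (the table must have been created first)
    existing = pd.read_sql_table(versioning_table, engine, schema,
                                 columns=['run_id', 'description'])
    metadata = json.loads(metadata_json_str)
    if existing.empty:
        row = pd.DataFrame({'run_id': metadata['run_id'],
                            'description': metadata_json_str}, index=[0])
        row.to_sql(versioning_table, con=engine, schema=schema,
                   if_exists='append', index=False)
    return metadata['run_id']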
+ + Parameters + ---------- + engine : sqlalchemy.engine.base.Engine + Sqlalchemy database engine + schema : str + The schema in which the tables are to be created + network: namedtuple + All the return values(Data Frames) from export_network() + srid: int + The current srid provided by the ding0 network + grid_no: int + The Griddistrict number + """ + + if engine.dialect.has_table(engine, DING0_TABLES["versioning"], schema=schema): + + db_versioning = pd.read_sql_table(DING0_TABLES['versioning'], engine, schema, + columns=['run_id', 'description']) + + if db_versioning.empty: + + metadata_json = json.loads(network.metadata_json) + + metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], + 'description': str(metadata_json)}, index=[0]) + df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) + + # 1 + export_df_to_db(engine, schema, network.lines, "line", srid) + # 2 + export_df_to_db(engine, schema, network.lv_cd, "lv_cd", srid) + # 3 + export_df_to_db(engine, schema, network.lv_gen, "lv_gen", srid) + # 4 + export_df_to_db(engine, schema, network.lv_stations, "lv_station", srid) + # 5 + export_df_to_db(engine, schema, network.lv_loads, "lv_load", srid) + # 6 + export_df_to_db(engine, schema, network.lv_grid, "lv_grid", srid) + # 7 + export_df_to_db(engine, schema, network.mv_cb, "mv_cb", srid) + # 8 + export_df_to_db(engine, schema, network.mv_cd, "mv_cd", srid) + # 9 + export_df_to_db(engine, schema, network.mv_gen, "mv_gen", srid) + # 10 + export_df_to_db(engine, schema, network.mv_stations, "mv_station", srid) + # 11 + export_df_to_db(engine, schema, network.mv_loads, "mv_load", srid) + # 12 + export_df_to_db(engine, schema, network.mv_grid, "mv_grid", srid) + # 13 + export_df_to_db(engine, schema, network.mvlv_trafos, "mvlv_trafo", srid) + # 14 + export_df_to_db(engine, schema, network.hvmv_trafos, "hvmv_trafo", srid) + # 15 + export_df_to_db(engine, schema, network.mvlv_mapping, "mvlv_mapping", srid) + + print('Griddistrict {} has been exported to the database').format(grid_no) + + else: + # 1 + export_df_to_db(engine, schema, network.lines, "line", srid) + # 2 + export_df_to_db(engine, schema, network.lv_cd, "lv_cd", srid) + # 3 + export_df_to_db(engine, schema, network.lv_gen, "lv_gen", srid) + # 4 + export_df_to_db(engine, schema, network.lv_stations, "lv_station", srid) + # 5 + export_df_to_db(engine, schema, network.lv_loads, "lv_load", srid) + # 6 + export_df_to_db(engine, schema, network.lv_grid, "lv_grid", srid) + # 7 + export_df_to_db(engine, schema, network.mv_cb, "mv_cb", srid) + # 8 + export_df_to_db(engine, schema, network.mv_cd, "mv_cd", srid) + # 9 + export_df_to_db(engine, schema, network.mv_gen, "mv_gen", srid) + # 10 + export_df_to_db(engine, schema, network.mv_stations, "mv_station", srid) + # 11 + export_df_to_db(engine, schema, network.mv_loads, "mv_load", srid) + # 12 + export_df_to_db(engine, schema, network.mv_grid, "mv_grid", srid) + # 13 + export_df_to_db(engine, schema, network.mvlv_trafos, "mvlv_trafo", srid) + # 14 + export_df_to_db(engine, schema, network.hvmv_trafos, "hvmv_trafo", srid) + # 15 + export_df_to_db(engine, schema, network.mvlv_mapping, "mvlv_mapping", srid) + + print('Griddistrict {} has been exported to the database').format(grid_no) + + else: + print("WARNING: There is no " + DING0_TABLES["versioning"] + " table in the schema: " + schema) + + if __name__ == "__main__": # #########SQLAlchemy and DB table################ diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index b20135a1..f0e20c1e 100644 
--- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -12,7 +12,7 @@ from ding0.tools.results import load_nd_from_pickle from ding0.io.export import export_network from ding0.io.db_export import METADATA, create_ding0_sql_tables, \ - export_all_dataframes_to_db, db_tables_change_owner, drop_ding0_db_tables + export_all_pkl_to_db, db_tables_change_owner, drop_ding0_db_tables from sqlalchemy.orm import sessionmaker @@ -39,8 +39,7 @@ # generate all the grids and push them to oedb for grid_no in grids: - nw = load_nd_from_pickle(os.path.join(pkl_filepath, - 'ding0_grids__{}.pkl'.format(grid_no))) + nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) # Extract data from network and put it to DataFrames for csv and for oedb network = export_network(nw) @@ -50,6 +49,6 @@ # ToDo:might not be necessary to use this metadata # metadata_json = json.loads(network.metadata_json) - export_all_dataframes_to_db(oedb_engine, SCHEMA, network, srid) + export_all_pkl_to_db(oedb_engine, SCHEMA, network, srid, grid_no) # db_tables_change_owner(oedb_engine, SCHEMA) \ No newline at end of file From 6044c8cb90265ea2bfc25e6f9c7a9803e2ce8e5f Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 15 Feb 2019 13:27:41 +0100 Subject: [PATCH 130/215] fixed wrong usage of pritn() and format --- ding0/io/db_export.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 8a9f5d4d..96885ef8 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -417,6 +417,8 @@ def df_sql_write(engine, schema, db_table, dataframe, geom_type=None, SRID=None) geom_type: str Prameter for handling data frames with different geometry types + SRID: int + The current srid provided by the ding0 network """ # rename data frame column DB like @@ -498,6 +500,8 @@ def export_df_to_db(engine, schema, df, tabletype, srid): df : pandas data frame tabletype : str Set the destination table where the pd data frame will be stored in + srid: int + The current srid provided by the ding0 network """ print("Exporting table type : {}".format(tabletype)) if tabletype == 'line': @@ -632,6 +636,8 @@ def change_owner(engine, table, role, schema): def export_all_dataframes_to_db(engine, schema, network, srid): """ exports all data frames from func. export_network() to the db tables + This works with a completely generated ding0 network(all grid districts have to be generated at once), + all provided DataFrames will be uploaded. Instructions: 1. Create a database connection to the "OEDB" for example use the "from egoio.tools.db import connection" function @@ -730,11 +736,10 @@ def export_all_dataframes_to_db(engine, schema, network, srid): def export_all_pkl_to_db(engine, schema, network, srid, grid_no): """ - This function basicly works the same way export_all_dataframes_to_db() does. - It is implemented to handel the diffrent ways of executing the functions. - - If loaded form pickel files a for loop is included and every grid district will be uploaded one after another. - This chances the requirements for this function. + This function basically works the same way export_all_dataframes_to_db() does. + It is implemented to handel the diffrent ways of executing the functions: + If grids are loaded form pickle files a for loop is included and every grid district will be uploaded one after + another. This chances the requirements for this function. 
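# The bug this commit message refers to, shown in isolation as a sketch: print()
# returns None, so chaining .format() onto the print call raises AttributeError.
# Format the message first, then print it.
grid_no = 3040  # illustrative grid district number

# broken: print(...) evaluates to None, then None.format(grid_no) fails
# print('Griddistrict {} has been exported to the database').format(grid_no)

# correct: format the string, then hand the result to print()
print('Griddistrict {} has been exported to the database'.format(grid_no))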
Parameters ---------- @@ -794,8 +799,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): # 15 export_df_to_db(engine, schema, network.mvlv_mapping, "mvlv_mapping", srid) - print('Griddistrict {} has been exported to the database').format(grid_no) - + print('Griddistrict' + str(grid_no) + 'has been exported to the database') else: # 1 export_df_to_db(engine, schema, network.lines, "line", srid) @@ -828,7 +832,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): # 15 export_df_to_db(engine, schema, network.mvlv_mapping, "mvlv_mapping", srid) - print('Griddistrict {} has been exported to the database').format(grid_no) + print('Griddistrict' + str(grid_no) + 'has been exported to the database') else: print("WARNING: There is no " + DING0_TABLES["versioning"] + " table in the schema: " + schema) @@ -837,7 +841,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): if __name__ == "__main__": # #########SQLAlchemy and DB table################ - oedb_engine = connection(section='oedb') + oedb_engine = connection(section='vpn_oedb') session = sessionmaker(bind=oedb_engine)() # Testing Database From 4a4c3fc700d47770a9b66aff127bbec7f9cbbdf6 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 15 Feb 2019 14:08:55 +0100 Subject: [PATCH 131/215] provide run id as parameter for all pickled grid districts --- ding0/io/ding0_pkl2db.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index f0e20c1e..5c54bbb2 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -39,10 +39,11 @@ # generate all the grids and push them to oedb for grid_no in grids: + # ToDo: run_id changes for every file nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) # Extract data from network and put it to DataFrames for csv and for oedb - network = export_network(nw) + network = export_network(nw, run_id=20190215122822) # Send data to OEDB srid = str(int(nw.config['geo']['srid'])) From 1181bfa92fc3da0564df72f8b1fb22a908ac9624 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 15 Feb 2019 14:57:48 +0100 Subject: [PATCH 132/215] first try fixing problem with different run id for every pickle file --- ding0/io/db_export.py | 24 ++++++++++++++---------- ding0/io/ding0_pkl2db.py | 9 +++++++-- 2 files changed, 21 insertions(+), 12 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 96885ef8..b40ada8a 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -485,7 +485,7 @@ def df_sql_write(engine, schema, db_table, dataframe, geom_type=None, SRID=None) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) -def export_df_to_db(engine, schema, df, tabletype, srid): +def export_df_to_db(engine, schema, df, tabletype, srid=None): """ Writes values to the connected DB. Values from Pandas data frame. Decides which table by tabletype @@ -633,7 +633,7 @@ def change_owner(engine, table, role, schema): change_owner(engine, tab, 'oeuser', schema) -def export_all_dataframes_to_db(engine, schema, network, srid): +def export_all_dataframes_to_db(engine, schema, network=None, srid=None): """ exports all data frames from func. 
export_network() to the db tables This works with a completely generated ding0 network(all grid districts have to be generated at once), @@ -734,7 +734,7 @@ def export_all_dataframes_to_db(engine, schema, network, srid): print("WARNING: There is no " + DING0_TABLES["versioning"] + " table in the schema: " + schema) -def export_all_pkl_to_db(engine, schema, network, srid, grid_no): +def export_all_pkl_to_db(engine, schema, network, srid, grid_no, run_id): """ This function basically works the same way export_all_dataframes_to_db() does. It is implemented to handel the diffrent ways of executing the functions: @@ -764,7 +764,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): metadata_json = json.loads(network.metadata_json) - metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], + metadata_df = pd.DataFrame({'run_id': run_id, 'description': str(metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) @@ -854,12 +854,16 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): # #########Ding0 Network and NW Metadata################ # create ding0 Network instance - # nw = NetworkDing0(name='network') + nw = NetworkDing0(name='network') # nw = load_nd_from_pickle(filename='ding0_grids_example.pkl', path='ding0\ding0\examples\ding0_grids_example.pkl') # srid # ToDo: Check why converted to int and string # SRID = str(int(nw.config['geo']['srid'])) + SRID = int(nw.config['geo']['srid']) + + # provide run id for pickle upload + run_id = 20190215122822 # provide run_id, note that the run_id is unique to the DB table # if not set it will be set @@ -871,11 +875,11 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): # run DING0 on selected MV Grid District - # nw.run_ding0(session=session, - # mv_grid_districts_no=mv_grid_districts) + nw.run_ding0(session=session, + mv_grid_districts_no=mv_grid_districts) # return values from export_network() as tupels - # network = export_network(nw) + network = export_network(nw, run_id=run_id) # any list of NetworkDing0 also provides run_id @@ -886,8 +890,8 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): ##################################################### # Creates all defined tables create_ding0_sql_tables(oedb_engine, SCHEMA) - drop_ding0_db_tables(oedb_engine) + # drop_ding0_db_tables(oedb_engine) # db_tables_change_owner(oedb_engine, SCHEMA) # Export all Dataframes returned form export_network(nw) to DB - # export_all_dataframes_to_db(oedb_engine, SCHEMA) \ No newline at end of file + export_all_dataframes_to_db(oedb_engine, SCHEMA, network=network, srid=SRID) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 5c54bbb2..939dc84c 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -46,10 +46,15 @@ network = export_network(nw, run_id=20190215122822) # Send data to OEDB - srid = str(int(nw.config['geo']['srid'])) + # SRID = str(int(nw.config['geo']['srid'])) + srid = int(nw.config['geo']['srid']) + + # provide run id for pickle upload + run_id = 20190215122822 + # ToDo:might not be necessary to use this metadata # metadata_json = json.loads(network.metadata_json) - export_all_pkl_to_db(oedb_engine, SCHEMA, network, srid, grid_no) + export_all_pkl_to_db(oedb_engine, SCHEMA, network, srid, grid_no, run_id) # db_tables_change_owner(oedb_engine, SCHEMA) \ No newline at end of file From 6720e25d79d671d79fdde1ed943ed6575f5d80d6 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 15 Feb 2019 18:03:36 +0100 
Subject: [PATCH 133/215] set the provided run id in metadata_json --- ding0/io/db_export.py | 6 +++--- ding0/io/export.py | 3 +++ 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index b40ada8a..0a40ba29 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -799,7 +799,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no, run_id): # 15 export_df_to_db(engine, schema, network.mvlv_mapping, "mvlv_mapping", srid) - print('Griddistrict' + str(grid_no) + 'has been exported to the database') + print('Griddistrict_' + str(grid_no) + '_has been exported to the database') else: # 1 export_df_to_db(engine, schema, network.lines, "line", srid) @@ -832,7 +832,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no, run_id): # 15 export_df_to_db(engine, schema, network.mvlv_mapping, "mvlv_mapping", srid) - print('Griddistrict' + str(grid_no) + 'has been exported to the database') + print('Griddistrict_' + str(grid_no) + '_has been exported to the database') else: print("WARNING: There is no " + DING0_TABLES["versioning"] + " table in the schema: " + schema) @@ -890,7 +890,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no, run_id): ##################################################### # Creates all defined tables create_ding0_sql_tables(oedb_engine, SCHEMA) - # drop_ding0_db_tables(oedb_engine) + drop_ding0_db_tables(oedb_engine) # db_tables_change_owner(oedb_engine, SCHEMA) # Export all Dataframes returned form export_network(nw) to DB diff --git a/ding0/io/export.py b/ding0/io/export.py index 7372c3fc..c1f1d2ab 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -74,6 +74,9 @@ def export_network(nw, mode='', run_id=None): # from datetime import datetime if not run_id: run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") + else: + run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") + metadata_json = json.dumps(nw.metadata) ############################## ############################# From beb281186c1636ffacb4b27b40e48b27a0e1f426 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 15 Feb 2019 18:11:38 +0100 Subject: [PATCH 134/215] set the provided run id in metadata_json --- ding0/io/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/export.py b/ding0/io/export.py index c1f1d2ab..84cde3e0 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -75,7 +75,7 @@ def export_network(nw, mode='', run_id=None): if not run_id: run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") else: - run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") + nw.metadata['run_id'] = run_id # datetime.now().strftime("%Y%m%d%H%M%S") metadata_json = json.dumps(nw.metadata) ############################## From 3123556384ceb954019704478524a80dcf6b3739 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 15 Feb 2019 20:08:02 +0100 Subject: [PATCH 135/215] set the provided run id in metadata_json --- ding0/io/db_export.py | 16 ++++++++-------- ding0/io/ding0_pkl2db.py | 7 +++++-- ding0/io/export.py | 3 +-- 3 files changed, 14 insertions(+), 12 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 0a40ba29..f4b00e15 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -764,7 +764,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no, run_id): metadata_json = json.loads(network.metadata_json) - metadata_df = pd.DataFrame({'run_id': run_id, + metadata_df 
= pd.DataFrame({'run_id': metadata_json['run_id'], 'description': str(metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) @@ -874,12 +874,12 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no, run_id): mv_grid_districts = list(range(1, 5)) - # run DING0 on selected MV Grid District - nw.run_ding0(session=session, - mv_grid_districts_no=mv_grid_districts) - - # return values from export_network() as tupels - network = export_network(nw, run_id=run_id) + # # run DING0 on selected MV Grid District + # nw.run_ding0(session=session, + # mv_grid_districts_no=mv_grid_districts) + # + # # return values from export_network() as tupels + # network = export_network(nw, run_id=run_id) # any list of NetworkDing0 also provides run_id @@ -894,4 +894,4 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no, run_id): # db_tables_change_owner(oedb_engine, SCHEMA) # Export all Dataframes returned form export_network(nw) to DB - export_all_dataframes_to_db(oedb_engine, SCHEMA, network=network, srid=SRID) + # export_all_dataframes_to_db(oedb_engine, SCHEMA, network=network, srid=SRID) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 939dc84c..7ce04a75 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -42,15 +42,18 @@ # ToDo: run_id changes for every file nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) + run_id = 20190215122822 + nw.metadata['run_id'] = run_id + # Extract data from network and put it to DataFrames for csv and for oedb - network = export_network(nw, run_id=20190215122822) + network = export_network(nw) # Send data to OEDB # SRID = str(int(nw.config['geo']['srid'])) srid = int(nw.config['geo']['srid']) # provide run id for pickle upload - run_id = 20190215122822 + # ToDo:might not be necessary to use this metadata # metadata_json = json.loads(network.metadata_json) diff --git a/ding0/io/export.py b/ding0/io/export.py index 84cde3e0..f7a62f95 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -74,8 +74,7 @@ def export_network(nw, mode='', run_id=None): # from datetime import datetime if not run_id: run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") - else: - nw.metadata['run_id'] = run_id # datetime.now().strftime("%Y%m%d%H%M%S") + metadata_json = json.dumps(nw.metadata) ############################## From c3c2c62c8116219ac87d13c1a7f1cadabc040433 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 15 Feb 2019 20:12:48 +0100 Subject: [PATCH 136/215] set the provided run id in metadata_json --- ding0/io/ding0_pkl2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 7ce04a75..ead2c6e0 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -34,7 +34,7 @@ # choose MV Grid Districts to import -grids = list(range(1, 6)) +grids = list(range(1, 5)) # generate all the grids and push them to oedb for grid_no in grids: From 3ad8763fe06cbb8ecf56bce051f082bf8904bff4 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Sat, 16 Feb 2019 14:12:29 +0100 Subject: [PATCH 137/215] test export with other grid districts --- ding0/io/ding0_pkl2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index ead2c6e0..26ce1bf9 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -34,7 +34,7 @@ # choose MV Grid Districts to import -grids = list(range(1, 5)) +grids = 
list(range(3, 6)) # generate all the grids and push them to oedb for grid_no in grids: From b0929408e2cff874b4b0e70eacc90274c13f224f Mon Sep 17 00:00:00 2001 From: Jonas H Date: Sat, 16 Feb 2019 14:15:52 +0100 Subject: [PATCH 138/215] print run id for testing --- ding0/io/db_export.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index f4b00e15..1e625ccf 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -763,6 +763,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no, run_id): if db_versioning.empty: metadata_json = json.loads(network.metadata_json) + print(str(metadata_json['run_id'])) metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], 'description': str(metadata_json)}, index=[0]) From 41f9c1200d16792a3c766c6b701fad25a5f6ef05 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Sat, 16 Feb 2019 14:36:43 +0100 Subject: [PATCH 139/215] testing other run id --- ding0/io/db_export.py | 3 ++- ding0/io/ding0_pkl2db.py | 8 ++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 1e625ccf..4532bba1 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -734,7 +734,7 @@ def export_all_dataframes_to_db(engine, schema, network=None, srid=None): print("WARNING: There is no " + DING0_TABLES["versioning"] + " table in the schema: " + schema) -def export_all_pkl_to_db(engine, schema, network, srid, grid_no, run_id): +def export_all_pkl_to_db(engine, schema, network, srid, grid_no): """ This function basically works the same way export_all_dataframes_to_db() does. It is implemented to handel the diffrent ways of executing the functions: @@ -802,6 +802,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no, run_id): print('Griddistrict_' + str(grid_no) + '_has been exported to the database') else: + # network.metadata['run_id'] # 1 export_df_to_db(engine, schema, network.lines, "line", srid) # 2 diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 26ce1bf9..14c96260 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -42,11 +42,11 @@ # ToDo: run_id changes for every file nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) - run_id = 20190215122822 - nw.metadata['run_id'] = run_id + # Extract data from network and put it to DataFrames for csv and for oedb - network = export_network(nw) + network = export_network(nw, run_id=20190215122822) + # Send data to OEDB # SRID = str(int(nw.config['geo']['srid'])) @@ -58,6 +58,6 @@ # ToDo:might not be necessary to use this metadata # metadata_json = json.loads(network.metadata_json) - export_all_pkl_to_db(oedb_engine, SCHEMA, network, srid, grid_no, run_id) + export_all_pkl_to_db(oedb_engine, SCHEMA, network, srid, grid_no) # db_tables_change_owner(oedb_engine, SCHEMA) \ No newline at end of file From ba4eb406713ea335aff60ea591e996f82afda51b Mon Sep 17 00:00:00 2001 From: Jonas H Date: Sat, 16 Feb 2019 14:43:53 +0100 Subject: [PATCH 140/215] set the provided run id in nw.metadata --- ding0/io/export.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ding0/io/export.py b/ding0/io/export.py index f7a62f95..3b7c6361 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -74,7 +74,8 @@ def export_network(nw, mode='', run_id=None): # from datetime import datetime if not run_id: run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") - + else: + nw.metadata['run_id'] = run_id 
metadata_json = json.dumps(nw.metadata) ############################## From f2f4e9cd23f5229e3a57df4686c6e61db4fb9618 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Sat, 16 Feb 2019 14:55:57 +0100 Subject: [PATCH 141/215] set the provided run id in nw.metadata --- ding0/io/export.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ding0/io/export.py b/ding0/io/export.py index 3b7c6361..5335eb6b 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -75,9 +75,12 @@ def export_network(nw, mode='', run_id=None): if not run_id: run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") else: + print("test") nw.metadata['run_id'] = run_id - + metadata_json = json.dumps(nw.metadata) + metadata_json = json.loads(metadata_json) + print(str(metadata_json['run_id'])) ############################## ############################# # go through the grid collecting info From c2959ba8b0040206277ca3ce225d22a43889a76c Mon Sep 17 00:00:00 2001 From: Jonas H Date: Sat, 16 Feb 2019 14:59:48 +0100 Subject: [PATCH 142/215] insert testing prints --- ding0/io/export.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/ding0/io/export.py b/ding0/io/export.py index 5335eb6b..03b34b50 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -72,15 +72,17 @@ def export_network(nw, mode='', run_id=None): lv_info = False ############################## # from datetime import datetime + print("1 " + run_id) if not run_id: run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") else: print("test") nw.metadata['run_id'] = run_id + print("2 "+run_id) metadata_json = json.dumps(nw.metadata) metadata_json = json.loads(metadata_json) - print(str(metadata_json['run_id'])) + print("3" + str(metadata_json['run_id'])) ############################## ############################# # go through the grid collecting info From 4c2082c2e0f7a93c8acbc53b14a09f9c33489ede Mon Sep 17 00:00:00 2001 From: Jonas H Date: Sat, 16 Feb 2019 15:01:41 +0100 Subject: [PATCH 143/215] insert testing prints --- ding0/io/export.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ding0/io/export.py b/ding0/io/export.py index 03b34b50..bc163221 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -72,13 +72,13 @@ def export_network(nw, mode='', run_id=None): lv_info = False ############################## # from datetime import datetime - print("1 " + run_id) + print("1 " + str(run_id)) if not run_id: run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") else: print("test") nw.metadata['run_id'] = run_id - print("2 "+run_id) + print("2 "+ str(run_id)) metadata_json = json.dumps(nw.metadata) metadata_json = json.loads(metadata_json) From 7e771e9ce86c35cbedcbb535d82040486bbc8e75 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Sat, 16 Feb 2019 15:12:06 +0100 Subject: [PATCH 144/215] insert testing prints --- ding0/io/export.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/ding0/io/export.py b/ding0/io/export.py index bc163221..516ca9c1 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -75,14 +75,15 @@ def export_network(nw, mode='', run_id=None): print("1 " + str(run_id)) if not run_id: run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") + metadata_json = json.dumps(nw.metadata) else: print("test") nw.metadata['run_id'] = run_id - print("2 "+ str(run_id)) - - metadata_json = json.dumps(nw.metadata) - metadata_json = json.loads(metadata_json) - print("3" + str(metadata_json['run_id'])) 
+ print("2 " + str(run_id)) + metadata_json = json.dumps(nw.metadata) + + metadata_json1 = json.loads(metadata_json) + print("3" + str(metadata_json1['run_id'])) ############################## ############################# # go through the grid collecting info From ed79d817066eb8a0f21b9a355efea2ac78d5f483 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Sat, 16 Feb 2019 15:16:35 +0100 Subject: [PATCH 145/215] change nw.metadata after pickle import --- ding0/io/ding0_pkl2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 14c96260..9741a6b0 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -41,7 +41,7 @@ # ToDo: run_id changes for every file nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) - + nw.metadata['run_id'] = 20190215122822 # Extract data from network and put it to DataFrames for csv and for oedb From 981b597d9036e1271d36117d5c2cc058689295a3 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Sat, 16 Feb 2019 15:20:39 +0100 Subject: [PATCH 146/215] change nw.metadata(run id) --- ding0/io/ding0_pkl2db.py | 1 - ding0/io/export.py | 5 +++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 9741a6b0..924046ed 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -41,7 +41,6 @@ # ToDo: run_id changes for every file nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) - nw.metadata['run_id'] = 20190215122822 # Extract data from network and put it to DataFrames for csv and for oedb diff --git a/ding0/io/export.py b/ding0/io/export.py index 516ca9c1..a5b8e6b0 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -78,9 +78,10 @@ def export_network(nw, mode='', run_id=None): metadata_json = json.dumps(nw.metadata) else: print("test") - nw.metadata['run_id'] = run_id + # nw.metadata['run_id'] = run_id + print("2 " + str(run_id)) - metadata_json = json.dumps(nw.metadata) + metadata_json = json.dumps(nw.metadata(run_id)) metadata_json1 = json.loads(metadata_json) print("3" + str(metadata_json1['run_id'])) From 8d7ac1f818869b508fd64caed67730f398141712 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Sat, 16 Feb 2019 15:25:57 +0100 Subject: [PATCH 147/215] change metadata_json runid --- ding0/io/ding0_pkl2db.py | 1 + ding0/io/export.py | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 924046ed..0d7a4279 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -41,6 +41,7 @@ # ToDo: run_id changes for every file nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) + nw.metadata(run_id=20190215122822) # Extract data from network and put it to DataFrames for csv and for oedb diff --git a/ding0/io/export.py b/ding0/io/export.py index a5b8e6b0..7b807311 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -75,14 +75,14 @@ def export_network(nw, mode='', run_id=None): print("1 " + str(run_id)) if not run_id: run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") - metadata_json = json.dumps(nw.metadata) + else: print("test") # nw.metadata['run_id'] = run_id - print("2 " + str(run_id)) - metadata_json = json.dumps(nw.metadata(run_id)) + metadata_json = json.dumps(nw.metadata) + metadata_json['run_id'] = run_id metadata_json1 = json.loads(metadata_json) print("3" + 
str(metadata_json1['run_id'])) ############################## From c07586da79b3b7593ba8b1cf99d7b5b8c107027f Mon Sep 17 00:00:00 2001 From: Jonas H Date: Sat, 16 Feb 2019 15:33:25 +0100 Subject: [PATCH 148/215] fixed wrong usage of nw.metadata() --- ding0/io/ding0_pkl2db.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 0d7a4279..924046ed 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -41,7 +41,6 @@ # ToDo: run_id changes for every file nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) - nw.metadata(run_id=20190215122822) # Extract data from network and put it to DataFrames for csv and for oedb From aaabeeba55978a97ac638e3a137683f405651990 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 18 Feb 2019 13:02:14 +0100 Subject: [PATCH 149/215] provide correct run id for pickle export --- ding0/io/ding0_pkl2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 924046ed..7b47c604 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -44,7 +44,7 @@ # Extract data from network and put it to DataFrames for csv and for oedb - network = export_network(nw, run_id=20190215122822) + network = export_network(nw, run_id=20180823154014) # Send data to OEDB From 9dfd96c972dc674d3c4653d1ce7c2889a45c067d Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 18 Feb 2019 13:19:03 +0100 Subject: [PATCH 150/215] testing another run id --- ding0/io/db_export.py | 5 +++-- ding0/io/export.py | 10 +--------- 2 files changed, 4 insertions(+), 11 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 4532bba1..5ccabcc0 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -763,10 +763,11 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): if db_versioning.empty: metadata_json = json.loads(network.metadata_json) - print(str(metadata_json['run_id'])) + metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], 'description': str(metadata_json)}, index=[0]) + print(str(metadata_json['run_id'])) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) # 1 @@ -843,7 +844,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): if __name__ == "__main__": # #########SQLAlchemy and DB table################ - oedb_engine = connection(section='vpn_oedb') + oedb_engine = connection(section='oedb') session = sessionmaker(bind=oedb_engine)() # Testing Database diff --git a/ding0/io/export.py b/ding0/io/export.py index 7b807311..a356a71a 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -75,16 +75,8 @@ def export_network(nw, mode='', run_id=None): print("1 " + str(run_id)) if not run_id: run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") - - else: - print("test") - # nw.metadata['run_id'] = run_id - print("2 " + str(run_id)) - + metadata_json = json.dumps(nw.metadata) - metadata_json['run_id'] = run_id - metadata_json1 = json.loads(metadata_json) - print("3" + str(metadata_json1['run_id'])) ############################## ############################# # go through the grid collecting info From 35f580a92fad809fe4de6a631e6f9c3a71f1d7df Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 18 Feb 2019 13:37:12 +0100 Subject: [PATCH 151/215] testing another run id --- ding0/io/ding0_pkl2db.py | 3 ++- ding0/io/export.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/ding0/io/ding0_pkl2db.py 
b/ding0/io/ding0_pkl2db.py index 7b47c604..7c6850a2 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -44,7 +44,8 @@ # Extract data from network and put it to DataFrames for csv and for oedb - network = export_network(nw, run_id=20180823154014) + # network = export_network(nw, run_id=20180823154014) + network = export_network(nw) # Send data to OEDB diff --git a/ding0/io/export.py b/ding0/io/export.py index a356a71a..b1eaa7fc 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -75,7 +75,7 @@ def export_network(nw, mode='', run_id=None): print("1 " + str(run_id)) if not run_id: run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") - + metadata_json = json.dumps(nw.metadata) ############################## ############################# From ea1a653714897409c67d30dc529130095aaf4546 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 18 Feb 2019 13:43:32 +0100 Subject: [PATCH 152/215] testing another run id, insert else option for setting the run id in nw.metadata --- ding0/io/ding0_pkl2db.py | 6 ++++-- ding0/io/export.py | 2 ++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 7c6850a2..813ad657 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -44,8 +44,10 @@ # Extract data from network and put it to DataFrames for csv and for oedb - # network = export_network(nw, run_id=20180823154014) - network = export_network(nw) + network = export_network(nw, run_id=20180823154014) + + # This will fail after the first grid no becuase every grid gets another run_d + # network = export_network(nw) # Send data to OEDB diff --git a/ding0/io/export.py b/ding0/io/export.py index b1eaa7fc..12cdd4ae 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -75,6 +75,8 @@ def export_network(nw, mode='', run_id=None): print("1 " + str(run_id)) if not run_id: run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") + else: + nw.metadata['run_id'] = run_id metadata_json = json.dumps(nw.metadata) ############################## From 12729e98bf061c688eae48d3bb6cb1c4fd65eef4 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 18 Feb 2019 13:47:38 +0100 Subject: [PATCH 153/215] insert print(run id) for testing --- ding0/io/db_export.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 5ccabcc0..eac53645 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -763,11 +763,11 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): if db_versioning.empty: metadata_json = json.loads(network.metadata_json) - + print("db_e 1:" + str(metadata_json['run_id'])) metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], 'description': str(metadata_json)}, index=[0]) - print(str(metadata_json['run_id'])) + print("db_e 2:" + str(metadata_json['run_id'])) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) # 1 From 39801b5a2ae63d078ec6fb891bdb3a17f8c45c1b Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 18 Feb 2019 14:02:49 +0100 Subject: [PATCH 154/215] overwrite run id --- ding0/io/db_export.py | 7 ++----- ding0/io/export.py | 2 ++ 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index eac53645..b06c894f 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -382,7 +382,7 @@ def create_wkt_element(geom): """ if geom is not None: # return WKTElement(geom, srid=int(SRID), extended=True) - # ToDo: 
hardset srid find better approach to make it work for dingo run and dingo from pickel + # ToDo: hardset srid for pickle export, find better approach to make it work for dingo run and dingo from pickel return WKTElement(geom, srid=4326, extended=True) else: return None @@ -865,9 +865,6 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): # SRID = str(int(nw.config['geo']['srid'])) SRID = int(nw.config['geo']['srid']) - # provide run id for pickle upload - run_id = 20190215122822 - # provide run_id, note that the run_id is unique to the DB table # if not set it will be set # RUN_ID = datetime.now().strftime("%Y%m%d%H%M%S") @@ -882,7 +879,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): # mv_grid_districts_no=mv_grid_districts) # # # return values from export_network() as tupels - # network = export_network(nw, run_id=run_id) + # network = export_network(nw) # any list of NetworkDing0 also provides run_id diff --git a/ding0/io/export.py b/ding0/io/export.py index 12cdd4ae..6ede907c 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -73,10 +73,12 @@ def export_network(nw, mode='', run_id=None): ############################## # from datetime import datetime print("1 " + str(run_id)) + nw.metadata['run_id'] = run_id if not run_id: run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") else: nw.metadata['run_id'] = run_id + print("else" + str(nw.metadata['run_id'])) metadata_json = json.dumps(nw.metadata) ############################## From 07f25bb95ee8ff1332783cd90c169d18a33d1bee Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 18 Feb 2019 14:31:12 +0100 Subject: [PATCH 155/215] overwrite run id in metadata_json --- ding0/io/export.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/ding0/io/export.py b/ding0/io/export.py index 6ede907c..2d5e6c23 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -76,11 +76,17 @@ def export_network(nw, mode='', run_id=None): nw.metadata['run_id'] = run_id if not run_id: run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") + metadata_json = json.dumps(nw.metadata) else: - nw.metadata['run_id'] = run_id - print("else" + str(nw.metadata['run_id'])) + # ToDo: This seems to have no effect so check why + # nw.metadata['run_id'] = run_id + + metadata_json = json.loads(nw.metadata) + metadata_json['run_id'] = run_id + metadata_json = json.dumps(metadata_json) + # print("else" + str(nw.metadata['run_id'])) + print("else " + metadata_json['run_id']) - metadata_json = json.dumps(nw.metadata) ############################## ############################# # go through the grid collecting info From ee86be019145b3d308c1f34ff4bdd01cf29ce3c3 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 18 Feb 2019 14:34:05 +0100 Subject: [PATCH 156/215] json needs to be string --- ding0/io/export.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ding0/io/export.py b/ding0/io/export.py index 2d5e6c23..00e38156 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -80,8 +80,8 @@ def export_network(nw, mode='', run_id=None): else: # ToDo: This seems to have no effect so check why # nw.metadata['run_id'] = run_id - - metadata_json = json.loads(nw.metadata) + metadata_json = json.dumps(nw.metadata) + metadata_json = json.loads(metadata_json) metadata_json['run_id'] = run_id metadata_json = json.dumps(metadata_json) # print("else" + str(nw.metadata['run_id'])) From 669af1773db4acc878d8071acce0210476208eff Mon Sep 17 00:00:00 2001 From: jh-RLI Date: 
Mon, 18 Feb 2019 14:37:11 +0100 Subject: [PATCH 157/215] minor change --- ding0/io/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/export.py b/ding0/io/export.py index 00e38156..2f90451c 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -85,7 +85,7 @@ def export_network(nw, mode='', run_id=None): metadata_json['run_id'] = run_id metadata_json = json.dumps(metadata_json) # print("else" + str(nw.metadata['run_id'])) - print("else " + metadata_json['run_id']) + # print("else " + metadata_json['run_id']) ############################## ############################# From 384b58ac61476e2e4f5a6f4699028c5341396b48 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 18 Feb 2019 14:56:27 +0100 Subject: [PATCH 158/215] handel provided run_id --- ding0/io/db_export.py | 4 +--- ding0/io/ding0_pkl2db.py | 11 +---------- ding0/io/export.py | 5 +---- 3 files changed, 3 insertions(+), 17 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index b06c894f..ee5b2036 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -763,11 +763,10 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): if db_versioning.empty: metadata_json = json.loads(network.metadata_json) - print("db_e 1:" + str(metadata_json['run_id'])) + print("run_id: " + str(metadata_json['run_id'])) metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], 'description': str(metadata_json)}, index=[0]) - print("db_e 2:" + str(metadata_json['run_id'])) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) # 1 @@ -803,7 +802,6 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): print('Griddistrict_' + str(grid_no) + '_has been exported to the database') else: - # network.metadata['run_id'] # 1 export_df_to_db(engine, schema, network.lines, "line", srid) # 2 diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 813ad657..70dd7488 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -34,7 +34,7 @@ # choose MV Grid Districts to import -grids = list(range(3, 6)) +grids = list(range(1, 3609)) # generate all the grids and push them to oedb for grid_no in grids: @@ -46,20 +46,11 @@ # Extract data from network and put it to DataFrames for csv and for oedb network = export_network(nw, run_id=20180823154014) - # This will fail after the first grid no becuase every grid gets another run_d - # network = export_network(nw) - - # Send data to OEDB - # SRID = str(int(nw.config['geo']['srid'])) srid = int(nw.config['geo']['srid']) # provide run id for pickle upload - - # ToDo:might not be necessary to use this metadata - # metadata_json = json.loads(network.metadata_json) - export_all_pkl_to_db(oedb_engine, SCHEMA, network, srid, grid_no) # db_tables_change_owner(oedb_engine, SCHEMA) \ No newline at end of file diff --git a/ding0/io/export.py b/ding0/io/export.py index 2f90451c..67001b36 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -72,20 +72,17 @@ def export_network(nw, mode='', run_id=None): lv_info = False ############################## # from datetime import datetime - print("1 " + str(run_id)) - nw.metadata['run_id'] = run_id if not run_id: run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") metadata_json = json.dumps(nw.metadata) else: # ToDo: This seems to have no effect so check why # nw.metadata['run_id'] = run_id + metadata_json = json.dumps(nw.metadata) metadata_json = json.loads(metadata_json) metadata_json['run_id'] = run_id metadata_json = 
json.dumps(metadata_json) - # print("else" + str(nw.metadata['run_id'])) - # print("else " + metadata_json['run_id']) ############################## ############################# From b1825d52b5e98a50debc367849d248717328860c Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 18 Feb 2019 15:18:12 +0100 Subject: [PATCH 159/215] show run id for every grid district wich currently gets exported --- ding0/io/db_export.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index ee5b2036..0722319a 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -760,9 +760,9 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): db_versioning = pd.read_sql_table(DING0_TABLES['versioning'], engine, schema, columns=['run_id', 'description']) - if db_versioning.empty: + metadata_json = json.loads(network.metadata_json) - metadata_json = json.loads(network.metadata_json) + if db_versioning.empty: print("run_id: " + str(metadata_json['run_id'])) metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], @@ -802,6 +802,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): print('Griddistrict_' + str(grid_no) + '_has been exported to the database') else: + print("run_id: " + str(metadata_json['run_id'])) # 1 export_df_to_db(engine, schema, network.lines, "line", srid) # 2 From 31c0b5f4d2ebf343ca36dcf5fba2dac55bffa528 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Tue, 19 Feb 2019 12:03:32 +0100 Subject: [PATCH 160/215] ding0_grid_district 42 (pickle file) does not exists, set the starting point to grid no 43 --- ding0/io/ding0_pkl2db.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 70dd7488..ddb0cb5c 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -34,15 +34,13 @@ # choose MV Grid Districts to import -grids = list(range(1, 3609)) +grids = list(range(43, 3609)) # generate all the grids and push them to oedb for grid_no in grids: - # ToDo: run_id changes for every file nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) - # Extract data from network and put it to DataFrames for csv and for oedb network = export_network(nw, run_id=20180823154014) From 615f773fdd6fa8b18e888eac4df34436e3ba78ca Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 19 Feb 2019 15:04:55 +0100 Subject: [PATCH 161/215] ding0_grid_district 62 (pickle) not present, change grid districht starting point to 63 --- ding0/io/ding0_pkl2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index ddb0cb5c..33b2983b 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -34,7 +34,7 @@ # choose MV Grid Districts to import -grids = list(range(43, 3609)) +grids = list(range(63, 3609)) # generate all the grids and push them to oedb for grid_no in grids: From 7dd1304e96cf2a02504f96d4b2277495ec3a3951 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 19 Feb 2019 19:04:44 +0100 Subject: [PATCH 162/215] ding0_grid_district 69 (pickle-file) not present, change grid districht starting point to 70 --- ding0/io/ding0_pkl2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 33b2983b..9d5f8d5c 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -34,7 +34,7 @@ # choose MV Grid Districts to import -grids = list(range(63, 3609)) +grids = 
list(range(70, 3609)) # generate all the grids and push them to oedb for grid_no in grids: From f330efa487d7530cc9d53d9ce4342141fffb5d1b Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 20 Feb 2019 08:50:26 +0100 Subject: [PATCH 163/215] grid district (pickle-file) 154 not present, test logger for missing grid districts --- ding0/io/confirm_existing_gd_in_pkl_dir.py | 35 ++++++++++++++++++++++ ding0/io/ding0_pkl2db.py | 2 +- 2 files changed, 36 insertions(+), 1 deletion(-) create mode 100644 ding0/io/confirm_existing_gd_in_pkl_dir.py diff --git a/ding0/io/confirm_existing_gd_in_pkl_dir.py b/ding0/io/confirm_existing_gd_in_pkl_dir.py new file mode 100644 index 00000000..5d342033 --- /dev/null +++ b/ding0/io/confirm_existing_gd_in_pkl_dir.py @@ -0,0 +1,35 @@ +import os +import logging +from ding0.tools.results import load_nd_from_pickle + +# LOG_FILE_PATH = 'pickle_log' +LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log') + +# does the file exist? +if not os.path.isfile(LOG_FILE_PATH): + print('ding0 log-file {file} not found. ' + 'This might be the first run of the tool. ' + .format(file=LOG_FILE_PATH)) + base_path = os.path.split(LOG_FILE_PATH)[0] + if not os.path.isdir(base_path): + os.mkdir(base_path) + print('The directory {path} was created.'.format(path=base_path)) + + with open(LOG_FILE_PATH, 'a') as log: + log.write("List of missing grid districts:") + pass + + +logging.basicConfig(filename=LOG_FILE_PATH, level=logging.DEBUG) + +# pickle file locations path to RLI_Daten_Flex01 mount +pkl_filepath = "/home/local/RL-INSTITUT/jonas.huber/rli/Daten_flexibel_01/Ding0/20180823154014" + +# choose MV Grid Districts to import +grids = list(range(61, 70)) + +for grid_no in grids: + try: + nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) + except: + logging.DEBUG('ding0_grids__{}.pkl not present to the current directory'.format(grid_no)) \ No newline at end of file diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 9d5f8d5c..8f10c6e8 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -34,7 +34,7 @@ # choose MV Grid Districts to import -grids = list(range(70, 3609)) +grids = list(range(154, 3609)) # generate all the grids and push them to oedb for grid_no in grids: From 4a0805b332a243e653e09833eb4dbc88a7f7f93f Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 20 Feb 2019 08:56:15 +0100 Subject: [PATCH 164/215] parse grid_no to string --- ding0/io/confirm_existing_gd_in_pkl_dir.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/confirm_existing_gd_in_pkl_dir.py b/ding0/io/confirm_existing_gd_in_pkl_dir.py index 5d342033..24f96fcc 100644 --- a/ding0/io/confirm_existing_gd_in_pkl_dir.py +++ b/ding0/io/confirm_existing_gd_in_pkl_dir.py @@ -32,4 +32,4 @@ try: nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) except: - logging.DEBUG('ding0_grids__{}.pkl not present to the current directory'.format(grid_no)) \ No newline at end of file + logging.DEBUG('ding0_grids__{}.pkl not present to the current directory'.format(str(grid_no))) \ No newline at end of file From 7db04fe8011defc2b4574f9d071086096bda7d42 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 20 Feb 2019 14:48:06 +0100 Subject: [PATCH 165/215] grid district (pickle-file) 197 not present, test logger for missing grid districts --- ding0/io/confirm_existing_gd_in_pkl_dir.py | 2 +- ding0/io/ding0_pkl2db.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/ding0/io/confirm_existing_gd_in_pkl_dir.py b/ding0/io/confirm_existing_gd_in_pkl_dir.py index 24f96fcc..96af5fca 100644 --- a/ding0/io/confirm_existing_gd_in_pkl_dir.py +++ b/ding0/io/confirm_existing_gd_in_pkl_dir.py @@ -32,4 +32,4 @@ try: nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) except: - logging.DEBUG('ding0_grids__{}.pkl not present to the current directory'.format(str(grid_no))) \ No newline at end of file + logging.debug('ding0_grids__{}.pkl not present to the current directory'.format(grid_no)) \ No newline at end of file diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 8f10c6e8..264bbcd2 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -34,7 +34,7 @@ # choose MV Grid Districts to import -grids = list(range(154, 3609)) +grids = list(range(198, 3609)) # generate all the grids and push them to oedb for grid_no in grids: From 7f8cce50ec86d87f56e7e680a7ab4eeb840d30d9 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 20 Feb 2019 14:58:42 +0100 Subject: [PATCH 166/215] test logger with standard file handler for missing grid districts --- ding0/io/confirm_existing_gd_in_pkl_dir.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/ding0/io/confirm_existing_gd_in_pkl_dir.py b/ding0/io/confirm_existing_gd_in_pkl_dir.py index 96af5fca..5ceb2d36 100644 --- a/ding0/io/confirm_existing_gd_in_pkl_dir.py +++ b/ding0/io/confirm_existing_gd_in_pkl_dir.py @@ -20,7 +20,7 @@ pass -logging.basicConfig(filename=LOG_FILE_PATH, level=logging.DEBUG) +# logging.basicConfig(filename=LOG_FILE_PATH, level=logging.DEBUG) # pickle file locations path to RLI_Daten_Flex01 mount pkl_filepath = "/home/local/RL-INSTITUT/jonas.huber/rli/Daten_flexibel_01/Ding0/20180823154014" @@ -32,4 +32,7 @@ try: nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) except: - logging.debug('ding0_grids__{}.pkl not present to the current directory'.format(grid_no)) \ No newline at end of file + # logging.debug('ding0_grids__{}.pkl not present to the current directory'.format(grid_no)) + with open(LOG_FILE_PATH, 'a') as log: + log.write('ding0_grids__{}.pkl not present to the current directory'.format(grid_no)) + pass \ No newline at end of file From 87f591c78e8aff96c2a19b95369472fd0a59d327 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 20 Feb 2019 15:11:31 +0100 Subject: [PATCH 167/215] handle non-existing pickle files, added logger (to file) for non-existing pickle files --- ding0/io/confirm_existing_gd_in_pkl_dir.py | 2 +- ding0/io/ding0_pkl2db.py | 30 +++++++++++++++++++--- 2 files changed, 27 insertions(+), 5 deletions(-) diff --git a/ding0/io/confirm_existing_gd_in_pkl_dir.py b/ding0/io/confirm_existing_gd_in_pkl_dir.py index 5ceb2d36..656628ab 100644 --- a/ding0/io/confirm_existing_gd_in_pkl_dir.py +++ b/ding0/io/confirm_existing_gd_in_pkl_dir.py @@ -34,5 +34,5 @@ except: # logging.debug('ding0_grids__{}.pkl not present to the current directory'.format(grid_no)) with open(LOG_FILE_PATH, 'a') as log: - log.write('ding0_grids__{}.pkl not present to the current directory'.format(grid_no)) + log.write('ding0_grids__{}.pkl not present to the current directory\n'.format(grid_no)) pass \ No newline at end of file diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 264bbcd2..e76c17a8 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -15,9 +15,25 @@ export_all_pkl_to_db, db_tables_change_owner, drop_ding0_db_tables from
sqlalchemy.orm import sessionmaker - -# define logger -logger = setup_logger() +# ToDo: Create logger as function +################################## +# LOG_FILE_PATH = 'pickle_log' +LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log') + +# does the file exist? +if not os.path.isfile(LOG_FILE_PATH): + print('ding0 log-file {file} not found. ' + 'This might be the first run of the tool. ' + .format(file=LOG_FILE_PATH)) + base_path = os.path.split(LOG_FILE_PATH)[0] + if not os.path.isdir(base_path): + os.mkdir(base_path) + print('The directory {path} was created.'.format(path=base_path)) + + with open(LOG_FILE_PATH, 'a') as log: + log.write("List of missing grid districts:") + pass +###################################################### # database connection/ session oedb_engine = db.connection(section='oedb') @@ -39,7 +55,13 @@ # generate all the grids and push them to oedb for grid_no in grids: - nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) + try: + nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) + except: + print('Log entry in: {}'.format(LOG_FILE_PATH)) + with open(LOG_FILE_PATH, 'a') as log: + log.write('ding0_grids__{}.pkl not present to the current directory\n'.format(grid_no)) + pass # Extract data from network and put it to DataFrames for csv and for oedb network = export_network(nw, run_id=20180823154014) From 675d6d01eb4f63c19a2a1b8aa56bdd53513af390 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 20 Feb 2019 15:20:38 +0100 Subject: [PATCH 168/215] continue to netxt grid dirstricht if log entry done --- ding0/io/confirm_existing_gd_in_pkl_dir.py | 3 ++- ding0/io/ding0_pkl2db.py | 6 ++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/ding0/io/confirm_existing_gd_in_pkl_dir.py b/ding0/io/confirm_existing_gd_in_pkl_dir.py index 656628ab..62fd5085 100644 --- a/ding0/io/confirm_existing_gd_in_pkl_dir.py +++ b/ding0/io/confirm_existing_gd_in_pkl_dir.py @@ -35,4 +35,5 @@ # logging.debug('ding0_grids__{}.pkl not present to the current directory'.format(grid_no)) with open(LOG_FILE_PATH, 'a') as log: log.write('ding0_grids__{}.pkl not present to the current directory\n'.format(grid_no)) - pass \ No newline at end of file + pass + continue \ No newline at end of file diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index e76c17a8..f1743ce8 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -50,7 +50,7 @@ # choose MV Grid Districts to import -grids = list(range(198, 3609)) +grids = list(range(197, 3609)) # generate all the grids and push them to oedb for grid_no in grids: @@ -58,11 +58,13 @@ try: nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) except: - print('Log entry in: {}'.format(LOG_FILE_PATH)) + print('Create log entry in: {}'.format(LOG_FILE_PATH)) with open(LOG_FILE_PATH, 'a') as log: log.write('ding0_grids__{}.pkl not present to the current directory\n'.format(grid_no)) pass + continue + # Extract data from network and put it to DataFrames for csv and for oedb network = export_network(nw, run_id=20180823154014) From d2736473b3bb0213e1abcf5daefa1c91775fdab0 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Wed, 20 Feb 2019 15:24:07 +0100 Subject: [PATCH 169/215] new line in log file --- ding0/io/confirm_existing_gd_in_pkl_dir.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/confirm_existing_gd_in_pkl_dir.py 
b/ding0/io/confirm_existing_gd_in_pkl_dir.py index 62fd5085..a0a094bd 100644 --- a/ding0/io/confirm_existing_gd_in_pkl_dir.py +++ b/ding0/io/confirm_existing_gd_in_pkl_dir.py @@ -16,7 +16,7 @@ print('The directory {path} was created.'.format(path=base_path)) with open(LOG_FILE_PATH, 'a') as log: - log.write("List of missing grid districts:") + log.write("List of missing grid districts:\n") pass From 489a76fc003660007d3c2e062b0eece2d99cc3d0 Mon Sep 17 00:00:00 2001 From: Jonas Huber Date: Mon, 4 Mar 2019 13:51:01 +0000 Subject: [PATCH 170/215] set new griddistrict range --- ding0/io/ding0_pkl2db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index f1743ce8..5798bef5 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -50,7 +50,7 @@ # choose MV Grid Districts to import -grids = list(range(197, 3609)) +grids = list(range(1407, 3609)) # generate all the grids and push them to oedb for grid_no in grids: @@ -75,4 +75,4 @@ export_all_pkl_to_db(oedb_engine, SCHEMA, network, srid, grid_no) -# db_tables_change_owner(oedb_engine, SCHEMA) \ No newline at end of file +# db_tables_change_owner(oedb_engine, SCHEMA) From 3ffa4c3bce5e99a634de1ede79b3c4fd9fb9c7f8 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 4 Mar 2019 15:17:06 +0100 Subject: [PATCH 171/215] change tablenames for tetsing --- ding0/io/db_export.py | 68 ++++++++++++++++++++-------------------- ding0/io/ding0_pkl2db.py | 4 +-- 2 files changed, 36 insertions(+), 36 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 0722319a..1dee766f 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -43,41 +43,41 @@ # Metadata folder Path METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'io', 'metadatastrings') -# set your Table names -DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning', - 'line': 'ego_grid_ding0_line', - 'lv_branchtee': 'ego_grid_ding0_lv_branchtee', - 'lv_generator': 'ego_grid_ding0_lv_generator', - 'lv_load': 'ego_grid_ding0_lv_load', - 'lv_grid': 'ego_grid_ding0_lv_grid', - 'lv_station': 'ego_grid_ding0_lv_station', - 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer', - 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping', - 'mv_branchtee': 'ego_grid_ding0_mv_branchtee', - 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker', - 'mv_generator': 'ego_grid_ding0_mv_generator', - 'mv_load': 'ego_grid_ding0_mv_load', - 'mv_grid': 'ego_grid_ding0_mv_grid', - 'mv_station': 'ego_grid_ding0_mv_station', - 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer'} - # # set your Table names -# DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning_test', -# 'line': 'ego_grid_ding0_line_test', -# 'lv_branchtee': 'ego_grid_ding0_lv_branchtee_test', -# 'lv_generator': 'ego_grid_ding0_lv_generator_test', -# 'lv_load': 'ego_grid_ding0_lv_load_test', -# 'lv_grid': 'ego_grid_ding0_lv_grid_test', -# 'lv_station': 'ego_grid_ding0_lv_station_test', -# 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer_test', -# 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping_test', -# 'mv_branchtee': 'ego_grid_ding0_mv_branchtee_test', -# 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker_test', -# 'mv_generator': 'ego_grid_ding0_mv_generator_test', -# 'mv_load': 'ego_grid_ding0_mv_load_test', -# 'mv_grid': 'ego_grid_ding0_mv_grid_test', -# 'mv_station': 'ego_grid_ding0_mv_station_test', -# 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer_test'} +# DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning', +# 'line': 
'ego_grid_ding0_line', +# 'lv_branchtee': 'ego_grid_ding0_lv_branchtee', +# 'lv_generator': 'ego_grid_ding0_lv_generator', +# 'lv_load': 'ego_grid_ding0_lv_load', +# 'lv_grid': 'ego_grid_ding0_lv_grid', +# 'lv_station': 'ego_grid_ding0_lv_station', +# 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer', +# 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping', +# 'mv_branchtee': 'ego_grid_ding0_mv_branchtee', +# 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker', +# 'mv_generator': 'ego_grid_ding0_mv_generator', +# 'mv_load': 'ego_grid_ding0_mv_load', +# 'mv_grid': 'ego_grid_ding0_mv_grid', +# 'mv_station': 'ego_grid_ding0_mv_station', +# 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer'} + +# set your Table names +DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning_test', + 'line': 'ego_grid_ding0_line_test', + 'lv_branchtee': 'ego_grid_ding0_lv_branchtee_test', + 'lv_generator': 'ego_grid_ding0_lv_generator_test', + 'lv_load': 'ego_grid_ding0_lv_load_test', + 'lv_grid': 'ego_grid_ding0_lv_grid_test', + 'lv_station': 'ego_grid_ding0_lv_station_test', + 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer_test', + 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping_test', + 'mv_branchtee': 'ego_grid_ding0_mv_branchtee_test', + 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker_test', + 'mv_generator': 'ego_grid_ding0_mv_generator_test', + 'mv_load': 'ego_grid_ding0_mv_load_test', + 'mv_grid': 'ego_grid_ding0_mv_grid_test', + 'mv_station': 'ego_grid_ding0_mv_station_test', + 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer_test'} diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index f1743ce8..926a6cee 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -50,7 +50,7 @@ # choose MV Grid Districts to import -grids = list(range(197, 3609)) +grids = list(range(1407, 3609)) # generate all the grids and push them to oedb for grid_no in grids: @@ -58,7 +58,7 @@ try: nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) except: - print('Create log entry in: {}'.format(LOG_FILE_PATH)) + print('Something went wrong, created log entry in: {}'.format(LOG_FILE_PATH)) with open(LOG_FILE_PATH, 'a') as log: log.write('ding0_grids__{}.pkl not present to the current directory\n'.format(grid_no)) pass From 241d2417e9c87353ddbfc67eec7fdea667aa3d8e Mon Sep 17 00:00:00 2001 From: Jonas H Date: Wed, 6 Mar 2019 14:30:31 +0100 Subject: [PATCH 172/215] changed lv_edges branch dict access in order to get the right value, changed column to polygon and set the griddiststict for testing the export --- ding0/io/db_export.py | 83 +++++++++++++++++++++++----------------- ding0/io/ding0_pkl2db.py | 2 +- ding0/io/export.py | 3 +- 3 files changed, 50 insertions(+), 38 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 1dee766f..205bc81c 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -43,41 +43,41 @@ # Metadata folder Path METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'io', 'metadatastrings') -# # set your Table names -# DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning', -# 'line': 'ego_grid_ding0_line', -# 'lv_branchtee': 'ego_grid_ding0_lv_branchtee', -# 'lv_generator': 'ego_grid_ding0_lv_generator', -# 'lv_load': 'ego_grid_ding0_lv_load', -# 'lv_grid': 'ego_grid_ding0_lv_grid', -# 'lv_station': 'ego_grid_ding0_lv_station', -# 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer', -# 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping', -# 'mv_branchtee': 'ego_grid_ding0_mv_branchtee', 
-# 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker', -# 'mv_generator': 'ego_grid_ding0_mv_generator', -# 'mv_load': 'ego_grid_ding0_mv_load', -# 'mv_grid': 'ego_grid_ding0_mv_grid', -# 'mv_station': 'ego_grid_ding0_mv_station', -# 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer'} - # set your Table names -DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning_test', - 'line': 'ego_grid_ding0_line_test', - 'lv_branchtee': 'ego_grid_ding0_lv_branchtee_test', - 'lv_generator': 'ego_grid_ding0_lv_generator_test', - 'lv_load': 'ego_grid_ding0_lv_load_test', - 'lv_grid': 'ego_grid_ding0_lv_grid_test', - 'lv_station': 'ego_grid_ding0_lv_station_test', - 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer_test', - 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping_test', - 'mv_branchtee': 'ego_grid_ding0_mv_branchtee_test', - 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker_test', - 'mv_generator': 'ego_grid_ding0_mv_generator_test', - 'mv_load': 'ego_grid_ding0_mv_load_test', - 'mv_grid': 'ego_grid_ding0_mv_grid_test', - 'mv_station': 'ego_grid_ding0_mv_station_test', - 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer_test'} +DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning', + 'line': 'ego_grid_ding0_line', + 'lv_branchtee': 'ego_grid_ding0_lv_branchtee', + 'lv_generator': 'ego_grid_ding0_lv_generator', + 'lv_load': 'ego_grid_ding0_lv_load', + 'lv_grid': 'ego_grid_ding0_lv_grid', + 'lv_station': 'ego_grid_ding0_lv_station', + 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer', + 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping', + 'mv_branchtee': 'ego_grid_ding0_mv_branchtee', + 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker', + 'mv_generator': 'ego_grid_ding0_mv_generator', + 'mv_load': 'ego_grid_ding0_mv_load', + 'mv_grid': 'ego_grid_ding0_mv_grid', + 'mv_station': 'ego_grid_ding0_mv_station', + 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer'} + +# # set your Table names +# DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning_test', +# 'line': 'ego_grid_ding0_line_test', +# 'lv_branchtee': 'ego_grid_ding0_lv_branchtee_test', +# 'lv_generator': 'ego_grid_ding0_lv_generator_test', +# 'lv_load': 'ego_grid_ding0_lv_load_test', +# 'lv_grid': 'ego_grid_ding0_lv_grid_test', +# 'lv_station': 'ego_grid_ding0_lv_station_test', +# 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer_test', +# 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping_test', +# 'mv_branchtee': 'ego_grid_ding0_mv_branchtee_test', +# 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker_test', +# 'mv_generator': 'ego_grid_ding0_mv_generator_test', +# 'mv_load': 'ego_grid_ding0_mv_load_test', +# 'mv_grid': 'ego_grid_ding0_mv_grid_test', +# 'mv_station': 'ego_grid_ding0_mv_station_test', +# 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer_test'} @@ -225,7 +225,8 @@ def create_ding0_sql_tables(engine, ding0_schema): Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), Column('id_db', BigInteger), Column('name', String(100)), - Column('geom', Geometry('MULTIPOLYGON', 4326)), + # Column('geom', Geometry('MULTIPOLYGON', 4326)), + Column('geom', Geometry('POLYGON', 4326)), Column('population', BigInteger), Column('voltage_nom', Float(10)), schema=ding0_schema, @@ -435,6 +436,11 @@ def df_sql_write(engine, schema, db_table, dataframe, geom_type=None, SRID=None) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('POINT', srid=int(SRID))}) + elif geom_type == 'POLYGON': + sql_write_df['geom'] = 
sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('POLYGON', srid=int(SRID))}) + elif geom_type == 'MULTIPOLYGON': sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, @@ -466,6 +472,11 @@ def df_sql_write(engine, schema, db_table, dataframe, geom_type=None, SRID=None) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('POINT', srid=int(SRID))}) + elif geom_type == 'POLYGON': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('POLYGON', srid=int(SRID))}) + elif geom_type == 'MULTIPOLYGON': sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, @@ -518,7 +529,7 @@ def export_df_to_db(engine, schema, df, tabletype, srid=None): df_sql_write(engine, schema, DING0_TABLES['lv_load'], df, 'POINT', srid) elif tabletype == 'lv_grid': - df_sql_write(engine, schema, DING0_TABLES['lv_grid'], df, 'MULTIPOLYGON', srid) + df_sql_write(engine, schema, DING0_TABLES['lv_grid'], df, 'POLYGON', srid) elif tabletype == 'lv_station': df_sql_write(engine, schema, DING0_TABLES['lv_station'], df, 'POINT', srid) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 926a6cee..bcd25b15 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -50,7 +50,7 @@ # choose MV Grid Districts to import -grids = list(range(1407, 3609)) +grids = list(range(473, 476)) # generate all the grids and push them to oedb for grid_no in grids: diff --git a/ding0/io/export.py b/ding0/io/export.py index 67001b36..1094321a 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -28,6 +28,7 @@ from shapely.geometry import Point, MultiPoint, MultiLineString, LineString, MultiPolygon, shape, mapping +#ToDo: python 3.5??? 
yes was new in version 2.6 Network = namedtuple( 'Network', [ @@ -558,7 +559,7 @@ def aggregate_loads(la_center, aggr): lines_dict[lines_idx] = { 'id': branch['branch'].id_db, 'edge_name': '_'.join( - [branch.__class__.__name__, + [branch['branch'].__class__.__name__, str(branch['branch'].id_db)]), 'grid_name': lv_grid_name, 'type_name': branch[ From 45e0a3c937572fa927380b1b7535f143bffffc48 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Wed, 6 Mar 2019 14:36:55 +0100 Subject: [PATCH 173/215] set griddist to test range --- ding0/io/ding0_pkl2db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 926a6cee..bcd25b15 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -50,7 +50,7 @@ # choose MV Grid Districts to import -grids = list(range(1407, 3609)) +grids = list(range(473, 476)) # generate all the grids and push them to oedb for grid_no in grids: From 14d316ff82b02a7e05cd2fcf10a654bd7881336c Mon Sep 17 00:00:00 2001 From: Jonas H Date: Wed, 6 Mar 2019 19:44:43 +0100 Subject: [PATCH 174/215] add all griddist., changed geometry Type of lv_grid tabele, now handels lv_grid geometry as GEOMETRY --- ding0/io/db_export.py | 36 ++++++++++++++++++------------------ ding0/io/ding0_pkl2db.py | 2 +- ding0/io/export.py | 2 ++ 3 files changed, 21 insertions(+), 19 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 205bc81c..51de2865 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -226,7 +226,7 @@ def create_ding0_sql_tables(engine, ding0_schema): Column('id_db', BigInteger), Column('name', String(100)), # Column('geom', Geometry('MULTIPOLYGON', 4326)), - Column('geom', Geometry('POLYGON', 4326)), + Column('geom', Geometry('GEOMETRY', 4326)), Column('population', BigInteger), Column('voltage_nom', Float(10)), schema=ding0_schema, @@ -389,7 +389,7 @@ def create_wkt_element(geom): return None -def df_sql_write(engine, schema, db_table, dataframe, geom_type=None, SRID=None): +def df_sql_write(engine, schema, db_table, dataframe, SRID=None, geom_type=None): """ Convert data frames such that their column names are made small and the index is renamed 'id_db' so as to @@ -516,50 +516,50 @@ def export_df_to_db(engine, schema, df, tabletype, srid=None): """ print("Exporting table type : {}".format(tabletype)) if tabletype == 'line': - df_sql_write(engine, schema, DING0_TABLES['line'], df, 'LINESTRING', srid) + df_sql_write(engine, schema, DING0_TABLES['line'], df, srid, 'LINESTRING') elif tabletype == 'lv_cd': df = df.drop(['lv_grid_id'], axis=1) - df_sql_write(engine, schema, DING0_TABLES['lv_branchtee'], df, 'POINT', srid) + df_sql_write(engine, schema, DING0_TABLES['lv_branchtee'], df, srid, 'POINT') elif tabletype == 'lv_gen': - df_sql_write(engine, schema, DING0_TABLES['lv_generator'], df, 'POINT', srid) + df_sql_write(engine, schema, DING0_TABLES['lv_generator'], df, srid, 'POINT') elif tabletype == 'lv_load': - df_sql_write(engine, schema, DING0_TABLES['lv_load'], df, 'POINT', srid) + df_sql_write(engine, schema, DING0_TABLES['lv_load'], df, srid, 'POINT') elif tabletype == 'lv_grid': - df_sql_write(engine, schema, DING0_TABLES['lv_grid'], df, 'POLYGON', srid) + df_sql_write(engine, schema, DING0_TABLES['lv_grid'], df, srid, 'GEOMETRY') elif tabletype == 'lv_station': - df_sql_write(engine, schema, DING0_TABLES['lv_station'], df, 'POINT', srid) + df_sql_write(engine, schema, DING0_TABLES['lv_station'], df, srid, 'POINT') elif tabletype == 'mvlv_trafo': - 
df_sql_write(engine, schema, DING0_TABLES['mvlv_transformer'], df, 'POINT', srid) + df_sql_write(engine, schema, DING0_TABLES['mvlv_transformer'], df, srid, 'POINT') elif tabletype == 'mvlv_mapping': df_sql_write(engine, schema, DING0_TABLES['mvlv_mapping'], df, srid) elif tabletype == 'mv_cd': - df_sql_write(engine, schema, DING0_TABLES['mv_branchtee'], df, 'POINT', srid) + df_sql_write(engine, schema, DING0_TABLES['mv_branchtee'], df, srid, 'POINT') elif tabletype == 'mv_cb': - df_sql_write(engine, schema, DING0_TABLES['mv_circuitbreaker'], df, 'POINT', srid) + df_sql_write(engine, schema, DING0_TABLES['mv_circuitbreaker'], df, srid, 'POINT') elif tabletype == 'mv_gen': - df_sql_write(engine, schema, DING0_TABLES['mv_generator'], df, 'POINT', srid) + df_sql_write(engine, schema, DING0_TABLES['mv_generator'], df, srid, 'POINT') elif tabletype == 'mv_load': - df_sql_write(engine, schema, DING0_TABLES['mv_load'], df, 'GEOMETRY', srid) + df_sql_write(engine, schema, DING0_TABLES['mv_load'], df, srid, 'GEOMETRY') elif tabletype == 'mv_grid': - df_sql_write(engine, schema, DING0_TABLES['mv_grid'], df, 'MULTIPOLYGON', srid) + df_sql_write(engine, schema, DING0_TABLES['mv_grid'], df, srid, 'MULTIPOLYGON') elif tabletype == 'mv_station': - df_sql_write(engine, schema, DING0_TABLES['mv_station'], df, 'POINT', srid) + df_sql_write(engine, schema, DING0_TABLES['mv_station'], df, srid, 'POINT') elif tabletype == 'hvmv_trafo': - df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df, 'POINT', srid) + df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df, srid, 'POINT') # ToDo: function works but throws unexpected error (versioning tbl dosent exists) @@ -745,7 +745,7 @@ def export_all_dataframes_to_db(engine, schema, network=None, srid=None): print("WARNING: There is no " + DING0_TABLES["versioning"] + " table in the schema: " + schema) -def export_all_pkl_to_db(engine, schema, network, srid, grid_no): +def export_all_pkl_to_db(engine, schema, network, srid, grid_no=None): """ This function basically works the same way export_all_dataframes_to_db() does. It is implemented to handel the diffrent ways of executing the functions: @@ -881,7 +881,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no): # choose MV Grid Districts to import # needs to be a list with Integers - mv_grid_districts = list(range(1, 5)) + mv_grid_districts = list(range(473, 475)) # # run DING0 on selected MV Grid District diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index bcd25b15..49ea235a 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -50,7 +50,7 @@ # choose MV Grid Districts to import -grids = list(range(473, 476)) +grids = list(range(1, 3609)) # generate all the grids and push them to oedb for grid_no in grids: diff --git a/ding0/io/export.py b/ding0/io/export.py index 1094321a..9b94e730 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -12,6 +12,7 @@ __url__ = "https://github.com/openego/ding0/blob/master/LICENSE" __author__ = "nesnoj, gplssm" +import os import numpy as np import pandas as pd from collections import namedtuple @@ -28,6 +29,7 @@ from shapely.geometry import Point, MultiPoint, MultiLineString, LineString, MultiPolygon, shape, mapping +os.environ['PROJ_LIB']=r"B:\Anaconda3\Library\share" #ToDo: python 3.5??? 
Network = namedtuple( 'Network', From 61970e1762e7081d0e04ee8d915d82c922a1c49e Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 21 Mar 2019 00:19:49 +0100 Subject: [PATCH 175/215] c.typo --- ding0/io/ego_dp_versioning.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/ego_dp_versioning.py b/ding0/io/ego_dp_versioning.py index d2e006fc..5de77b19 100644 --- a/ding0/io/ego_dp_versioning.py +++ b/ding0/io/ego_dp_versioning.py @@ -75,7 +75,7 @@ def migrate_tables_to_destination(from_db, s_schema, to_db, d_schema, runid=None 7. Call the function with parameters like: migrate_tables_to_destination(oedb_engine, SOURCE_SCHEMA, oedb_engine, DESTINATION_SCHEMA, RUN_ID) 8. In function migrate_tables_to_destination() check the function write_scenario_log() - 9. Check if the tables in your source schema exist and named eually to the table dict like in DING0_TABLES{} + 9. Check if the tables in your source schema exist and named equally to the table dict like in DING0_TABLES{} Parameters ---------- from_db: From ffb4b6d3eb9998402df31dffaeac40f9489e00ba Mon Sep 17 00:00:00 2001 From: Jonas Huber Date: Wed, 20 Mar 2019 23:21:46 +0000 Subject: [PATCH 176/215] adjust grid range for export --- ding0/io/ding0_pkl2db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 49ea235a..bf4d40c4 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -50,7 +50,7 @@ # choose MV Grid Districts to import -grids = list(range(1, 3609)) +grids = list(range(3554, 3609)) # generate all the grids and push them to oedb for grid_no in grids: @@ -75,4 +75,4 @@ export_all_pkl_to_db(oedb_engine, SCHEMA, network, srid, grid_no) -# db_tables_change_owner(oedb_engine, SCHEMA) \ No newline at end of file +# db_tables_change_owner(oedb_engine, SCHEMA) From a9e3fffca154ff9d80d76c69abbb21bef2d166d7 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 21 Mar 2019 00:30:51 +0100 Subject: [PATCH 177/215] try exporting grid 1658 2. time --- ding0/io/ding0_pkl2db.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index bf4d40c4..05decc89 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -49,8 +49,9 @@ pkl_filepath = "/home/local/RL-INSTITUT/jonas.huber/rli/Daten_flexibel_01/Ding0/20180823154014" -# choose MV Grid Districts to import -grids = list(range(3554, 3609)) +# choose MV Grid Districts to import use list of integers +# f. e.: grids = list(range(1, 3609)) +grids = [1658] # generate all the grids and push them to oedb for grid_no in grids: From becce7ea8f08534581cff7e501faf33822729276 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 21 Mar 2019 00:36:14 +0100 Subject: [PATCH 178/215] test export grid 1658 3.
time --- ding0/io/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/export.py b/ding0/io/export.py index 9b94e730..bf34465f 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -189,7 +189,7 @@ def aggregate_loads(la_center, aggr): sum([_.zensus_sum for _ in mv_district._lv_load_areas - if not np.isnan(_.zensus_sum)]), + if not pd.isnull(_.zensus_sum)]), 'voltage_nom': mv_district.mv_grid.v_level, # in kV 'run_id': run_id } From 17a500ad5a4ec115869c1fabce59efde31eb9158 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 21 Mar 2019 00:46:20 +0100 Subject: [PATCH 179/215] insert new run id to migrate recent exported ding0 data --- ding0/io/ego_dp_versioning.py | 4 ++-- ding0/io/export.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/ding0/io/ego_dp_versioning.py b/ding0/io/ego_dp_versioning.py index 5de77b19..ee0992bd 100644 --- a/ding0/io/ego_dp_versioning.py +++ b/ding0/io/ego_dp_versioning.py @@ -169,14 +169,14 @@ def change_owner(engine, table, role, schema): # source oedb_engine = connection(section='oedb') # # Testing Database -> destination - reiners_engine = connection(section='reiners_db') + # reiners_engine = connection(section='reiners_db') SOURCE_SCHEMA = 'model_draft' DESTINATION_SCHEMA = 'grid' tables = get_table_names(DING0_TABLES) # Enter the current run_id, Inserted in scenario_log - RUN_ID = '20181022185643' + RUN_ID = '20180823154014' # Metadata folder Path METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'io', 'metadatastrings') diff --git a/ding0/io/export.py b/ding0/io/export.py index bf34465f..b93cecd1 100644 --- a/ding0/io/export.py +++ b/ding0/io/export.py @@ -189,6 +189,7 @@ def aggregate_loads(la_center, aggr): sum([_.zensus_sum for _ in mv_district._lv_load_areas + #ToDo: Check if this returns any value -> changed np.inan to pd.isnull, fixes type error for the export if not pd.isnull(_.zensus_sum)]), 'voltage_nom': mv_district.mv_grid.v_level, # in kV 'run_id': run_id From 5bcf4253ad4157e89c2fe07a1a4596aed1d8ac45 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 3 May 2019 14:55:13 +0200 Subject: [PATCH 180/215] added dingo network as nametupe, tables can be accessed as tupels, tupels are pandas DataFrames, added loop for multipe griddistrict export --- ding0/io/file_export.py | 86 +++++++++++++++++++++++++++++------------ 1 file changed, 61 insertions(+), 25 deletions(-) diff --git a/ding0/io/file_export.py b/ding0/io/file_export.py index 7461d665..a9b90d74 100644 --- a/ding0/io/file_export.py +++ b/ding0/io/file_export.py @@ -16,18 +16,18 @@ import json +from ding0.tools.results import load_nd_from_pickle +from ding0.io.export import export_network +from ding0.io.exporter_log import pickle_export_logger -def export_data_tocsv(path, run_id, metadata_json, - lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, - lv_loads, - mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, - mv_loads, - lines, mvlv_mapping, csv_sep=','): - # make directory with run_id if it doesn't exist - os.makedirs(path, exist_ok=True) +def create_destination_dir(): + pass + + +def export_data_tocsv(path, network, csv_sep=','): # put a text file with the metadata - metadata = json.loads(metadata_json) + metadata = json.loads(network.metadata_json) with open(os.path.join(path, 'metadata.json'), 'w') as metafile: json.dump(metadata, metafile) @@ -35,19 +35,55 @@ def export_data_tocsv(path, run_id, metadata_json, def export_network_tocsv(path, table, tablename): return table.to_csv(os.path.join(path, tablename + '.csv'), 
sep=csv_sep) - export_network_tocsv(path, lv_grid, 'lv_grid') - export_network_tocsv(path, lv_gen, 'lv_generator') - export_network_tocsv(path, lv_cd, 'lv_branchtee') - export_network_tocsv(path, lv_stations, 'lv_station') - export_network_tocsv(path, mvlv_trafos, 'mvlv_transformer') - export_network_tocsv(path, lv_loads, 'lv_load') - export_network_tocsv(path, mv_grid, 'mv_grid') - export_network_tocsv(path, mv_gen, 'mv_generator') - export_network_tocsv(path, mv_cd, 'mv_branchtee') - export_network_tocsv(path, mv_stations, 'mv_station') - export_network_tocsv(path, hvmv_trafos, 'hvmv_transformer') - export_network_tocsv(path, mv_cb, 'mv_circuitbreaker') - export_network_tocsv(path, mv_loads, 'mv_load') - export_network_tocsv(path, lines, 'line') - export_network_tocsv(path, mvlv_mapping, 'mvlv_mapping') - # export_network_tocsv(path, areacenter, 'areacenter') \ No newline at end of file + export_network_tocsv(path, network.lv_grid, 'lv_grid') + export_network_tocsv(path, network.lv_gen, 'lv_generator') + export_network_tocsv(path, network.lv_cd, 'lv_branchtee') + export_network_tocsv(path, network.lv_stations, 'lv_station') + export_network_tocsv(path, network.mvlv_trafos, 'mvlv_transformer') + export_network_tocsv(path, network.lv_loads, 'lv_load') + export_network_tocsv(path, network.mv_grid, 'mv_grid') + export_network_tocsv(path, network.mv_gen, 'mv_generator') + export_network_tocsv(path, network.mv_cd, 'mv_branchtee') + export_network_tocsv(path, network.mv_stations, 'mv_station') + export_network_tocsv(path, network.hvmv_trafos, 'hvmv_transformer') + export_network_tocsv(path, network.mv_cb, 'mv_circuitbreaker') + export_network_tocsv(path, network.mv_loads, 'mv_load') + export_network_tocsv(path, network.lines, 'line') + export_network_tocsv(path, network.mvlv_mapping, 'mvlv_mapping') + # export_network_tocsv(path, areacenter, 'areacenter') + + +if __name__ == '__main__': + + # Path to user dir, Log file for missing Grid_Districts, Will be crated if not existing + LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log') + pickle_export_logger(LOG_FILE_PATH) + + # static path, Insert your own path + pkl_filepath = "/home/local/RL-INSTITUT/jonas.huber/rli/Daten_flexibel_01/Ding0/20180823154014" + + # static path, .csv will be stored here + destination_path = pkl_filepath + + # choose MV Grid Districts to import use list of integers + # f. 
e.: grids = list(range(1, 3609)) - 1 to 3608 + grids = [1658] + + # Loop over all selected Grids, exports every singele one to file like .csv + for grid_no in grids: + + try: + nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) + except: + print('Something went wrong, created log entry in: {}'.format(LOG_FILE_PATH)) + with open(LOG_FILE_PATH, 'a') as log: + log.write('ding0_grids__{}.pkl not present to the current directory\n'.format(grid_no)) + pass + + continue + + # Extract data from network and create DataFrames + # pandas DataFrames will be exported as .csv file + network_tupels = export_network(nw, run_id=nw.metadata['run_id']) + export_data_tocsv(destination_path, network_tupels) + From e316c3bd4a4ddfa2084f7e6e4f94b266e2caf297 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 3 May 2019 14:59:52 +0200 Subject: [PATCH 181/215] set up pkl file logger, logs all missing pkl files --- ding0/io/exporter_log.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 ding0/io/exporter_log.py diff --git a/ding0/io/exporter_log.py b/ding0/io/exporter_log.py new file mode 100644 index 00000000..38e09229 --- /dev/null +++ b/ding0/io/exporter_log.py @@ -0,0 +1,26 @@ +import os + +# LOG_FILE_PATH = 'pickle_log' +# LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log') + +def pickle_export_logger(log_file_path): + """ + Creates a list for pickle files that are missing for some reason. + Most likely the file does not exists @ the pickle file path dir. + + :param log_file_path: + :return: + """ + # does the file exist? + if not os.path.isfile(log_file_path): + print('ding0 log-file {file} not found. ' + 'This might be the first run of the tool. ' + .format(file=log_file_path)) + base_path = os.path.split(log_file_path)[0] + if not os.path.isdir(base_path): + os.mkdir(base_path) + print('The directory {path} was created.'.format(path=base_path)) + + with open(log_file_path, 'a') as log: + log.write("List of missing grid districts:") + pass From 4854a1dec9f0210faeed3c73ef26017d21ad0a45 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 3 May 2019 15:12:30 +0200 Subject: [PATCH 182/215] removed metadata.json creaton due to missing permission in dir --- ding0/io/file_export.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ding0/io/file_export.py b/ding0/io/file_export.py index a9b90d74..cebf0977 100644 --- a/ding0/io/file_export.py +++ b/ding0/io/file_export.py @@ -28,8 +28,9 @@ def create_destination_dir(): def export_data_tocsv(path, network, csv_sep=','): # put a text file with the metadata metadata = json.loads(network.metadata_json) - with open(os.path.join(path, 'metadata.json'), 'w') as metafile: - json.dump(metadata, metafile) + # can´t test this -> no permission for my user + # with open(os.path.join(path, 'metadata.json'), 'w') as metafile: + # json.dump(metadata, metafile) # Exports data to csv def export_network_tocsv(path, table, tablename): From ba5e149d3013e9f317b71ee1592863a4853d25ef Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 3 May 2019 15:23:38 +0200 Subject: [PATCH 183/215] changed destination folder for testing csv creation --- ding0/io/file_export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/file_export.py b/ding0/io/file_export.py index cebf0977..c272d64b 100644 --- a/ding0/io/file_export.py +++ b/ding0/io/file_export.py @@ -64,7 +64,7 @@ def export_network_tocsv(path, table, tablename): pkl_filepath = 
"/home/local/RL-INSTITUT/jonas.huber/rli/Daten_flexibel_01/Ding0/20180823154014" # static path, .csv will be stored here - destination_path = pkl_filepath + destination_path = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log') # choose MV Grid Districts to import use list of integers # f. e.: grids = list(range(1, 3609)) - 1 to 3608 From c73b53cd64fa8ed758c171667566c505aec733e7 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 3 May 2019 15:26:23 +0200 Subject: [PATCH 184/215] changed path to access the folder not file --- ding0/io/file_export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/file_export.py b/ding0/io/file_export.py index c272d64b..8838a54a 100644 --- a/ding0/io/file_export.py +++ b/ding0/io/file_export.py @@ -57,7 +57,7 @@ def export_network_tocsv(path, table, tablename): if __name__ == '__main__': # Path to user dir, Log file for missing Grid_Districts, Will be crated if not existing - LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log') + LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log') pickle_export_logger(LOG_FILE_PATH) # static path, Insert your own path From b95a7e13011033d8c331ad585f185f0102ffc205 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 3 May 2019 15:29:16 +0200 Subject: [PATCH 185/215] minor changed, fixed programming error --- ding0/io/file_export.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ding0/io/file_export.py b/ding0/io/file_export.py index 8838a54a..13afaeef 100644 --- a/ding0/io/file_export.py +++ b/ding0/io/file_export.py @@ -57,14 +57,14 @@ def export_network_tocsv(path, table, tablename): if __name__ == '__main__': # Path to user dir, Log file for missing Grid_Districts, Will be crated if not existing - LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log') + LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log') pickle_export_logger(LOG_FILE_PATH) # static path, Insert your own path pkl_filepath = "/home/local/RL-INSTITUT/jonas.huber/rli/Daten_flexibel_01/Ding0/20180823154014" # static path, .csv will be stored here - destination_path = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log') + destination_path = os.path.join(os.path.expanduser("~"), '.ding0_log') # choose MV Grid Districts to import use list of integers # f. e.: grids = list(range(1, 3609)) - 1 to 3608 From e054e8c9b5bfd67949fcdf2d434e79bc4c556eef Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 3 May 2019 15:34:25 +0200 Subject: [PATCH 186/215] changed destination_path --- ding0/io/file_export.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/ding0/io/file_export.py b/ding0/io/file_export.py index 13afaeef..f05c590a 100644 --- a/ding0/io/file_export.py +++ b/ding0/io/file_export.py @@ -60,14 +60,16 @@ def export_network_tocsv(path, table, tablename): LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log') pickle_export_logger(LOG_FILE_PATH) - # static path, Insert your own path + # static path + # Insert your own path pkl_filepath = "/home/local/RL-INSTITUT/jonas.huber/rli/Daten_flexibel_01/Ding0/20180823154014" # static path, .csv will be stored here - destination_path = os.path.join(os.path.expanduser("~"), '.ding0_log') + # change this to your own destination folder + destination_path = pkl_filepath # choose MV Grid Districts to import use list of integers - # f. e.: grids = list(range(1, 3609)) - 1 to 3608 + # Multiple grids f. 
e.: grids = list(range(1, 3609)) - 1 to 3608(all of the existing) grids = [1658] # Loop over all selected Grids, exports every singele one to file like .csv From 6c975656fa7f21dfca5472f666ba326033a65daa Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 3 May 2019 15:39:12 +0200 Subject: [PATCH 187/215] added another griddistricts for testing --- ding0/io/file_export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/file_export.py b/ding0/io/file_export.py index f05c590a..66312498 100644 --- a/ding0/io/file_export.py +++ b/ding0/io/file_export.py @@ -70,7 +70,7 @@ def export_network_tocsv(path, table, tablename): # choose MV Grid Districts to import use list of integers # Multiple grids f. e.: grids = list(range(1, 3609)) - 1 to 3608(all of the existing) - grids = [1658] + grids = list(range(1, 3)) # Loop over all selected Grids, exports every singele one to file like .csv for grid_no in grids: From 81709d50f3367a8662a330d4615065343cd6d4c4 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 3 May 2019 15:40:23 +0200 Subject: [PATCH 188/215] changed destination_path --- ding0/io/file_export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ding0/io/file_export.py b/ding0/io/file_export.py index 66312498..429641f0 100644 --- a/ding0/io/file_export.py +++ b/ding0/io/file_export.py @@ -66,7 +66,7 @@ def export_network_tocsv(path, table, tablename): # static path, .csv will be stored here # change this to your own destination folder - destination_path = pkl_filepath + destination_path = os.path.join(os.path.expanduser("~"), '.ding0_log') # choose MV Grid Districts to import use list of integers # Multiple grids f. e.: grids = list(range(1, 3609)) - 1 to 3608(all of the existing) From 87b3ac5e6acf83a335acdc664d5e0b5ebe08249d Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 3 May 2019 15:46:37 +0200 Subject: [PATCH 189/215] basicly working now, changed destination path, added advise, mind metadata.json still not tested --- ding0/io/file_export.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/ding0/io/file_export.py b/ding0/io/file_export.py index 429641f0..24192879 100644 --- a/ding0/io/file_export.py +++ b/ding0/io/file_export.py @@ -29,8 +29,8 @@ def export_data_tocsv(path, network, csv_sep=','): # put a text file with the metadata metadata = json.loads(network.metadata_json) # can´t test this -> no permission for my user - # with open(os.path.join(path, 'metadata.json'), 'w') as metafile: - # json.dump(metadata, metafile) + with open(os.path.join(path, 'metadata.json'), 'w') as metafile: + json.dump(metadata, metafile) # Exports data to csv def export_network_tocsv(path, table, tablename): @@ -55,6 +55,12 @@ def export_network_tocsv(path, table, tablename): if __name__ == '__main__': + """ + Advise: + First off check for existing .csv files in your destination folder. + Existing files will be extended. + Multiple grids will be stored all in one file. + """ # Path to user dir, Log file for missing Grid_Districts, Will be crated if not existing LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log') @@ -66,10 +72,11 @@ def export_network_tocsv(path, table, tablename): # static path, .csv will be stored here # change this to your own destination folder - destination_path = os.path.join(os.path.expanduser("~"), '.ding0_log') + destination_path = pkl_filepath # choose MV Grid Districts to import use list of integers # Multiple grids f. 
e.: grids = list(range(1, 3609)) - 1 to 3608(all of the existing) + # Single grids f. e.: grids = [2] grids = list(range(1, 3)) # Loop over all selected Grids, exports every singele one to file like .csv From 2feb89ebef2153ed4da63747a1d3a6b2bbbba3cc Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 12 Jul 2019 20:08:36 +0200 Subject: [PATCH 190/215] create config file to outsource inputs from code --- ding0/config/exporter_config.cfg | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 ding0/config/exporter_config.cfg diff --git a/ding0/config/exporter_config.cfg b/ding0/config/exporter_config.cfg new file mode 100644 index 00000000..3e7cadf9 --- /dev/null +++ b/ding0/config/exporter_config.cfg @@ -0,0 +1,6 @@ +[EXPORTER_DB] + SCHEMA = model_draft + +[GRID_DISTRICT_RANGE] + START = '' + END = '' From 29157418f8d4dc40e17728786c0915618175e593 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 12 Jul 2019 20:10:30 +0200 Subject: [PATCH 191/215] New settings file to store io specific settings and paths --- ding0/io/io_settings.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 ding0/io/io_settings.py diff --git a/ding0/io/io_settings.py b/ding0/io/io_settings.py new file mode 100644 index 00000000..87a41776 --- /dev/null +++ b/ding0/io/io_settings.py @@ -0,0 +1,13 @@ +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" +__author__ = "jh-RLI" + +import os +from configobj import ConfigObj + + +io_base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + +ding0_config_path = os.path.join(io_base_dir, 'config', 'exporter_config.cfg') +exporter_config = ConfigObj(ding0_config_path) From a85fbfb05ed619cb2e5c754b1d5344dbc7613c13 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 12 Jul 2019 20:12:33 +0200 Subject: [PATCH 192/215] New ding_db_tables file. 
outsource table definition and table metadata from db_export --- ding0/io/ding0_db_tables.py | 339 ++++++++++++++++++++++++++++++++++++ 1 file changed, 339 insertions(+) create mode 100644 ding0/io/ding0_db_tables.py diff --git a/ding0/io/ding0_db_tables.py b/ding0/io/ding0_db_tables.py new file mode 100644 index 00000000..102284a4 --- /dev/null +++ b/ding0/io/ding0_db_tables.py @@ -0,0 +1,339 @@ +import json +import os +import ding0 +from sqlalchemy import BigInteger, Boolean, Column, Float, ForeignKey, Integer, String, Table +from geoalchemy2.types import Geometry +from sqlalchemy.ext.declarative import declarative_base + + +DECLARATIVE_BASE = declarative_base() +METADATA = DECLARATIVE_BASE.metadata + +# Set the Database schema which you want to add the tables to +# SCHEMA = "model_draft" + +# Metadata folder Path +METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'io', 'metadatastrings') + +# set your Table names +DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning', + 'line': 'ego_grid_ding0_line', + 'lv_branchtee': 'ego_grid_ding0_lv_branchtee', + 'lv_generator': 'ego_grid_ding0_lv_generator', + 'lv_load': 'ego_grid_ding0_lv_load', + 'lv_grid': 'ego_grid_ding0_lv_grid', + 'lv_station': 'ego_grid_ding0_lv_station', + 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer', + 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping', + 'mv_branchtee': 'ego_grid_ding0_mv_branchtee', + 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker', + 'mv_generator': 'ego_grid_ding0_mv_generator', + 'mv_load': 'ego_grid_ding0_mv_load', + 'mv_grid': 'ego_grid_ding0_mv_grid', + 'mv_station': 'ego_grid_ding0_mv_station', + 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer'} + +# # set your Table names +# DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning_test', +# 'line': 'ego_grid_ding0_line_test', +# 'lv_branchtee': 'ego_grid_ding0_lv_branchtee_test', +# 'lv_generator': 'ego_grid_ding0_lv_generator_test', +# 'lv_load': 'ego_grid_ding0_lv_load_test', +# 'lv_grid': 'ego_grid_ding0_lv_grid_test', +# 'lv_station': 'ego_grid_ding0_lv_station_test', +# 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer_test', +# 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping_test', +# 'mv_branchtee': 'ego_grid_ding0_mv_branchtee_test', +# 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker_test', +# 'mv_generator': 'ego_grid_ding0_mv_generator_test', +# 'mv_load': 'ego_grid_ding0_mv_load_test', +# 'mv_grid': 'ego_grid_ding0_mv_grid_test', +# 'mv_station': 'ego_grid_ding0_mv_station_test', +# 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer_test'} + + + +def load_json_files(): + """ + Creats a list of all .json files in METADATA_STRING_FOLDER + + Parameters + ---------- + + Returns + ------- + jsonmetadata : dict + contains all .json file names from the folder + """ + + full_dir = os.walk(str(METADATA_STRING_FOLDER)) + jsonmetadata = [] + + for jsonfiles in full_dir: + for jsonfile in jsonfiles: + jsonmetadata = jsonfile + + return jsonmetadata + + +def prepare_metadatastring_fordb(table): + """ + Prepares the JSON String for the sql comment on table + + Required: The .json file names must contain the table name (for example from create_ding0_sql_tables()) + Instruction: Check the SQL "comment on table" for each table (e.g. 
use pgAdmin) + + Parameters + ---------- + table: str + table name of the sqlAlchemy table + + Returns + ------- + mdsstring:str + Contains the .json file as string + """ + + for json_file in load_json_files(): + json_file_path = os.path.join(METADATA_STRING_FOLDER, json_file) + with open(json_file_path, encoding='UTF-8') as jf: + if table in json_file: + # included for testing / or logging + # print("Comment on table: " + table + "\nusing this metadata string file: " + file + "\n") + mds = json.load(jf) + mdsstring = json.dumps(mds, indent=4, ensure_ascii=False) + return mdsstring + + +def create_ding0_sql_tables(engine, ding0_schema): + """ + Create the 16 ding0 tables + + Parameters + ---------- + engine: :py:mod:`sqlalchemy.engine.base.Engine` + Sqlalchemy database engine + + ding0_schema : str + The schema in which the tables are to be created + Default: static SCHEMA + """ + + # 1 versioning table + versioning = Table(DING0_TABLES['versioning'], METADATA, + Column('run_id', BigInteger, primary_key=True, autoincrement=False, nullable=False), + Column('description', String(6000)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('versioning') + ) + + # 2 ding0 lines table + ding0_line = Table(DING0_TABLES['line'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('edge_name', String(100)), + Column('grid_name', String(100)), + Column('node1', String(100)), + Column('node2', String(100)), + Column('type_kind', String(100)), + Column('type_name', String(100)), + Column('length', Float(10)), + Column('u_n', Float(10)), + Column('c', Float(10)), + Column('l', Float(10)), + Column('r', Float(10)), + Column('i_max_th', Float(10)), + Column('geom', Geometry('LINESTRING', 4326)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('line') + ) + + # 3 ding0 lv_branchtee table + ding0_lv_branchtee = Table(DING0_TABLES['lv_branchtee'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('lv_branchtee') + ) + + # 4 ding0 lv_generator table + ding0_lv_generator = Table(DING0_TABLES['lv_generator'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('la_id', BigInteger), + Column('name', String(100)), + Column('lv_grid_id', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('type', String(22)), + Column('subtype', String(30)), + Column('v_level', Integer), + Column('nominal_capacity', Float(10)), + Column('weather_cell_id', BigInteger), + Column('is_aggregated', Boolean), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('lv_generator') + ) + # 5 ding0 lv_load table + ding0_lv_load = Table(DING0_TABLES['lv_load'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('name', String(100)), + Column('lv_grid_id', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('consumption', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('lv_load') + ) + + # 6 + ding0_lv_grid = 
Table(DING0_TABLES['lv_grid'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('name', String(100)), + # Column('geom', Geometry('MULTIPOLYGON', 4326)), + Column('geom', Geometry('GEOMETRY', 4326)), + Column('population', BigInteger), + Column('voltage_nom', Float(10)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('lv_grid') + ) + + # 7 ding0 lv_station table + ding0_lv_station = Table(DING0_TABLES['lv_station'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('lv_station') + ) + + # 8 ding0 mvlv_transformer table + ding0_mvlv_transformer = Table(DING0_TABLES['mvlv_transformer'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + Column('voltage_op', Float(10)), + Column('s_nom', Float(10)), + Column('x', Float(10)), + Column('r', Float(10)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mvlv_transformer") + ) + + # 9 ding0 mvlv_mapping table + ding0_mvlv_mapping = Table(DING0_TABLES['mvlv_mapping'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('lv_grid_id', BigInteger), + Column('lv_grid_name', String(100)), + Column('mv_grid_id', BigInteger), + Column('mv_grid_name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mvlv_mapping") + ) + + # 10 ding0 mv_branchtee table + ding0_mv_branchtee = Table(DING0_TABLES['mv_branchtee'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_branchtee") + ) + + # 11 ding0 mv_circuitbreaker table + ding0_mv_circuitbreaker = Table(DING0_TABLES['mv_circuitbreaker'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + Column('status', String(10)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_circuitbreaker") + ) + + # 12 ding0 mv_generator table + ding0_mv_generator = Table(DING0_TABLES['mv_generator'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('name', String(100)), + Column('geom', Geometry('POINT', 4326)), + Column('type', String(22)), + Column('subtype', String(30)), + Column('v_level', Integer), + Column('nominal_capacity', Float(10)), + Column('weather_cell_id', BigInteger), + Column('is_aggregated', Boolean), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_generator") + ) + + # 13 ding0 mv_load table + ding0_mv_load = Table(DING0_TABLES['mv_load'], METADATA, + Column('id', Integer, 
primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('name', String(100)), + Column('geom', Geometry('GEOMETRY', 4326)), + Column('is_aggregated', Boolean), + Column('consumption', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_load") + ) + + # 14 ding0 mv_grid table + ding0_mv_grid = Table(DING0_TABLES['mv_grid'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('MULTIPOLYGON', 4326)), + Column('name', String(100)), + Column('population', BigInteger), + Column('voltage_nom', Float(10)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_grid") + ) + + # 15 ding0 mv_station table + ding0_mv_station = Table(DING0_TABLES['mv_station'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_station") + ) + + # 16 ding0 hvmv_transformer table + ding0_hvmv_transformer = Table(DING0_TABLES['hvmv_transformer'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + Column('voltage_op', Float(10)), + Column('s_nom', Float(10)), + Column('x', Float(10)), + Column('r', Float(10)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("hvmv_transformer") + ) + + # create all the tables + METADATA.create_all(engine, checkfirst=True) \ No newline at end of file From 8c944fe0e62eb9daaf5bf4e1a583a3f8058c8656 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 12 Jul 2019 20:18:46 +0200 Subject: [PATCH 193/215] Dropped unused imports, Added initail SRID variable with no value, Create_wkt_element now works without hardset srid, moved table definition from skript, replaced code that was not pythonic --- ding0/io/db_export.py | 460 ++++++------------------------------------ 1 file changed, 61 insertions(+), 399 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 51de2865..886de94d 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -10,361 +10,23 @@ __copyright__ = "Reiner Lemoine Institut gGmbH" __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" __url__ = "https://github.com/openego/ding0/blob/master/LICENSE" -__author__ = "nesnoj, gplssm, jh-RLI" +__author__ = "jh-RLI" import numpy as np import pandas as pd import json -import os - -from datetime import datetime - import re - from egoio.tools.db import connection - -import ding0 from ding0.io.export import export_network from ding0.core import NetworkDing0 -from ding0.tools.results import save_nd_to_pickle, load_nd_from_pickle - -from sqlalchemy import MetaData, ARRAY, BigInteger, Boolean, CheckConstraint, Column, Date, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, Integer, JSON, Numeric, SmallInteger, String, Table, Text, UniqueConstraint, text -from geoalchemy2.types import Geometry, Raster, WKTElement -from sqlalchemy.ext.declarative import declarative_base +from ding0.io.ding0_db_tables import DING0_TABLES, METADATA, create_ding0_sql_tables +from 
ding0.io.io_settings import exporter_config +from geoalchemy2.types import Geometry, WKTElement from sqlalchemy.orm import sessionmaker -DECLARATIVE_BASE = declarative_base() -METADATA = DECLARATIVE_BASE.metadata - -# Set the Database schema which you want to add the tables to -# SCHEMA = "model_draft" - -# Metadata folder Path -METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'io', 'metadatastrings') - -# set your Table names -DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning', - 'line': 'ego_grid_ding0_line', - 'lv_branchtee': 'ego_grid_ding0_lv_branchtee', - 'lv_generator': 'ego_grid_ding0_lv_generator', - 'lv_load': 'ego_grid_ding0_lv_load', - 'lv_grid': 'ego_grid_ding0_lv_grid', - 'lv_station': 'ego_grid_ding0_lv_station', - 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer', - 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping', - 'mv_branchtee': 'ego_grid_ding0_mv_branchtee', - 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker', - 'mv_generator': 'ego_grid_ding0_mv_generator', - 'mv_load': 'ego_grid_ding0_mv_load', - 'mv_grid': 'ego_grid_ding0_mv_grid', - 'mv_station': 'ego_grid_ding0_mv_station', - 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer'} - -# # set your Table names -# DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning_test', -# 'line': 'ego_grid_ding0_line_test', -# 'lv_branchtee': 'ego_grid_ding0_lv_branchtee_test', -# 'lv_generator': 'ego_grid_ding0_lv_generator_test', -# 'lv_load': 'ego_grid_ding0_lv_load_test', -# 'lv_grid': 'ego_grid_ding0_lv_grid_test', -# 'lv_station': 'ego_grid_ding0_lv_station_test', -# 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer_test', -# 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping_test', -# 'mv_branchtee': 'ego_grid_ding0_mv_branchtee_test', -# 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker_test', -# 'mv_generator': 'ego_grid_ding0_mv_generator_test', -# 'mv_load': 'ego_grid_ding0_mv_load_test', -# 'mv_grid': 'ego_grid_ding0_mv_grid_test', -# 'mv_station': 'ego_grid_ding0_mv_station_test', -# 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer_test'} - - - -def load_json_files(): - """ - Creats a list of all .json files in METADATA_STRING_FOLDER - - Parameters - ---------- - - Returns - ------- - jsonmetadata : dict - contains all .json file names from the folder - """ - - full_dir = os.walk(str(METADATA_STRING_FOLDER)) - jsonmetadata = [] - - for jsonfiles in full_dir: - for jsonfile in jsonfiles: - jsonmetadata = jsonfile - - return jsonmetadata - - -def prepare_metadatastring_fordb(table): - """ - Prepares the JSON String for the sql comment on table - - Required: The .json file names must contain the table name (for example from create_ding0_sql_tables()) - Instruction: Check the SQL "comment on table" for each table (e.g. 
use pgAdmin) - - Parameters - ---------- - table: str - table name of the sqlAlchemy table - - Returns - ------- - mdsstring:str - Contains the .json file as string - """ - - for json_file in load_json_files(): - json_file_path = os.path.join(METADATA_STRING_FOLDER, json_file) - with open(json_file_path, encoding='UTF-8') as jf: - if table in json_file: - # included for testing / or logging - # print("Comment on table: " + table + "\nusing this metadata string file: " + file + "\n") - mds = json.load(jf) - mdsstring = json.dumps(mds, indent=4, ensure_ascii=False) - return mdsstring - - -def create_ding0_sql_tables(engine, ding0_schema): - """ - Create the 16 ding0 tables - - Parameters - ---------- - engine: :py:mod:`sqlalchemy.engine.base.Engine` - Sqlalchemy database engine - - ding0_schema : str - The schema in which the tables are to be created - Default: static SCHEMA - """ - - # 1 versioning table - versioning = Table(DING0_TABLES['versioning'], METADATA, - Column('run_id', BigInteger, primary_key=True, autoincrement=False, nullable=False), - Column('description', String(6000)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb('versioning') - ) - - # 2 ding0 lines table - ding0_line = Table(DING0_TABLES['line'], METADATA, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('edge_name', String(100)), - Column('grid_name', String(100)), - Column('node1', String(100)), - Column('node2', String(100)), - Column('type_kind', String(100)), - Column('type_name', String(100)), - Column('length', Float(10)), - Column('u_n', Float(10)), - Column('c', Float(10)), - Column('l', Float(10)), - Column('r', Float(10)), - Column('i_max_th', Float(10)), - Column('geom', Geometry('LINESTRING', 4326)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb('line') - ) - - # 3 ding0 lv_branchtee table - ding0_lv_branchtee = Table(DING0_TABLES['lv_branchtee'], METADATA, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('name', String(100)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb('lv_branchtee') - ) - - # 4 ding0 lv_generator table - ding0_lv_generator = Table(DING0_TABLES['lv_generator'], METADATA, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('la_id', BigInteger), - Column('name', String(100)), - Column('lv_grid_id', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('type', String(22)), - Column('subtype', String(30)), - Column('v_level', Integer), - Column('nominal_capacity', Float(10)), - Column('weather_cell_id', BigInteger), - Column('is_aggregated', Boolean), - schema=ding0_schema, - comment=prepare_metadatastring_fordb('lv_generator') - ) - # 5 ding0 lv_load table - ding0_lv_load = Table(DING0_TABLES['lv_load'], METADATA, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('name', String(100)), - Column('lv_grid_id', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('consumption', String(100)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb('lv_load') - ) - - # 6 - ding0_lv_grid = 
Table(DING0_TABLES['lv_grid'], METADATA, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('name', String(100)), - # Column('geom', Geometry('MULTIPOLYGON', 4326)), - Column('geom', Geometry('GEOMETRY', 4326)), - Column('population', BigInteger), - Column('voltage_nom', Float(10)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb('lv_grid') - ) - - # 7 ding0 lv_station table - ding0_lv_station = Table(DING0_TABLES['lv_station'], METADATA, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('name', String(100)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb('lv_station') - ) - - # 8 ding0 mvlv_transformer table - ding0_mvlv_transformer = Table(DING0_TABLES['mvlv_transformer'], METADATA, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('name', String(100)), - Column('voltage_op', Float(10)), - Column('s_nom', Float(10)), - Column('x', Float(10)), - Column('r', Float(10)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("mvlv_transformer") - ) - - # 9 ding0 mvlv_mapping table - ding0_mvlv_mapping = Table(DING0_TABLES['mvlv_mapping'], METADATA, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('lv_grid_id', BigInteger), - Column('lv_grid_name', String(100)), - Column('mv_grid_id', BigInteger), - Column('mv_grid_name', String(100)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("mvlv_mapping") - ) - - # 10 ding0 mv_branchtee table - ding0_mv_branchtee = Table(DING0_TABLES['mv_branchtee'], METADATA, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('name', String(100)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("mv_branchtee") - ) - - # 11 ding0 mv_circuitbreaker table - ding0_mv_circuitbreaker = Table(DING0_TABLES['mv_circuitbreaker'], METADATA, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('name', String(100)), - Column('status', String(10)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("mv_circuitbreaker") - ) - - # 12 ding0 mv_generator table - ding0_mv_generator = Table(DING0_TABLES['mv_generator'], METADATA, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('name', String(100)), - Column('geom', Geometry('POINT', 4326)), - Column('type', String(22)), - Column('subtype', String(30)), - Column('v_level', Integer), - Column('nominal_capacity', Float(10)), - Column('weather_cell_id', BigInteger), - Column('is_aggregated', Boolean), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("mv_generator") - ) - - # 13 ding0 mv_load table - ding0_mv_load = Table(DING0_TABLES['mv_load'], METADATA, - Column('id', Integer, 
primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('name', String(100)), - Column('geom', Geometry('GEOMETRY', 4326)), - Column('is_aggregated', Boolean), - Column('consumption', String(100)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("mv_load") - ) - - # 14 ding0 mv_grid table - ding0_mv_grid = Table(DING0_TABLES['mv_grid'], METADATA, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('MULTIPOLYGON', 4326)), - Column('name', String(100)), - Column('population', BigInteger), - Column('voltage_nom', Float(10)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("mv_grid") - ) - - # 15 ding0 mv_station table - ding0_mv_station = Table(DING0_TABLES['mv_station'], METADATA, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('name', String(100)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("mv_station") - ) - - # 16 ding0 hvmv_transformer table - ding0_hvmv_transformer = Table(DING0_TABLES['hvmv_transformer'], METADATA, - Column('id', Integer, primary_key=True), - Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), - Column('id_db', BigInteger), - Column('geom', Geometry('POINT', 4326)), - Column('name', String(100)), - Column('voltage_op', Float(10)), - Column('s_nom', Float(10)), - Column('x', Float(10)), - Column('r', Float(10)), - schema=ding0_schema, - comment=prepare_metadatastring_fordb("hvmv_transformer") - ) - - # create all the tables - METADATA.create_all(engine, checkfirst=True) +# init SRID +SRID = None def create_wkt_element(geom): @@ -381,10 +43,19 @@ def create_wkt_element(geom): None : None Returns None if the data frame does not contain any geometry """ + if geom is not None: - # return WKTElement(geom, srid=int(SRID), extended=True) - # ToDo: hardset srid for pickle export, find better approach to make it work for dingo run and dingo from pickel - return WKTElement(geom, srid=4326, extended=True) + if SRID is None: + try: + from ding0.io.ding0_pkl2db import PICKLE_SRID + return WKTElement(geom, srid=PICKLE_SRID, extended=True) + except: + print('You need to provide a SRID or PICKLE_SRID') + print('PICKLE_SRID will be set to 4326') + PICKLE_SRID = 4326 + return WKTElement(geom, srid=PICKLE_SRID, extended=True) + else: + return WKTElement(geom, srid=SRID, extended=True) else: return None @@ -431,27 +102,27 @@ def df_sql_write(engine, schema, db_table, dataframe, SRID=None, geom_type=None) # Insert pd data frame with geom column if 'geom' in dataframe.columns: - if geom_type == 'POINT': + if geom_type is 'POINT': sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('POINT', srid=int(SRID))}) - elif geom_type == 'POLYGON': + elif geom_type is 'POLYGON': sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('POLYGON', srid=int(SRID))}) - elif geom_type == 'MULTIPOLYGON': + elif geom_type is 'MULTIPOLYGON': sql_write_df['geom'] = 
sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('MULTIPOLYGON', srid=int(SRID))}) - elif geom_type == 'LINESTRING': + elif geom_type is 'LINESTRING': sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('LINESTRING', srid=int(SRID))}) - elif geom_type == 'GEOMETRY': + elif geom_type is 'GEOMETRY': sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('GEOMETRY', srid=int(SRID))}) @@ -466,28 +137,28 @@ def df_sql_write(engine, schema, db_table, dataframe, SRID=None, geom_type=None) if 'geom' in dataframe.columns: # Insert pd Dataframe with geom column - if geom_type == 'POINT': + if geom_type is 'POINT': sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('POINT', srid=int(SRID))}) - elif geom_type == 'POLYGON': + elif geom_type is 'POLYGON': sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('POLYGON', srid=int(SRID))}) - elif geom_type == 'MULTIPOLYGON': + elif geom_type is 'MULTIPOLYGON': sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('MULTIPOLYGON', srid=int(SRID))}) - elif geom_type == 'LINESTRING': + elif geom_type is 'LINESTRING': sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('LINESTRING', srid=int(SRID))}) - elif geom_type == 'GEOMETRY': + elif geom_type is 'GEOMETRY': sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, dtype={'geom': Geometry('GEOMETRY', srid=int(SRID))}) @@ -515,50 +186,50 @@ def export_df_to_db(engine, schema, df, tabletype, srid=None): The current srid provided by the ding0 network """ print("Exporting table type : {}".format(tabletype)) - if tabletype == 'line': + if tabletype is 'line': df_sql_write(engine, schema, DING0_TABLES['line'], df, srid, 'LINESTRING') - elif tabletype == 'lv_cd': + elif tabletype is 'lv_cd': df = df.drop(['lv_grid_id'], axis=1) df_sql_write(engine, schema, DING0_TABLES['lv_branchtee'], df, srid, 'POINT') - elif tabletype == 'lv_gen': + elif tabletype is 'lv_gen': df_sql_write(engine, schema, DING0_TABLES['lv_generator'], df, srid, 'POINT') - elif tabletype == 'lv_load': + elif tabletype is 'lv_load': df_sql_write(engine, schema, DING0_TABLES['lv_load'], df, srid, 'POINT') - elif tabletype == 'lv_grid': + elif tabletype is 'lv_grid': df_sql_write(engine, schema, DING0_TABLES['lv_grid'], df, srid, 'GEOMETRY') - elif tabletype == 'lv_station': + elif tabletype is 'lv_station': df_sql_write(engine, schema, DING0_TABLES['lv_station'], df, srid, 'POINT') - elif tabletype == 'mvlv_trafo': + elif tabletype is 'mvlv_trafo': df_sql_write(engine, schema, DING0_TABLES['mvlv_transformer'], df, srid, 'POINT') - elif tabletype == 'mvlv_mapping': + elif 
tabletype is 'mvlv_mapping': df_sql_write(engine, schema, DING0_TABLES['mvlv_mapping'], df, srid) - elif tabletype == 'mv_cd': + elif tabletype is 'mv_cd': df_sql_write(engine, schema, DING0_TABLES['mv_branchtee'], df, srid, 'POINT') - elif tabletype == 'mv_cb': + elif tabletype is 'mv_cb': df_sql_write(engine, schema, DING0_TABLES['mv_circuitbreaker'], df, srid, 'POINT') - elif tabletype == 'mv_gen': + elif tabletype is 'mv_gen': df_sql_write(engine, schema, DING0_TABLES['mv_generator'], df, srid, 'POINT') - elif tabletype == 'mv_load': + elif tabletype is 'mv_load': df_sql_write(engine, schema, DING0_TABLES['mv_load'], df, srid, 'GEOMETRY') - elif tabletype == 'mv_grid': + elif tabletype is 'mv_grid': df_sql_write(engine, schema, DING0_TABLES['mv_grid'], df, srid, 'MULTIPOLYGON') - elif tabletype == 'mv_station': + elif tabletype is 'mv_station': df_sql_write(engine, schema, DING0_TABLES['mv_station'], df, srid, 'POINT') - elif tabletype == 'hvmv_trafo': + elif tabletype is 'hvmv_trafo': df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df, srid, 'POINT') @@ -854,48 +525,38 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no=None): if __name__ == "__main__": # #########SQLAlchemy and DB table################ - oedb_engine = connection(section='oedb') + # provide a config-file with valid connection credentials to access a Database. + # the config-file should be located in your user directory within a folder named '.config'. + oedb_engine = connection(section='vpn_oedb') session = sessionmaker(bind=oedb_engine)() - # Testing Database - # reiners_engine = connection(section='reiners_db') + # Set the Database schema which you want to add the tables to. + # Configure the SCHEMA in config file located in: ding0/config/exporter_config.cfg . + SCHEMA = exporter_config['EXPORTER_DB']['SCHEMA'] - # Set the Database schema which you want to add the tables to - SCHEMA = "model_draft" + # hardset for testing # SCHEMA = "public" - # #########Ding0 Network and NW Metadata################ - + # #########Ding0 Network################ # create ding0 Network instance nw = NetworkDing0(name='network') - # nw = load_nd_from_pickle(filename='ding0_grids_example.pkl', path='ding0\ding0\examples\ding0_grids_example.pkl') # srid # ToDo: Check why converted to int and string # SRID = str(int(nw.config['geo']['srid'])) SRID = int(nw.config['geo']['srid']) - # provide run_id, note that the run_id is unique to the DB table - # if not set it will be set - # RUN_ID = datetime.now().strftime("%Y%m%d%H%M%S") - - # choose MV Grid Districts to import - # needs to be a list with Integers - mv_grid_districts = list(range(473, 475)) - - - # # run DING0 on selected MV Grid District - # nw.run_ding0(session=session, - # mv_grid_districts_no=mv_grid_districts) - # - # # return values from export_network() as tupels - # network = export_network(nw) + # choose MV Grid Districts to import, use list of integers + # Multiple grids f. e.: grids = list(range(1, 3609)) - 1 to 3608(all of the existing) + # Single grids f. 
e.: grids = [2] + mv_grid_districts = list(range(2, 6)) + # run DING0 on selected MV Grid District + nw.run_ding0(session=session, + mv_grid_districts_no=mv_grid_districts) - # any list of NetworkDing0 also provides run_id - # nw_metadata = json.dumps(nw_metadata) - # ToDo:might not be necessary to use this metadata - # metadata_json = json.loads(network.metadata_json) + # return values from export_network() as tupels + network = export_network(nw) ##################################################### # Creates all defined tables @@ -903,5 +564,6 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no=None): drop_ding0_db_tables(oedb_engine) # db_tables_change_owner(oedb_engine, SCHEMA) + # ########################### !!! Mind existing tables in DB SCHEMA!!! ####################################### # Export all Dataframes returned form export_network(nw) to DB # export_all_dataframes_to_db(oedb_engine, SCHEMA, network=network, srid=SRID) From 5b3281a05da1f1cc60963d2ce962667dab393a92 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Fri, 12 Jul 2019 20:19:56 +0200 Subject: [PATCH 194/215] renamed variable SRID to PICKLE_SRID --- ding0/io/ding0_pkl2db.py | 48 ++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 26 deletions(-) diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py index 05decc89..3c6501be 100644 --- a/ding0/io/ding0_pkl2db.py +++ b/ding0/io/ding0_pkl2db.py @@ -1,39 +1,32 @@ # coding: utf-8 +"""This file is part of DINGO, the DIstribution Network GeneratOr. +DINGO is a tool to generate synthetic medium and low voltage power +distribution grids based on open data. -import os +It is developed in the project open_eGo: https://openegoproject.wordpress.com -import numpy as np -import json +DING0 lives at github: https://github.com/openego/ding0/ +The documentation is available on RTD: http://ding0.readthedocs.io""" -from egoio.tools import db +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" +__author__ = "jh-RLI" -# import required modules of DING0 -from ding0.tools.logger import setup_logger +import os +from egoio.tools import db +from ding0.io.exporter_log import pickle_export_logger from ding0.tools.results import load_nd_from_pickle from ding0.io.export import export_network from ding0.io.db_export import METADATA, create_ding0_sql_tables, \ export_all_pkl_to_db, db_tables_change_owner, drop_ding0_db_tables from sqlalchemy.orm import sessionmaker -# ToDo: Create logger as function ################################## # LOG_FILE_PATH = 'pickle_log' LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log') - -# does the file exist? -if not os.path.isfile(LOG_FILE_PATH): - print('ding0 log-file {file} not found. ' - 'This might be the first run of the tool. ' - .format(file=LOG_FILE_PATH)) - base_path = os.path.split(LOG_FILE_PATH)[0] - if not os.path.isdir(base_path): - os.mkdir(base_path) - print('The directory {path} was created.'.format(path=base_path)) - - with open(LOG_FILE_PATH, 'a') as log: - log.write("List of missing grid districts:") - pass -###################################################### +pickle_export_logger(LOG_FILE_PATH) +################################### # database connection/ session oedb_engine = db.connection(section='oedb') @@ -50,7 +43,7 @@ # choose MV Grid Districts to import use list of integers -# f. e.: grids = list(range(1, 3609)) +# f. 
e.: multiple grids = list(range(1, 3609)) grids = [1658] # generate all the grids and push them to oedb @@ -67,13 +60,16 @@ continue # Extract data from network and put it to DataFrames for csv and for oedb + # run_id is manually provided -> folder name or nw.metadata['run_id'] provide the run_id value network = export_network(nw, run_id=20180823154014) - # Send data to OEDB - srid = int(nw.config['geo']['srid']) + # set SRID form pickle file + PICKLE_SRID = int(nw.config['geo']['srid']) + # provide run id for pickle upload - export_all_pkl_to_db(oedb_engine, SCHEMA, network, srid, grid_no) + export_all_pkl_to_db(oedb_engine, SCHEMA, network, PICKLE_SRID, grid_no) + # db_tables_change_owner(oedb_engine, SCHEMA) From e4f22eaf01d12e87513831f2cc6cb1f8c732f00e Mon Sep 17 00:00:00 2001 From: Jonas H Date: Tue, 16 Jul 2019 18:56:57 +0200 Subject: [PATCH 195/215] add further description in docstring, added grid_no to dataframe export function, add todo remind to create for-loop when exporting ding0 from new run --- ding0/io/db_export.py | 60 ++++++++++++++++++++++++++----------------- 1 file changed, 36 insertions(+), 24 deletions(-) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 886de94d..8e80edf9 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -315,35 +315,45 @@ def change_owner(engine, table, role, schema): change_owner(engine, tab, 'oeuser', schema) -def export_all_dataframes_to_db(engine, schema, network=None, srid=None): +def export_all_dataframes_to_db(engine, schema, network=None, srid=None, grid_no=None): """ exports all data frames from func. export_network() to the db tables This works with a completely generated ding0 network(all grid districts have to be generated at once), all provided DataFrames will be uploaded. + Note_1: Executing this script for all GridDistricts available in Ding0 the export appears to be + quit time consuming. Plan carefully if you want to export more then a couple 100 GridDistricts at once. + + Note_2: Ding0 creates a dataset that is about 20GB large when running on all available (3608) GridDistricts. + Not using a for-loop when running ding0 + exporting ding0 would use too much memory capacity on most local + machines. Example usage with for-loop see script ding0_pkl2db.py. + Instructions: - 1. Create a database connection to the "OEDB" for example use the "from egoio.tools.db import connection" function + 1. Create a database connection/engine to the "OEDB" for example use the "from egoio.tools.db import + connection" function 2. Create a SA session: session = sessionmaker(bind=oedb_engine)() - 3. Create a ding0 network instance: nw = NetworkDing0(name='network') - 4. SET the srid from network config: SRID = str(int(nw.config['geo']['srid'])) - 5. Choose the grid_districts for the ding0 run (nothing chosen all grid_districts will be imported) - mv_grid_districts = [3040, 3045] - 6. run ding0 on selected mv_grid_district - 7. call function export_network from export.py -> this provides the run_id, network metadata as json - and all ding0 result data as pandas data frames - 8. json.loads the metadata, it is needed to provide the values for the - versioning table - 9. Create a database connection to your database for example use the "from egoio.tools.db import connection" function - 10. SET the SCHEMA you want to use within the connected database - 11. Create the ding0 sql tables: create_ding0_sql_tables(engine, SCHEMA) - 12. 
Call the function: export_all_dataframes_to_db(engine, SCHEMA) with your destination database and SCHEMA - additionally: - 13. If you used the "OEDB" as destination database change the table owner using the function: + 3. SET the SCHEMA you want to use destination for table creation and data export. + One can set the SCHEMA within the exporter_config.cfg file located in ding0/config folder + 4. Create a ding0 network instance: nw = NetworkDing0(name='network') + 5. SET the srid from network-object config: SRID = str(int(nw.config['geo']['srid'])) + 6. Choose the grid_districts for the ding0 run (nothing chosen all grid_districts will be imported) + mv_grid_districts = [3040, 3045], see Note_2. + 7. run ding0 on selected mv_grid_district + 8. call function export_network from export.py -> this provides the network metadata + as json and all ding0 result data as pandas data frames + 9. Create the ding0 sql tables: create_ding0_sql_tables(engine, SCHEMA) + 10. Call the function: export_all_dataframes_to_db(engine, SCHEMA) with your destination database and SCHEMA + + Additionally, if you use the "OEDB/OEP" as destination database: + 11. change the table owner using the function: db_tables_change_owner(engine, schema) - 14. If you need to drop the table call the function drop_ding0_db_tables(engine, schema) immediately after + 12. If you need to drop the table call the function drop_ding0_db_tables(engine, schema) immediately after the called create function: create_ding0_sql_tables(oedb_engine, SCHEMA) drop_ding0_db_tables(oedb_engine, SCHEMA) - 15. Check if all metadata strings are present to the current folder and added as SQL comment on table + + Validate: + 13. Check if all metadata strings are present to the current folder and added as SQL comment on table + 14. Check if the export worked as expected and filled the tables with data Parameters ---------- @@ -355,6 +365,10 @@ def export_all_dataframes_to_db(engine, schema, network=None, srid=None): All the return values(Data Frames) from export_network() srid: int The current srid provided by the ding0 network + grid_no: int + Optional: not implemented yet. ID of currently exported GridDistrict. + This is used to get further information while exporting a range of grids using a for-loop. + Usage example see export_all_pkl_to_db(). """ if engine.dialect.has_table(engine, DING0_TABLES["versioning"], schema=schema): @@ -365,10 +379,8 @@ def export_all_dataframes_to_db(engine, schema, network=None, srid=None): # if metadata_json['run_id'] not in db_versioning['run_id']: # Use if just one run_id should be present to the DB table if db_versioning.empty: - # json.load the metadata_json + metadata_json = json.loads(network.metadata_json) - # this leads to wrong run_id if run_id is SET in __main__ -> 'run_id': metadata_json['run_id'] - # try: metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], 'description': str(metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) @@ -421,7 +433,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no=None): This function basically works the same way export_all_dataframes_to_db() does. It is implemented to handel the diffrent ways of executing the functions: If grids are loaded form pickle files a for loop is included and every grid district will be uploaded one after - another. This chances the requirements for this function. + another. This changes the requirements for the export to db functionality. 
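For illustration, the workflow listed in the numbered steps above for export_all_dataframes_to_db() can be condensed into a short sketch. This is only a hedged example that reuses the calls shown in this module's __main__ block; the schema source and the chosen grid districts are examples, not fixed values:

oedb_engine = connection(section='oedb')
session = sessionmaker(bind=oedb_engine)()
SCHEMA = exporter_config['EXPORTER_DB']['SCHEMA']  # schema taken from exporter_config.cfg

nw = NetworkDing0(name='network')
SRID = int(nw.config['geo']['srid'])
nw.run_ding0(session=session, mv_grid_districts_no=[3040, 3045])  # small selection, see Note_2

network = export_network(nw)

create_ding0_sql_tables(oedb_engine, SCHEMA)
export_all_dataframes_to_db(oedb_engine, SCHEMA, network=network, srid=SRID)
# only when exporting to the OEDB/OEP:
# db_tables_change_owner(oedb_engine, SCHEMA)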
Parameters ---------- @@ -533,7 +545,6 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no=None): # Set the Database schema which you want to add the tables to. # Configure the SCHEMA in config file located in: ding0/config/exporter_config.cfg . SCHEMA = exporter_config['EXPORTER_DB']['SCHEMA'] - # hardset for testing # SCHEMA = "public" @@ -551,6 +562,7 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no=None): # Single grids f. e.: grids = [2] mv_grid_districts = list(range(2, 6)) + # ToDo: Add for-loop here. When exporting ding0, the GridDistricts should be created and exported incrementally. # run DING0 on selected MV Grid District nw.run_ding0(session=session, mv_grid_districts_no=mv_grid_districts) From 8cd7120c8df52e8548a5d4443339e3d3114abe6a Mon Sep 17 00:00:00 2001 From: Jonas H Date: Tue, 16 Jul 2019 18:57:37 +0200 Subject: [PATCH 196/215] Add missing package --- ding0/io/skeleton.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/ding0/io/skeleton.yml b/ding0/io/skeleton.yml index 59ce7bde..20298126 100644 --- a/ding0/io/skeleton.yml +++ b/ding0/io/skeleton.yml @@ -32,4 +32,5 @@ dependencies: - oedialect - pypsa==0.11.0 - workalendar + - ConfigObj From f4f2e99c86ee7f9d4f7a8b013d3ec34ad05ecf59 Mon Sep 17 00:00:00 2001 From: Jonas H Date: Tue, 16 Jul 2019 18:58:27 +0200 Subject: [PATCH 197/215] add further information in docstring --- ding0/io/exporter_log.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/ding0/io/exporter_log.py b/ding0/io/exporter_log.py index 38e09229..0c58bfe6 100644 --- a/ding0/io/exporter_log.py +++ b/ding0/io/exporter_log.py @@ -1,3 +1,17 @@ +"""This file is part of DINGO, the DIstribution Network GeneratOr. +DINGO is a tool to generate synthetic medium and low voltage power +distribution grids based on open data. + +It is developed in the project open_eGo: https://openegoproject.wordpress.com + +DING0 lives at github: https://github.com/openego/ding0/ +The documentation is available on RTD: http://ding0.readthedocs.io""" + +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" +__author__ = "jh-RLI" + import os # LOG_FILE_PATH = 'pickle_log' @@ -8,6 +22,12 @@ def pickle_export_logger(log_file_path): Creates a list for pickle files that are missing for some reason. Most likely the file does not exists @ the pickle file path dir. + Log missing ding0 GridDistricts: + The export_log provides functionality that sets up a logging file. One needs to provide a path to the destination + the logfile shall be stored in. + The file can be opened at any point in the code and input can be provided. This logger is mainly used within an exception + to log the missing GridDistricts that could not be created by ding0.
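A hypothetical usage sketch for pickle_export_logger() inside the pickle upload loop (the file-name pattern and the append call are assumptions based on the older inline logging code, not a fixed API):

LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log')
pickle_export_logger(LOG_FILE_PATH)  # creates the log directory and file if they are missing

for grid_no in grids:
    try:
        nw = load_nd_from_pickle(filename='ding0_grids__{}.pkl'.format(grid_no))
    except Exception:
        # record the grid district whose pickle file is missing or could not be created
        with open(LOG_FILE_PATH, 'a') as log:
            log.write('\n{}'.format(grid_no))
        continue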
+ :param log_file_path: :return: """ From 8259f283d64635260b00422ae7161156b19c92dd Mon Sep 17 00:00:00 2001 From: Jonas H Date: Tue, 16 Jul 2019 18:59:47 +0200 Subject: [PATCH 198/215] remove incorrect information --- ding0/io/env.txt | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/ding0/io/env.txt b/ding0/io/env.txt index 1793dbc5..06632671 100644 --- a/ding0/io/env.txt +++ b/ding0/io/env.txt @@ -1,5 +1,8 @@ -This is the conda env that was used to develope the export funktionality -- open it and change the "name" to whatever you would like to call your environment +This is a short introduction to the anaconda environment that was used to develop +the export functionality. + +- open skelleton.yml file in this directory and change the "name" to whatever you + would like to call your environment - use the command "conda env create -f skeleton.yml" - run the command "pip install -U -e ego.io\" - run the command "pip install -U -e ding0\" From c1002fb010136702e2b1c6b331b86b875c5567da Mon Sep 17 00:00:00 2001 From: Jonas H Date: Tue, 16 Jul 2019 19:01:50 +0200 Subject: [PATCH 199/215] describe the ding0 exporter and provide information on what io functionality is implemented --- doc/usage_details.rst | 46 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index d6bd969a..768b3b2b 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -155,6 +155,52 @@ Parameter Description Unit km_cable Cumulative length of underground cables km ========= ======================================= ==== +Ding0 IO : Ding0 exporter +========================= +Introduction +-------------- +The ding0 exporter contains different input/output functionality. The main component is the export.py. +The following will explain the main usage of the exporter.py and introduce inputs and outputs. + +The exporter contains the function export_network(). This function takes three parameters: a network +object, the mode which is not relevant here, and the run_id. The run_id should be set if the network +is not created but imported from pickle files. + +What is the input? +------------------ +Ding0Network can be run for multiple GridDistricts. As mentioned a Ding0Network can be created from a +versioned run that has been stored in pickle files or a new run can be initialized. The difference will +be most obviously be noticed by looking at the run_id. +So the input would the ding0 network and the run_id. + +What is the output? +------------------- +The function export_network() returns a list of nametuples. The nametuple contains 17 elements. The main +purpose of that is to return Pandas dataframes that store the Ding0Network data. But 2 elements also provide +further information like the actual run_id that was set or newly created and a metadata_json that stores +the assumptions ding0 uses to create the network topology. +Since Pandas is a Python package that is used very frequently the IO functionality of pandas can be used +for several tasks. See pandas IO. + +What IO functionality is implemented? +------------------------------------- +Currently the Ding0Network can be serialized as pickle files in python. It can also be stored in: +Tables on a relational database as well as saved to CSV file. + +IO settings +----------- +The io settings are provided within a config file that is located in the ding0/config folder. The file is +named exporter_config.cfg. 
In the current state it just stores the database schema that is used as destination +for any exports to a database. The config file is imported as config-object using the package "ConfigObj". + +Export ding0 to database +========================= +This exporter depends on existing tables. The table definition for tables that can store ding0 objects is used +to create these tables is provided. + +Ding0 export provides the Ding0 objects as Pandas dataframes. Pandas provides a IO functionality to export +Dataframes to a database. This is used to export the dataframes. + CSV file export =============== From d7fea0b905486c7408815e4a18dad04da7ee0fa5 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 5 Aug 2019 14:45:24 +0200 Subject: [PATCH 200/215] enhanced usage description on the ding0 exporter --- doc/usage_details.rst | 53 +++++++++++++++++++++++++++++++------------ 1 file changed, 38 insertions(+), 15 deletions(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 768b3b2b..ce24ef9d 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -159,19 +159,23 @@ Ding0 IO : Ding0 exporter ========================= Introduction -------------- -The ding0 exporter contains different input/output functionality. The main component is the export.py. -The following will explain the main usage of the exporter.py and introduce inputs and outputs. +The ding0 exporter provides the ding0 network topology data in a structured format. The main component is the +export.py which transforms the ding0 network to several pandas dataframes. The main purpose is to provide +the data in a table-based format using a broadly known technology. The following will explain +the main usage of the exporter.py and introduce its inputs and possible outputs. The exporter contains the function export_network(). This function takes three parameters: a network -object, the mode which is not relevant here, and the run_id. The run_id should be set if the network -is not created but imported from pickle files. +object, the mode which is currently not implemented, and the run_id. + +Note: +The run_id should be set if the network is not created but imported from pickle files. What is the input? ------------------ -Ding0Network can be run for multiple GridDistricts. As mentioned a Ding0Network can be created from a -versioned run that has been stored in pickle files or a new run can be initialized. The difference will -be most obviously be noticed by looking at the run_id. -So the input would the ding0 network and the run_id. +Ding0Network can be run for a single or multiple GridDistricts. As mentioned, a Ding0Network must be created +from a versioned Ding0 "run" that has been stored in pickle files before. The other option is to initialize +a new version by running ding0 again. The difference will be most obviously noticed by looking at the run_id. +So the input would be the ding0 network and the coherent run_id. What is the output? ------------------- @@ -184,28 +188,47 @@ for several tasks. See pandas IO. What IO functionality is implemented? ------------------------------------- -Currently the Ding0Network can be serialized as pickle files in python. It can also be stored in: -Tables on a relational database as well as saved to CSV file. +Currently the Ding0Network can be serialized as pickle files in python. It can also be stored in +tables on a relational database as well as saved to CSV files. IO settings ----------- The io settings are provided within a config file that is located in the ding0/config folder.
The file is named exporter_config.cfg. In the current state it just stores the database schema that is used as destination for any exports to a database. The config file is imported as config-object using the package "ConfigObj". +In the future all static options should be stored in this file. Export ding0 to database ========================= -This exporter depends on existing tables. The table definition for tables that can store ding0 objects is used -to create these tables is provided. +Ding0 Table +----------- +In order to export the provided, ding0 related, Pandas dataframes to a database one must create specific tables +first. The table definition and metadata(using string version 1.3) is provided within the module "ding0_db_tables.py". + +The table definition is implemented using SQLAlchemy. + +Ding0 Table Metadata +-------------------- +The "ding0 metadata" JSON-strings are located in the "metadatastrings" folder within in the "ding0.io" folder. +They are created using the a versioned metadatastring witch is under continuous development. The provided Metadata +is using the string in version 1.3. -Ding0 export provides the Ding0 objects as Pandas dataframes. Pandas provides a IO functionality to export -Dataframes to a database. This is used to export the dataframes. +Database export +--------------- +This exporter depends on existing tables. +The functionality for this module is implemented in "db_export.py". This module provides functionality to establish +a database connection, create the tables, drop the tables, as well as change the database specific owner for each table. +The core functionality is the data export. This is implemented using Pandas dataframes and a provided Pandas.IO +functionality. +Note: The export to a Database will take a lot of time (about 1 Week). The reason for this is the quantity of the data +ding0 provides. Therefore it is not recommended to export all 3608 available GridDistricts at once. This could be error +prone caused by connection timeout or similar reasons. We work on speeding up the export in the future. CSV file export =============== -Ding0 objects are exported in csv files. +Ding0 objects can be exported in csv files. The functionality is provided by Pandas.IO. Lines ----- From 32b0bf5060353268c7352a3bcc28ae2eec844b55 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 5 Aug 2019 14:46:58 +0200 Subject: [PATCH 201/215] added new entry for future implementation --- ding0/config/exporter_config.cfg | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/ding0/config/exporter_config.cfg b/ding0/config/exporter_config.cfg index 3e7cadf9..a7682c9c 100644 --- a/ding0/config/exporter_config.cfg +++ b/ding0/config/exporter_config.cfg @@ -1,6 +1,10 @@ [EXPORTER_DB] SCHEMA = model_draft +[DING0_TABLE] + SRID = '' + METADATA_FOLDER = '' + [GRID_DISTRICT_RANGE] START = '' END = '' From 4ebf3ae8b75033709bc969d1d6f19b1fd95e845c Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 6 Aug 2019 15:14:10 +0200 Subject: [PATCH 202/215] added description on the ding0 related sql table structure. --- doc/usage_details.rst | 203 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 203 insertions(+) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index ce24ef9d..3b99785c 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -207,6 +207,209 @@ first. The table definition and metadata(using string version 1.3) is provided w The table definition is implemented using SQLAlchemy. +The following gives a short description for all tables. 
Note that all tables have the run_id form the versioning +table as foreignKey. All tables depend on the existing run_id. + +versioning +---------- +| name | description | unit | +|-------------|-----------------------------------|----------------| +| id | unambiguous unique numer | integer | +| run_id | time and date of table generation | yyyyMMddhhmmss | +| description | Used parameters for this run | string | + +line +---- +| name | description | unit | +|-----------|--------------------------------------------------------------------------------------|------------------| +| id | unambiguous unique numer | integer | +| run_id | time and date of table generation | yyyyMMddhhmmss | +| id_db | unambiguous number of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge | integer | +| edge_name | unambiguous name of edge | string | +| grid_name | unambiguous name of grid | string | +| node1 | id_db of first node | string | +| node2 | id_db of second node | string | +| type_kind | n/a | string | +| type_name | n/a | string | +| length | length of line as float | km | +| u_n | nominal voltage as float | kV | +| c | inductive resistance at 50Hz as float | uF/km | +| l | stored as float | mH/km | +| r | stored as float | Ohm/km | +| i_max_th | stored as float | A | +| geom | geometric coordinates | WGS84 LINESTRING | + +lv_branchtee.json +----------------- +| name | discription | unit | +|--------|---------------------------------------------------------------------------|----------------| +| id | unambiguous unique numer | integer | +| run_id | time and date of table generation | yyyyMMddhhmmss | +| geom | geometric coordinates | WGS84 POINT | +| id_db | unambiguous number of LV-Grid | integer | +| name | unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber | string | + +lv_generator.json +----------------- +| name | description | unit | +|------------------|----------------------------------------------------------------------|----------------| +| id | unambiguous unique numer | integer | +| run_id | time and date of table generation | yyyyMMddhhmmss | +| id_db | unambiguous number of LV-Grid | integer | +| la_id | FIXME | integer | +| name | unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#' | string | +| lv_grid_id | unambiguous id_db of LV-Grid | integer | +| geom | geometric coordinates | WGS84, POINT | +| type | type of generation {solar; biomass} | string | +| subtype | subtype of generation: {solar_roof_mounted, unknown; biomass} | string | +| v_level | voltage level of generator as integer | FIXME | +| nominal_capacity | nominal capacity as float | FIXME | +| is_aggregated | True if load is aggregated load, else False | boolean | +| weather_cell_id | unambiguous number of the corresponding weather cell | integer | + +lv_grid.json +------------ +| name | description | unit | +|-------------|---------------------------------------------------------|--------------------| +| id | unambiguous unique numer | integer | +| run_id | time and date of table generation | yyyyMMddhhmmss | +| id_db | unambiguous number of LV-Grid | integer | +| name | unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid# | string | +| geom | geometric coordinates | WGS84 MULTIPOLYGON | +| population | population in LV-Grid | integer | +| voltage_nom | voltage level of grid as float | kV | + +lv_load.json +------------ +| name | description | unit | +|-------------|------------------------------------------------------------------------------------|----------------| +| 
id | unambiguous unique numer | integer | +| run_id | time and date of table generation | yyyyMMddhhmmss | +| id_db | unambiguous number of LV-Grid | integer | +| name | unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#' | string | +| lv_grid_id | unambiguous id_db of LV-Grid | integer | +| geom | geometric coordinates | WGS84 POINT | +| consumption | type of load {residential, agricultural, industrial} and corresponding consumption | string | + +lv_station.json +--------------- +| name | description | unit | +|--------|-----------------------------------------------------|-------------| +| id | unambiguous unique numer | integer | +| run_id | time and date of table generation in yyyyMMddhhmmss | integer | +| id_db | unambiguous number of LV-Grid | integer | +| geom | geometric coordinates | WGS84 POINT | +| name | FIXME | string | + +mv_branchtee.json +----------------- +| name | description | unit | +|--------|-----------------------------------------------------------------------------|----------------| +| id | unambiguous unique numer | integer | +| run_id | time and date of table generation | yyyyMMddhhmmss | +| id_db | unambiguous number of MV-Grid | integer | +| geom | geometric coordinates | WGS84 POINT | +| name | unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#' | string | + +mv_circuitbreaker +----------------- +| name | description | unit | +|--------|-----------------------------------|----------------| +| id | unambiguous unique numer | integer | +| run_id | time and date of table generation | yyyyMMddhhmmss | +| id_db | unambiguous number of MV-Grid | integer | +| geom | geometric coordinates | WGS84 POINT | +| name | FIXME | string | +| status | FIXME | string | + +mv_generator +------------ +| name | description | unit | +|------------------|---------------------------------------------------------------------------------------------|----------------| +| id | unambiguous unique numer | integer | +| run_id | time and date of table generation | yyyyMMddhhmmss | +| id_db | unambiguous number of MV-Grid | integer | +| name | unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#' | string | +| geom | geometric coordinates | WGS84 POINT | +| type | type of generation: {solar; biomass} | string | +| subtype | subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas} | string | +| v_level | voltage level of generator as integer | FIXME | +| nominal_capacity | nominal capacity as float | FIXME | +| weather_cell_id | unambiguous number of the corresponding weather cell | integer | +| is_aggregated | True if load is aggregated load, else False | boolean | + +mv_grid +------- +| name | description | unit | +|-------------|----------------------------------------------------------|--------------------| +| id | unambiguous unique numer | integer | +| run_id | time and date of table generation | yyyyMMddhhmmss | +| id_db | unambiguous number of MV-Grid | integer | +| geom | geometric coordinates | WGS84 MULTIPOLYGON | +| name | unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#' | string | +| population | population in MV-Grid | integer | +| voltage_nom | voltage level of grid as float | kV | + +mv_load +------- +| name | description | unit | +|---------------|--------------------------------------------------------------------------------------------|----------------| +| id | unambiguous unique numer | integer | +| run_id | time and date of table generation | yyyyMMddhhmmss | +| name | 
unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#' | string | +| geom | geometric coordinates | WGS84 GEOMETRY | +| is_aggregated | True if load is aggregated load, else False | boolean | +| consumption | type of load {retail, residential, agricultural, industrial} and corresponding consumption | string | + +mv_station +---------- +| name | description | unit | +|--------|------------------------------------------------------------|----------------| +| id | unambiguous unique numer | integer | +| run_id | time and date of table generation | yyyyMMddhhmmss | +| id_db | unambiguous number of MV-Grid | integer | +| geom | geometric coordinates | WGS84 POINT | +| name | unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid# | string | + +mvlv_mapping +------------ +| name | description | unit | +|--------------|----------------------------------------------------------|---------| +| id | unambiguous unique numer | integer | +| run_id | time and date of table generation in yyyyMMddhhmmss | integer | +| lv_grid_id | unambiguous number of LV-Grid | integer | +| lv_grid_name | unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#' | string | +| mv_grid_id | unambiguous number of MV-Grid | integer | +| mv_grid_name | unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#' | string | + +mvlv_transformer +---------------- +| name | description | unit | +|------------|-----------------------------------------------------|-------------| +| id | unambiguous unique numer | integer | +| run_id | time and date of table generation in yyyyMMddhhmmss | integer | +| id_db | unambiguous number of LV-Grid | integer | +| geom | geometric coordinates | WGS84 POINT | +| name | FIXME | string | +| voltage_op | as float | kV | +| s_nom | nominal apparent power as float | kVA | +| x | as float | Ohm | +| r | as float | Ohm | + +hvmv_transformer.json +--------------------- +| name | description | unit | +|------------|-----------------------------------|----------------| +| id | unambiguous unique numer | integer | +| run_id | time and date of table generation | yyyyMMddhhmmss | +| geom | geometric coordinates | WGS84 POINT | +| name | FIXME | string | +| voltage_op | FIXME | float | +| s_nom | nominal apparent power as float | kVA | +| x | as float | Ohm | +| r | as float | Ohm | + + Ding0 Table Metadata -------------------- The "ding0 metadata" JSON-strings are located in the "metadatastrings" folder within in the "ding0.io" folder. 
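Read together, the column listings above serve as a blueprint for the SQLAlchemy declarations in ding0_db_tables.py. As a rough, hypothetical sketch only (the table names, string lengths and SRID below are placeholders, not the definitions ding0 actually ships), the versioning table and one dependent table could look like this:

from sqlalchemy import Table, Column, Integer, BigInteger, String, MetaData, ForeignKey
from geoalchemy2 import Geometry

metadata = MetaData(schema='model_draft')

versioning = Table('ding0_versioning', metadata,
                   Column('id', Integer, primary_key=True),
                   Column('run_id', BigInteger, unique=True),  # yyyyMMddhhmmss
                   Column('description', String(6000)))

lv_branchtee = Table('ding0_lv_branchtee', metadata,
                     Column('id', Integer, primary_key=True),
                     Column('run_id', BigInteger,
                            ForeignKey(versioning.c.run_id), nullable=False),
                     Column('id_db', BigInteger),
                     Column('geom', Geometry('POINT', srid=4326)),  # WGS84
                     Column('name', String(100)))

Every other table follows the same pattern: an id primary key, the run_id foreign key to the versioning table, and the columns listed in its description above.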
From a8f9233f588df42ccae6fe51bdf94f4ccec7720f Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 6 Aug 2019 15:15:52 +0200 Subject: [PATCH 203/215] Idea on how to improve the code structure when exporting all df to db --- ding0/io/db_export.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py index 8e80edf9..4fcc1d81 100644 --- a/ding0/io/db_export.py +++ b/ding0/io/db_export.py @@ -390,6 +390,8 @@ def export_all_dataframes_to_db(engine, schema, network=None, srid=None, grid_no # 'description': str(metadata_json)}, index=[0]) # df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) + # ToDo: use DING0_TABLES from ding0_db_tables.py and the df label (name of df) to loop over + # Use a loop based on a list that is compared with the df name for insert the df in the corresponding table # 1 export_df_to_db(engine, schema, network.lines, "line", srid) # 2 @@ -463,6 +465,8 @@ def export_all_pkl_to_db(engine, schema, network, srid, grid_no=None): 'description': str(metadata_json)}, index=[0]) df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) + # ToDo: use DING0_TABLES from ding0_db_tables.py and the df label (name of df) to loop over + # Use a loop based on a list that is compared with the df name for insert the df in the corresponding table # 1 export_df_to_db(engine, schema, network.lines, "line", srid) # 2 From d8ceda3f654cf2531bc620465131d877dae9f88b Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Tue, 6 Aug 2019 15:27:52 +0200 Subject: [PATCH 204/215] Attempt to fix display markdown table in RST file --- doc/usage_details.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 3b99785c..25476656 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -212,11 +212,13 @@ table as foreignKey. All tables depend on the existing run_id. versioning ---------- + | name | description | unit | -|-------------|-----------------------------------|----------------| +| ------------- | ----------------------------------- | ---------------- | | id | unambiguous unique numer | integer | | run_id | time and date of table generation | yyyyMMddhhmmss | | description | Used parameters for this run | string | + line ---- From 3a8f166d7ac23b2414a3dde9e2673aebb010c04d Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 22 Aug 2019 14:15:27 +0200 Subject: [PATCH 205/215] replace .md format table with .rst formatted table --- doc/usage_details.rst | 337 +++++++++++++++++++++++------------------- 1 file changed, 183 insertions(+), 154 deletions(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 25476656..6e5b3452 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -212,204 +212,233 @@ table as foreignKey. All tables depend on the existing run_id. 
versioning ---------- - -| name | description | unit | -| ------------- | ----------------------------------- | ---------------- | -| id | unambiguous unique numer | integer | -| run_id | time and date of table generation | yyyyMMddhhmmss | -| description | Used parameters for this run | string | - +=========== ================================= ============== +name description unit +=========== ================================= ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +description Used parameters for this run string +=========== ================================= ============== line ---- -| name | description | unit | -|-----------|--------------------------------------------------------------------------------------|------------------| -| id | unambiguous unique numer | integer | -| run_id | time and date of table generation | yyyyMMddhhmmss | -| id_db | unambiguous number of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge | integer | -| edge_name | unambiguous name of edge | string | -| grid_name | unambiguous name of grid | string | -| node1 | id_db of first node | string | -| node2 | id_db of second node | string | -| type_kind | n/a | string | -| type_name | n/a | string | -| length | length of line as float | km | -| u_n | nominal voltage as float | kV | -| c | inductive resistance at 50Hz as float | uF/km | -| l | stored as float | mH/km | -| r | stored as float | Ohm/km | -| i_max_th | stored as float | A | -| geom | geometric coordinates | WGS84 LINESTRING | +========= ==================================================================================== ================ +name description unit +========= ==================================================================================== ================ +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge integer +edge_name unambiguous name of edge string +grid_name unambiguous name of grid string +node1 id_db of first node string +node2 id_db of second node string +type_kind n/a string +type_name n/a string +length length of line as float km +u_n nominal voltage as float kV +c inductive resistance at 50Hz as float uF/km +l stored as float mH/km +r stored as float Ohm/km +i_max_th stored as float A +geom geometric coordinates WGS84 LINESTRING +========= ==================================================================================== ================ lv_branchtee.json ----------------- -| name | discription | unit | -|--------|---------------------------------------------------------------------------|----------------| -| id | unambiguous unique numer | integer | -| run_id | time and date of table generation | yyyyMMddhhmmss | -| geom | geometric coordinates | WGS84 POINT | -| id_db | unambiguous number of LV-Grid | integer | -| name | unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber | string | +====== ========================================================================= ============== +name discription unit +====== ========================================================================= ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +geom geometric coordinates WGS84 POINT +id_db unambiguous number of LV-Grid integer +name unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber string +====== 
========================================================================= ============== lv_generator.json ----------------- -| name | description | unit | -|------------------|----------------------------------------------------------------------|----------------| -| id | unambiguous unique numer | integer | -| run_id | time and date of table generation | yyyyMMddhhmmss | -| id_db | unambiguous number of LV-Grid | integer | -| la_id | FIXME | integer | -| name | unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#' | string | -| lv_grid_id | unambiguous id_db of LV-Grid | integer | -| geom | geometric coordinates | WGS84, POINT | -| type | type of generation {solar; biomass} | string | -| subtype | subtype of generation: {solar_roof_mounted, unknown; biomass} | string | -| v_level | voltage level of generator as integer | FIXME | -| nominal_capacity | nominal capacity as float | FIXME | -| is_aggregated | True if load is aggregated load, else False | boolean | -| weather_cell_id | unambiguous number of the corresponding weather cell | integer | +================ ==================================================================== ============== +name description unit +================ ==================================================================== ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of LV-Grid integer +la_id FIXME integer +name unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#' string +lv_grid_id unambiguous id_db of LV-Grid integer +geom geometric coordinates WGS84, POINT +type type of generation {solar; biomass} string +subtype subtype of generation: {solar_roof_mounted, unknown; biomass} string +v_level voltage level of generator as integer FIXME +nominal_capacity nominal capacity as float FIXME +is_aggregated True if load is aggregated load, else False boolean +weather_cell_id unambiguous number of the corresponding weather cell integer +================ ==================================================================== ============== lv_grid.json ------------ -| name | description | unit | -|-------------|---------------------------------------------------------|--------------------| -| id | unambiguous unique numer | integer | -| run_id | time and date of table generation | yyyyMMddhhmmss | -| id_db | unambiguous number of LV-Grid | integer | -| name | unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid# | string | -| geom | geometric coordinates | WGS84 MULTIPOLYGON | -| population | population in LV-Grid | integer | -| voltage_nom | voltage level of grid as float | kV | - +=========== ======================================================= ================== +name description unit +=========== ======================================================= ================== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of LV-Grid integer +name unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid# string +geom geometric coordinates WGS84 MULTIPOLYGON +population population in LV-Grid integer +voltage_nom voltage level of grid as float kV +=========== ======================================================= ================== lv_load.json ------------ -| name | description | unit | -|-------------|------------------------------------------------------------------------------------|----------------| -| id | unambiguous unique numer | integer | -| run_id | time and date of table 
generation | yyyyMMddhhmmss | -| id_db | unambiguous number of LV-Grid | integer | -| name | unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#' | string | -| lv_grid_id | unambiguous id_db of LV-Grid | integer | -| geom | geometric coordinates | WGS84 POINT | -| consumption | type of load {residential, agricultural, industrial} and corresponding consumption | string | +=========== ================================================================================== ============== +name description unit +=========== ================================================================================== ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of LV-Grid integer +name unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#' string +lv_grid_id unambiguous id_db of LV-Grid integer +geom geometric coordinates WGS84 POINT +consumption type of load {residential, agricultural, industrial} and corresponding consumption string +=========== ================================================================================== ============== lv_station.json --------------- -| name | description | unit | -|--------|-----------------------------------------------------|-------------| -| id | unambiguous unique numer | integer | -| run_id | time and date of table generation in yyyyMMddhhmmss | integer | -| id_db | unambiguous number of LV-Grid | integer | -| geom | geometric coordinates | WGS84 POINT | -| name | FIXME | string | +====== =================================================== =========== +name description unit +====== =================================================== =========== +id unambiguous unique numer integer +run_id time and date of table generation in yyyyMMddhhmmss integer +id_db unambiguous number of LV-Grid integer +geom geometric coordinates WGS84 POINT +name FIXME string +====== =================================================== =========== mv_branchtee.json ----------------- -| name | description | unit | -|--------|-----------------------------------------------------------------------------|----------------| -| id | unambiguous unique numer | integer | -| run_id | time and date of table generation | yyyyMMddhhmmss | -| id_db | unambiguous number of MV-Grid | integer | -| geom | geometric coordinates | WGS84 POINT | -| name | unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#' | string | +====== =========================================================================== ============== +name description unit +====== =========================================================================== ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of MV-Grid integer +geom geometric coordinates WGS84 POINT +name unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#' string +====== =========================================================================== ============== mv_circuitbreaker ----------------- -| name | description | unit | -|--------|-----------------------------------|----------------| -| id | unambiguous unique numer | integer | -| run_id | time and date of table generation | yyyyMMddhhmmss | -| id_db | unambiguous number of MV-Grid | integer | -| geom | geometric coordinates | WGS84 POINT | -| name | FIXME | string | -| status | FIXME | string | +====== ================================= ============== +name description unit +====== 
================================= ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of MV-Grid integer +geom geometric coordinates WGS84 POINT +name FIXME string +status FIXME string +====== ================================= ============== mv_generator ------------ -| name | description | unit | -|------------------|---------------------------------------------------------------------------------------------|----------------| -| id | unambiguous unique numer | integer | -| run_id | time and date of table generation | yyyyMMddhhmmss | -| id_db | unambiguous number of MV-Grid | integer | -| name | unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#' | string | -| geom | geometric coordinates | WGS84 POINT | -| type | type of generation: {solar; biomass} | string | -| subtype | subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas} | string | -| v_level | voltage level of generator as integer | FIXME | -| nominal_capacity | nominal capacity as float | FIXME | -| weather_cell_id | unambiguous number of the corresponding weather cell | integer | -| is_aggregated | True if load is aggregated load, else False | boolean | +================ =========================================================================================== ============== +name description unit +================ =========================================================================================== ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of MV-Grid integer +name unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#' string +geom geometric coordinates WGS84 POINT +type type of generation: {solar; biomass} string +subtype subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas} string +v_level voltage level of generator as integer FIXME +nominal_capacity nominal capacity as float FIXME +weather_cell_id unambiguous number of the corresponding weather cell integer +is_aggregated True if load is aggregated load, else False boolean +================ =========================================================================================== ============== mv_grid ------- -| name | description | unit | -|-------------|----------------------------------------------------------|--------------------| -| id | unambiguous unique numer | integer | -| run_id | time and date of table generation | yyyyMMddhhmmss | -| id_db | unambiguous number of MV-Grid | integer | -| geom | geometric coordinates | WGS84 MULTIPOLYGON | -| name | unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#' | string | -| population | population in MV-Grid | integer | -| voltage_nom | voltage level of grid as float | kV | +=========== ======================================================== ================== +name description unit +=========== ======================================================== ================== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of MV-Grid integer +geom geometric coordinates WGS84 MULTIPOLYGON +name unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#' string +population population in MV-Grid integer +voltage_nom voltage level of grid as float kV +=========== ======================================================== ================== mv_load ------- -| name | description | 
unit | -|---------------|--------------------------------------------------------------------------------------------|----------------| -| id | unambiguous unique numer | integer | -| run_id | time and date of table generation | yyyyMMddhhmmss | -| name | unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#' | string | -| geom | geometric coordinates | WGS84 GEOMETRY | -| is_aggregated | True if load is aggregated load, else False | boolean | -| consumption | type of load {retail, residential, agricultural, industrial} and corresponding consumption | string | +============= ========================================================================================== ============== +name description unit +============= ========================================================================================== ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +name unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#' string +geom geometric coordinates WGS84 GEOMETRY +is_aggregated True if load is aggregated load, else False boolean +consumption type of load {retail, residential, agricultural, industrial} and corresponding consumption string +============= ========================================================================================== ============== mv_station ---------- -| name | description | unit | -|--------|------------------------------------------------------------|----------------| -| id | unambiguous unique numer | integer | -| run_id | time and date of table generation | yyyyMMddhhmmss | -| id_db | unambiguous number of MV-Grid | integer | -| geom | geometric coordinates | WGS84 POINT | -| name | unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid# | string | +====== ========================================================== ============== +name description unit +====== ========================================================== ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of MV-Grid integer +geom geometric coordinates WGS84 POINT +name unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid# string +====== ========================================================== ============== mvlv_mapping ------------ -| name | description | unit | -|--------------|----------------------------------------------------------|---------| -| id | unambiguous unique numer | integer | -| run_id | time and date of table generation in yyyyMMddhhmmss | integer | -| lv_grid_id | unambiguous number of LV-Grid | integer | -| lv_grid_name | unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#' | string | -| mv_grid_id | unambiguous number of MV-Grid | integer | -| mv_grid_name | unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#' | string | +============ ======================================================== ======= +name description unit +============ ======================================================== ======= +id unambiguous unique numer integer +run_id time and date of table generation in yyyyMMddhhmmss integer +lv_grid_id unambiguous number of LV-Grid integer +lv_grid_name unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#' string +mv_grid_id unambiguous number of MV-Grid integer +mv_grid_name unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#' string +============ ======================================================== ======= mvlv_transformer ---------------- -| name | description | unit | 
-|------------|-----------------------------------------------------|-------------| -| id | unambiguous unique numer | integer | -| run_id | time and date of table generation in yyyyMMddhhmmss | integer | -| id_db | unambiguous number of LV-Grid | integer | -| geom | geometric coordinates | WGS84 POINT | -| name | FIXME | string | -| voltage_op | as float | kV | -| s_nom | nominal apparent power as float | kVA | -| x | as float | Ohm | -| r | as float | Ohm | +========== =================================================== =========== +name description unit +========== =================================================== =========== +id unambiguous unique numer integer +run_id time and date of table generation in yyyyMMddhhmmss integer +id_db unambiguous number of LV-Grid integer +geom geometric coordinates WGS84 POINT +name FIXME string +voltage_op as float kV +s_nom nominal apparent power as float kVA +x as float Ohm +r as float Ohm +========== =================================================== =========== hvmv_transformer.json --------------------- -| name | description | unit | -|------------|-----------------------------------|----------------| -| id | unambiguous unique numer | integer | -| run_id | time and date of table generation | yyyyMMddhhmmss | -| geom | geometric coordinates | WGS84 POINT | -| name | FIXME | string | -| voltage_op | FIXME | float | -| s_nom | nominal apparent power as float | kVA | -| x | as float | Ohm | -| r | as float | Ohm | +========== ================================= ============== +name description unit +========== ================================= ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +geom geometric coordinates WGS84 POINT +name FIXME string +voltage_op FIXME float +s_nom nominal apparent power as float kVA +x as float Ohm +r as float Ohm +========== ================================= ============== Ding0 Table Metadata From 8fa0eae0147c47d8c13d0823e5482d6324d6e5d5 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 22 Aug 2019 14:21:07 +0200 Subject: [PATCH 206/215] remove missleading .json from table name --- doc/usage_details.rst | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 6e5b3452..a52cd4ae 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -243,7 +243,7 @@ i_max_th stored as float geom geometric coordinates WGS84 LINESTRING ========= ==================================================================================== ================ -lv_branchtee.json +lv_branchtee ----------------- ====== ========================================================================= ============== name discription unit @@ -255,7 +255,7 @@ id_db unambiguous number of LV-Grid name unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber string ====== ========================================================================= ============== -lv_generator.json +lv_generator ----------------- ================ ==================================================================== ============== name description unit @@ -275,7 +275,7 @@ is_aggregated True if load is aggregated load, else False weather_cell_id unambiguous number of the corresponding weather cell integer ================ ==================================================================== ============== -lv_grid.json +lv_grid ------------ =========== ======================================================= ================== 
name description unit @@ -288,7 +288,7 @@ geom geometric coordinates WGS84 MULT population population in LV-Grid integer voltage_nom voltage level of grid as float kV =========== ======================================================= ================== -lv_load.json +lv_load ------------ =========== ================================================================================== ============== name description unit @@ -302,7 +302,7 @@ geom geometric coordinates consumption type of load {residential, agricultural, industrial} and corresponding consumption string =========== ================================================================================== ============== -lv_station.json +lv_station --------------- ====== =================================================== =========== name description unit @@ -314,7 +314,7 @@ geom geometric coordinates WGS84 POINT name FIXME string ====== =================================================== =========== -mv_branchtee.json +mv_branchtee ----------------- ====== =========================================================================== ============== name description unit @@ -425,7 +425,7 @@ x as float Ohm r as float Ohm ========== =================================================== =========== -hvmv_transformer.json +hvmv_transformer --------------------- ========== ================================= ============== name description unit From 304e794ed406c951e07bff8417ba08088e49f954 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 22 Aug 2019 14:50:18 +0200 Subject: [PATCH 207/215] fix formation, add link to metadata template, add information about the location of the ding0 tables on the OEP --- doc/usage_details.rst | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index a52cd4ae..7416e3f7 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -203,13 +203,20 @@ Export ding0 to database Ding0 Table ----------- In order to export the provided, ding0 related, Pandas dataframes to a database one must create specific tables -first. The table definition and metadata(using string version 1.3) is provided within the module "ding0_db_tables.py". +first. The table definition and metadata(using string version 1.3: see Ding0 table metadata) is provided within +the module "ding0_db_tables.py". The table definition is implemented using SQLAlchemy. The following gives a short description for all tables. Note that all tables have the run_id form the versioning table as foreignKey. All tables depend on the existing run_id. +Table specification +------------------- +OpenEnergyDatabase schema name: grid + +The database schema is selected based on the topic for which the data provides information. + versioning ---------- =========== ================================= ============== @@ -288,6 +295,7 @@ geom geometric coordinates WGS84 MULT population population in LV-Grid integer voltage_nom voltage level of grid as float kV =========== ======================================================= ================== + lv_load ------------ =========== ================================================================================== ============== @@ -445,7 +453,9 @@ Ding0 Table Metadata -------------------- The "ding0 metadata" JSON-strings are located in the "metadatastrings" folder within in the "ding0.io" folder. They are created using the a versioned metadatastring witch is under continuous development. The provided Metadata -is using the string in version 1.3. 
+is using a OEP specific json string in version1.3_. + +.. _version1.3: herf=https://github.com/OpenEnergyPlatform/examples/blob/master/metadata/archiv/oep_metadata_template_v1.3.json Database export --------------- From e3cf1d32ee7db3df8fa354ab78dd63ba0538b6c1 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 22 Aug 2019 14:56:49 +0200 Subject: [PATCH 208/215] fix hyperlink --- doc/usage_details.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 7416e3f7..83328388 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -455,7 +455,7 @@ The "ding0 metadata" JSON-strings are located in the "metadatastrings" folder wi They are created using the a versioned metadatastring witch is under continuous development. The provided Metadata is using a OEP specific json string in version1.3_. -.. _version1.3: herf=https://github.com/OpenEnergyPlatform/examples/blob/master/metadata/archiv/oep_metadata_template_v1.3.json +.. _version1.3: https://github.com/OpenEnergyPlatform/examples/blob/master/metadata/archiv/oep_metadata_template_v1.3.json Database export --------------- From 4dfe058bc4afe9e97d1239cf547454de5b76afde Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Thu, 22 Aug 2019 15:16:00 +0200 Subject: [PATCH 209/215] add description on how to update the provided information on all ding0 tables from metadata json strings --- doc/usage_details.rst | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 83328388..64ac5da9 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -208,7 +208,7 @@ the module "ding0_db_tables.py". The table definition is implemented using SQLAlchemy. -The following gives a short description for all tables. Note that all tables have the run_id form the versioning +The following gives a short description for all tables. Note that all tables own the run_id form the versioning table as foreignKey. All tables depend on the existing run_id. Table specification @@ -217,6 +217,11 @@ OpenEnergyDatabase schema name: grid The database schema is selected based on the topic for which the data provides information. +Note: The following tables can be generated automatically from a metadata string. +To create the tables this script_ is provided. + +.. 
_script: https://gist.github.com/gplssm/63f11276387875763f2bbc7f9a5fdb8f
+
 versioning
 ----------
 =========== ================================= ==============

From 0ba5126065bdeecffca62d989dc2080455a2fa1 Mon Sep 17 00:00:00 2001
From: jh-RLI
Date: Thu, 22 Aug 2019 16:03:32 +0200
Subject: [PATCH 210/215] update author

---
 ding0/io/file_export.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ding0/io/file_export.py b/ding0/io/file_export.py
index 24192879..a8d7f4c9 100644
--- a/ding0/io/file_export.py
+++ b/ding0/io/file_export.py
@@ -10,7 +10,7 @@
 __copyright__ = "Reiner Lemoine Institut gGmbH"
 __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
 __url__ = "https://github.com/openego/ding0/blob/master/LICENSE"
-__author__ = "nesnoj, gplssm"
+__author__ = "nesnoj, gplssm, jh-RLI"


 import os

From da805193cf495f90d458cf5048edcd2a37dc6598 Mon Sep 17 00:00:00 2001
From: jh-RLI
Date: Thu, 22 Aug 2019 16:04:27 +0200
Subject: [PATCH 211/215] added usage description for the CSV file exporter

---
 doc/usage_details.rst | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/doc/usage_details.rst b/doc/usage_details.rst
index 64ac5da9..a5706dc0 100644
--- a/doc/usage_details.rst
+++ b/doc/usage_details.rst
@@ -479,6 +479,24 @@ CSV file export
 
 Ding0 objects can be exported in csv files. The functionality is provided by Pandas.IO.
 
+Usage
+-----
+The export functionality is implemented here: file_export_.
+
+The CSV exporter is used as a command line script. Its core functionality is realized by using pandas IO functions.
+
+To use the CSV exporter, set the destination folder in the script to define where the
+generated CSV files should be stored. The operator must also specify the range of grid districts to be exported.
+
+The prerequisite is that you are able to create (or have access to) a ding0 network from pickle files or from an
+actual ding0 run. The current state of the script assumes that you are using pickle files to create
+the ding0 network.
+
+.. _file_export: https://github.com/openego/ding0/blob/features/stats-export/ding0/io/file_export.py
+
+CSV Table specification
+-----------------------
+
 Lines
 -----
 .. csv-table:: line.csv

From 044a45c10b5bd09fe0f99c56b799c144ad4558af Mon Sep 17 00:00:00 2001
From: jh-RLI
Date: Thu, 22 Aug 2019 16:57:04 +0200
Subject: [PATCH 212/215] added description of the ding0 tables, restructured file, added usage description for the db exporter

---
 doc/usage_details.rst | 53 +++++++++++++++++++++++++++++++------------
 1 file changed, 38 insertions(+), 15 deletions(-)

diff --git a/doc/usage_details.rst b/doc/usage_details.rst
index a5706dc0..fd8220c3 100644
--- a/doc/usage_details.rst
+++ b/doc/usage_details.rst
@@ -198,22 +198,31 @@ named exporter_config.cfg. In the current state it just stores the database sche
 for any exports to a database. The config file is imported as config-object using the package "ConfigObj".
 In the future all static options should be stored in this file.
 
-Export ding0 to database
-=========================
+
 Ding0 Table
------------
+===========
 In order to export the provided, ding0 related, Pandas dataframes to a database one must create specific tables
 first. The table definition and metadata(using string version 1.3: see Ding0 table metadata) is provided within
 the module "ding0_db_tables.py".
 
-The table definition is implemented using SQLAlchemy.
+The table definition is implemented using SQLAlchemy and can be found here: ding0_sqla_tables_.
+
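+For orientation, a minimal sketch of such a declarative table definition is given below. The class name and the
+column layout are illustrative assumptions only (loosely modelled on the hvmv_transformer specification further
+down); the authoritative definitions are the ones in "ding0_db_tables.py":
+
+.. code-block:: python
+
+    from sqlalchemy import Column, Float, Integer, String
+    from sqlalchemy.ext.declarative import declarative_base
+
+    Base = declarative_base()
+
+    class ExampleHvmvTransformer(Base):
+        # illustrative sketch only, not the real ding0 table definition
+        __tablename__ = 'example_hvmv_transformer'
+        __table_args__ = {'schema': 'grid'}
+
+        id = Column(Integer, primary_key=True)  # unambiguous unique number
+        run_id = Column(Integer)                # references the versioning table in the real schema
+        geom = Column(String)                   # WGS84 POINT; a geometry type in the real definition
+        name = Column(String)
+        voltage_op = Column(Float)              # kV
+        s_nom = Column(Float)                   # kVA
+        x = Column(Float)                       # Ohm
+        r = Column(Float)                       # Ohm
+
+.. 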
_ding0_sqla_tables: https://github.com/openego/ding0/blob/features/stats-export/ding0/io/ding0_db_tables.py + + +Ding0 Table Metadata +-------------------- +The "ding0 metadata" JSON-strings are located in the "metadatastrings" folder within in the "ding0.io" folder. +They are created using the a versioned metadatastring witch is under continuous development. The provided Metadata +is using a OEP specific json string in version1.3_. + +.. _version1.3: https://github.com/OpenEnergyPlatform/examples/blob/master/metadata/archiv/oep_metadata_template_v1.3.json -The following gives a short description for all tables. Note that all tables own the run_id form the versioning -table as foreignKey. All tables depend on the existing run_id. Table specification ------------------- -OpenEnergyDatabase schema name: grid +In the following a short description is given, which covers all tables. Note that all tables own the run_id form +the versioning table as foreignKey. All tables depend on the existing run_id. The database schema is selected based on the topic for which the data provides information. @@ -222,6 +231,8 @@ To create the tables this script_ is provided. .. _script: https://gist.github.com/gplssm/63f11276387875763f2bbc7f9a5fdb8f +**OpenEnergyDatabase schema name: grid.tablename** + versioning ---------- =========== ================================= ============== @@ -454,18 +465,13 @@ r as float Ohm ========== ================================= ============== -Ding0 Table Metadata --------------------- -The "ding0 metadata" JSON-strings are located in the "metadatastrings" folder within in the "ding0.io" folder. -They are created using the a versioned metadatastring witch is under continuous development. The provided Metadata -is using a OEP specific json string in version1.3_. - -.. _version1.3: https://github.com/OpenEnergyPlatform/examples/blob/master/metadata/archiv/oep_metadata_template_v1.3.json +Export ding0 to database +========================= Database export --------------- This exporter depends on existing tables. -The functionality for this module is implemented in "db_export.py". This module provides functionality to establish +The functionality for this module is implemented in db_export.py_ . This module provides functionality to establish a database connection, create the tables, drop the tables, as well as change the database specific owner for each table. The core functionality is the data export. This is implemented using Pandas dataframes and a provided Pandas.IO functionality. @@ -474,6 +480,23 @@ Note: The export to a Database will take a lot of time (about 1 Week). The reaso ding0 provides. Therefore it is not recommended to export all 3608 available GridDistricts at once. This could be error prone caused by connection timeout or similar reasons. We work on speeding up the export in the future. +.. _db_export.py: https://github.com/openego/ding0/blob/features/stats-export/ding0/io/db_export.py + +Usage +----- +The module is implemented as a command line based script. To run the script one needs to be able to create a +ding0 network by using pickle files or by using a new ding0 run as mentioned before. The ding0 network is used as +input for the export_network function which returns the pandas dataframes as nametupels(see Ding0 IO : Ding0 exporter). + +The nametupels are one of the parameters that are input for the export functionality. 
Other Inputs are a valid
+connection to the Open Energy Platform (the tcp based connection is used here), the schema name that specifies the
+destination on the database as well as the value for the srid.
+
+.. code-block:: python
+    # Export all Dataframes returned form export_network(nw) to DB
+    export_all_dataframes_to_db(oedb_engine, SCHEMA, network=network, srid=SRID)
+
+
 CSV file export
 ===============
 

From 8bc1e17b399ee06b7f2c5d9f4435f96d7f63f59b Mon Sep 17 00:00:00 2001
From: jh-RLI
Date: Mon, 2 Sep 2019 17:04:58 +0200
Subject: [PATCH 213/215] Add code examples for better understanding, fix some typos

---
 doc/usage_details.rst | 102 +++++++++++++++++++++++++++++++++++-------
 1 file changed, 85 insertions(+), 17 deletions(-)

diff --git a/doc/usage_details.rst b/doc/usage_details.rst
index fd8220c3..57dc02f6 100644
--- a/doc/usage_details.rst
+++ b/doc/usage_details.rst
@@ -177,14 +177,52 @@ from a versioned Ding0 "run" that has been stored in pickle files before. The ot
 a new version by running ding0 again. The difference will be most obviously be noticed by looking at the run_id.
 So the input would the ding0 network and the coherent run_id.
 
+.. code-block:: python
+
+    # 1.
+    # example pickle file path
+    pkl_filepath = "/home/local/user/Ding0/20180823154014"
+
+    # 2.
+    # choose MV Grid Districts to import, use list of integers
+    grids = list(range(2, 6))
+
+    # 3.
+    # loop over selected grids in the directory
+    for grid_no in grids:
+        try:
+            nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no)))
+        except:
+            print('Something went wrong')
+            continue
+
 What is the output?
 -------------------
-The function export_network() returns a list of nametuples. The nametuple contains 17 elements. The main
-purpose of that is to return Pandas dataframes that store the Ding0Network data. But 2 elements also provide
-further information like the actual run_id that was set or newly created and a metadata_json that stores
-the assumptions ding0 uses to create the network topology.
-Since Pandas is a Python package that is used very frequently the IO functionality of pandas can be used
-for several tasks. See pandas IO.
+The export_network() function returns a list of namedtuples. The namedtuple contains 17 elements. They contain all
+data of a Ding0Network instance as well as the corresponding metadata.
+15 elements store the Ding0Network data and 2 elements contain meta information like the current run_id and
+metadata_json. The metadata_json contains the assumptions that ding0 uses to create the network topology.
+
+Since pandas is a very widely used Python package, the goal was to improve the usability of the IO functionality
+of ding0 with it. For further information see the pandas IO documentation.
+
+.. code-block:: python
+
+    # 4.
+    # Create 15 pandas dataframes and 2 metadata elements as namedtuple
+    # use the run_id from the pickle files in this case = 20180823154014
+    network = export_network(nw, run_id=pickled_run_id_value)
+
+
+    # available namedtuple
+    'Network',
+    [
+        'run_id', 'metadata_json', 'lv_grid', 'lv_gen', 'lv_cd', 'lv_stations', 'mvlv_trafos', 'lv_loads',
+        'mv_grid', 'mv_gen', 'mv_cb', 'mv_cd', 'mv_stations', 'hvmv_trafos', 'mv_loads', 'lines', 'mvlv_mapping'
+    ]
+
+
 What IO functionality is implemented?
 -------------------------------------
@@ -193,15 +231,15 @@ Tables on a relational database as well as saved to CSV files.
 
 IO settings
 -----------
-The io settings are provided within a config file that is located in the ding0/config folder. The file is
-named exporter_config.cfg. In the current state it just stores the database schema that is used as destination
-for any exports to a database. The config file is imported as config-object using the package "ConfigObj".
+The IO settings are provided within a config file that is located in the ding0/config folder. The file is
+named exporter_config.cfg. In the current state it just stores the database schema name that is used as destination
+for any data export to a database. The config file is imported as a config object using the package "ConfigObj".
 In the future all static options should be stored in this file.
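+
+A minimal sketch of how these settings can be read with ConfigObj is given below (the file path and the
+section/key names are the ones used in the examples further down in this document):
+
+.. code-block:: python
+
+    from configobj import ConfigObj
+
+    # read the exporter settings file shipped with ding0
+    exporter_config = ConfigObj('ding0/config/exporter_config.cfg')
+
+    # database schema used as destination by the database exporter
+    SCHEMA = exporter_config['EXPORTER_DB']['SCHEMA']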
 
 
 Ding0 Table
 ===========
-In order to export the provided, ding0 related, Pandas dataframes to a database one must create specific tables
+In order to export a Pandas dataframe to a database, one must create specific tables
 first. The table definition and metadata(using string version 1.3: see Ding0 table metadata) is provided within
 the module "ding0_db_tables.py".
@@ -213,7 +251,7 @@ The table definition is implemented using SQLAlchemy and can be found here: ding
 
 Ding0 Table Metadata
 --------------------
 The "ding0 metadata" JSON-strings are located in the "metadatastrings" folder within in the "ding0.io" folder.
-They are created using the a versioned metadatastring witch is under continuous development. The provided Metadata
+They are created using a versioned metadata string which is under continuous development. The provided Metadata
 is using a OEP specific json string in version1.3_.
 
 .. _version1.3: https://github.com/OpenEnergyPlatform/examples/blob/master/metadata/archiv/oep_metadata_template_v1.3.json
@@ -221,8 +259,8 @@ is using a OEP specific json string in version1.3_.
 
 Table specification
 -------------------
-In the following a short description is given, which covers all tables. Note that all tables own the run_id form
-the versioning table as foreignKey. All tables depend on the existing run_id.
+In the following a short description is given, which covers the structure of all tables. Note that all tables have
+the run_id column from the versioning table as a foreign key. All tables depend on the same run_id value.
 
 The database schema is selected based on the topic for which the data provides information.
 
@@ -470,15 +508,15 @@ Export ding0 to database
 
 Database export
 ---------------
-This exporter depends on existing tables.
+This exporter depends on existing tables as described in the chapter "Ding0 Table".
 The functionality for this module is implemented in db_export.py_ . This module provides functionality to establish
 a database connection, create the tables, drop the tables, as well as change the database specific owner for each
 table. The core functionality is the data export. This is implemented using Pandas dataframes and a provided
 Pandas.IO functionality.
 
-Note: The export to a Database will take a lot of time (about 1 Week). The reason for this is the quantity of the data
-ding0 provides. Therefore it is not recommended to export all 3608 available GridDistricts at once. This could be error
-prone caused by connection timeout or similar reasons. We work on speeding up the export in the future.
+Note: The export to a database will take a lot of time (about one week). The reason for this is the amount of data
+provided by ding0. It is therefore not recommended to export all 3608 available GridDistricts at once, as this could
+fail due to connection timeouts or similar issues. We are working on speeding up the export in the future.
 
 .. 
_db_export.py: https://github.com/openego/ding0/blob/features/stats-export/ding0/io/db_export.py @@ -488,12 +526,42 @@ The module is implemented as a command line based script. To run the script one ding0 network by using pickle files or by using a new ding0 run as mentioned before. The ding0 network is used as input for the export_network function which returns the pandas dataframes as nametupels(see Ding0 IO : Ding0 exporter). +.. code-block:: python + + # create ding0 Network instance + nw = NetworkDing0(name='network') + + #geo. ref. sys. e.g. 4326==WGS84 + SRID = int(nw.config['geo']['srid']) + + # choose MV Grid Districts to import, use list of integers + mv_grid_districts = list(range(2, 6)) + + # run DING0 on selected MV Grid District + nw.run_ding0(session=session, + mv_grid_districts_no=mv_grid_districts) + + # return values from export_network() as tupels + network = export_network(nw) + The nametupels are one of the parameters that are input for the export functionality. Other Inputs are a valid connection to the Open Energy Platform (the tcp based connection is used here), the schema name that specifies the destination on the database as well as the value for the srid. .. code-block:: python + + # establish database connection and SQLAlchemy session + # one need a database user + # example OEP-API is not supported yet + oedb_engine = connection(section='oedb') + session = sessionmaker(bind=oedb_engine)() + + # Set the Database schema which you want to add the tables to. + # Configure the SCHEMA in config file located in: ding0/config/exporter_config.cfg . + SCHEMA = exporter_config['EXPORTER_DB']['SCHEMA'] + # Export all Dataframes returned form export_network(nw) to DB + # example: export_all_dataframes_to_db(oedb_engine, my_schema_name, network=ding0_network_tuples, srid=4326) export_all_dataframes_to_db(oedb_engine, SCHEMA, network=network, srid=SRID) From 8ddd96b939c13dc13faa022cc09d2fe6509abc02 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 2 Sep 2019 17:17:55 +0200 Subject: [PATCH 214/215] Imporve codeblock contend --- doc/usage_details.rst | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 57dc02f6..35a662cd 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -208,12 +208,12 @@ of ding0 with it. For further information see Panda's IO. .. code-block:: python - # 4. + # 4. inside for-loop # Create 15 pandas dataframes and 2 metadata elements as namedtuple # use the run_id from the pickle files in this case = 20180823154014 network = export_network(nw, run_id=pickled_run_id_value) - + # This is the structure of the output: # available namedtuple 'Network', [ @@ -222,8 +222,6 @@ of ding0 with it. For further information see Panda's IO. ] - - What IO functionality is implemented? ------------------------------------- Currently the Ding0Network can be serialized as pickle files in python. It can also be stored in @@ -528,6 +526,8 @@ input for the export_network function which returns the pandas dataframes as nam .. 
code-block:: python + # db_export.py + # create ding0 Network instance nw = NetworkDing0(name='network') @@ -544,19 +544,30 @@ input for the export_network function which returns the pandas dataframes as nam # return values from export_network() as tupels network = export_network(nw) + # Create Tables + # Creates all defined tables from "ding0_db_tables.py" + create_ding0_sql_tables(oedb_engine, SCHEMA) + + # Delete only the created tables + drop_ding0_db_tables(oedb_engine) + + # change the owner (open-energy-database specific function) + db_tables_change_owner(oedb_engine, SCHEMA) + The nametupels are one of the parameters that are input for the export functionality. Other Inputs are a valid connection to the Open Energy Platform (the tcp based connection is used here), the schema name that specifies the destination on the database as well as the value for the srid. .. code-block:: python + # db_export.py + # establish database connection and SQLAlchemy session - # one need a database user - # example OEP-API is not supported yet + # one need a database user (OEP-API is not supported yet) oedb_engine = connection(section='oedb') session = sessionmaker(bind=oedb_engine)() - # Set the Database schema which you want to add the tables to. + # Set the Database schema which you want to add the tables # Configure the SCHEMA in config file located in: ding0/config/exporter_config.cfg . SCHEMA = exporter_config['EXPORTER_DB']['SCHEMA'] @@ -564,7 +575,6 @@ destination on the database as well as the value for the srid. # example: export_all_dataframes_to_db(oedb_engine, my_schema_name, network=ding0_network_tuples, srid=4326) export_all_dataframes_to_db(oedb_engine, SCHEMA, network=network, srid=SRID) - CSV file export =============== From dba000ff034cba1f60bb5786637b74065dd9de23 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 2 Sep 2019 17:22:28 +0200 Subject: [PATCH 215/215] Imporve codeblock contend --- doc/usage_details.rst | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 35a662cd..d1ecff92 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -544,6 +544,13 @@ input for the export_network function which returns the pandas dataframes as nam # return values from export_network() as tupels network = export_network(nw) + +Before the data can be exported, the tables must be created in the database. + +.. code-block:: python + + # db_export.py + # Create Tables # Creates all defined tables from "ding0_db_tables.py" create_ding0_sql_tables(oedb_engine, SCHEMA) @@ -556,7 +563,7 @@ input for the export_network function which returns the pandas dataframes as nam The nametupels are one of the parameters that are input for the export functionality. Other Inputs are a valid connection to the Open Energy Platform (the tcp based connection is used here), the schema name that specifies the -destination on the database as well as the value for the srid. +destination on the database as well as the srid. .. code-block:: python