diff --git a/.gitignore b/.gitignore index 3fb578c5..9bbcebf0 100644 --- a/.gitignore +++ b/.gitignore @@ -58,6 +58,7 @@ target/ # OWN STUFF #.gitignore +ding0/examples/ding0_grids_example.pkl .idea/ ding0/output/ /Line_loading_feed-in_case.png diff --git a/ding0/config/config_files.cfg b/ding0/config/config_files.cfg index 79c474d2..cee97f2f 100644 --- a/ding0/config/config_files.cfg +++ b/ding0/config/config_files.cfg @@ -26,4 +26,7 @@ model_grids_lv_apartment_string = model_grids-lv_relation_apartment_string.csv animation_file_prefix = mv-routing_ani_ nd_pickle = ding0_grids_{}.pkl edges_stats = mvgd_edges_stats_{}.csv -nodes_stats = mvgd_nodes_stats_{}.csv \ No newline at end of file +nodes_stats = mvgd_nodes_stats_{}.csv + +[metadata_strings] +version = 1.3 \ No newline at end of file diff --git a/ding0/config/exporter_config.cfg b/ding0/config/exporter_config.cfg new file mode 100644 index 00000000..a7682c9c --- /dev/null +++ b/ding0/config/exporter_config.cfg @@ -0,0 +1,10 @@ +[EXPORTER_DB] + SCHEMA = model_draft + +[DING0_TABLE] + SRID = '' + METADATA_FOLDER = '' + +[GRID_DISTRICT_RANGE] + START = '' + END = '' diff --git a/ding0/examples/example_single_grid_district.py b/ding0/examples/example_single_grid_district.py index 3d27aef9..477d56d0 100644 --- a/ding0/examples/example_single_grid_district.py +++ b/ding0/examples/example_single_grid_district.py @@ -34,7 +34,7 @@ # ===== MAIN ===== # database connection/ session -engine = db.connection(section='oedb') +engine = db.connection(section='oedb_vpn') session = sessionmaker(bind=engine)() # instantiate new ding0 network object diff --git a/ding0/io/__init__.py b/ding0/io/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ding0/io/confirm_existing_gd_in_pkl_dir.py b/ding0/io/confirm_existing_gd_in_pkl_dir.py new file mode 100644 index 00000000..a0a094bd --- /dev/null +++ b/ding0/io/confirm_existing_gd_in_pkl_dir.py @@ -0,0 +1,39 @@ +import os +import logging +from ding0.tools.results import load_nd_from_pickle + +# LOG_FILE_PATH = 'pickle_log' +LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log') + +# does the file exist? +if not os.path.isfile(LOG_FILE_PATH): + print('ding0 log-file {file} not found. ' + 'This might be the first run of the tool. ' + .format(file=LOG_FILE_PATH)) + base_path = os.path.split(LOG_FILE_PATH)[0] + if not os.path.isdir(base_path): + os.mkdir(base_path) + print('The directory {path} was created.'.format(path=base_path)) + + with open(LOG_FILE_PATH, 'a') as log: + log.write("List of missing grid districts:\n") + pass + + +# logging.basicConfig(filename=LOG_FILE_PATH, level=logging.DEBUG) + +# pickle file locations path to RLI_Daten_Flex01 mount +pkl_filepath = "/home/local/RL-INSTITUT/jonas.huber/rli/Daten_flexibel_01/Ding0/20180823154014" + +# choose MV Grid Districts to import +grids = list(range(61, 70)) + +for grid_no in grids: + try: + nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) + except: + # logging.debug('ding0_grids__{}.pkl not present to the current directory'.format(grid_no)) + with open(LOG_FILE_PATH, 'a') as log: + log.write('ding0_grids__{}.pkl not present to the current directory\n'.format(grid_no)) + pass + continue \ No newline at end of file diff --git a/ding0/io/db_export.py b/ding0/io/db_export.py new file mode 100644 index 00000000..4fcc1d81 --- /dev/null +++ b/ding0/io/db_export.py @@ -0,0 +1,585 @@ +"""This file is part of DINGO, the DIstribution Network GeneratOr. 
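For reference, the new ding0/config/exporter_config.cfg added above is read elsewhere in this patch via ding0.io.io_settings (not shown here). A minimal sketch of how such a file can be parsed with configparser; the path construction is an assumption and the actual io_settings implementation may differ:

    import configparser
    import os

    import ding0

    # assumed location of the new config file inside the installed package
    config_path = os.path.join(ding0.__path__[0], 'config', 'exporter_config.cfg')

    exporter_config = configparser.ConfigParser()
    exporter_config.read(config_path)

    SCHEMA = exporter_config['EXPORTER_DB']['SCHEMA']  # e.g. 'model_draft'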
+DINGO is a tool to generate synthetic medium and low voltage power +distribution grids based on open data. + +It is developed in the project open_eGo: https://openegoproject.wordpress.com + +DING0 lives at github: https://github.com/openego/ding0/ +The documentation is available on RTD: http://ding0.readthedocs.io""" + +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" +__author__ = "jh-RLI" + +import numpy as np +import pandas as pd +import json +import re +from egoio.tools.db import connection +from ding0.io.export import export_network +from ding0.core import NetworkDing0 +from ding0.io.ding0_db_tables import DING0_TABLES, METADATA, create_ding0_sql_tables +from ding0.io.io_settings import exporter_config +from geoalchemy2.types import Geometry, WKTElement +from sqlalchemy.orm import sessionmaker + + +# init SRID +SRID = None + + +def create_wkt_element(geom): + """ + Use GeoAlchemy's WKTElement to create a geom with SRID + GeoAlchemy2 WKTElement (PostGis func:ST_GeomFromText) + + Parameters + ---------- + geom: Shaply geometry from script export.py + + Returns + ------- + None : None + Returns None if the data frame does not contain any geometry + """ + + if geom is not None: + if SRID is None: + try: + from ding0.io.ding0_pkl2db import PICKLE_SRID + return WKTElement(geom, srid=PICKLE_SRID, extended=True) + except: + print('You need to provide a SRID or PICKLE_SRID') + print('PICKLE_SRID will be set to 4326') + PICKLE_SRID = 4326 + return WKTElement(geom, srid=PICKLE_SRID, extended=True) + else: + return WKTElement(geom, srid=SRID, extended=True) + else: + return None + + +def df_sql_write(engine, schema, db_table, dataframe, SRID=None, geom_type=None): + """ + Convert data frames such that their column names + are made small and the index is renamed 'id_db' so as to + correctly load its data to its appropriate sql table. Also handles the + upload to a DB data frames with different geometry types. + + NOTE: This function does not check if the data frame columns + matches the db_table fields, if they do not then no warning + is given. + + Parameters + ---------- + pandas.DataFrame + dataframe : The pandas dataframe to be transferred to its + apprpritate db_table + + db_table: :py:mod:`sqlalchemy.sql.schema.Table` + A table instance definition from sqlalchemy. 
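The column handling described for df_sql_write() above (rename 'id' to 'id_db', lower-case all column names before the insert) in isolation, as a small sketch with a made-up frame:

    import pandas as pd

    df = pd.DataFrame({'id': [1, 2], 'Edge_Name': ['line_1', 'line_2']})

    # same normalisation steps as in df_sql_write()
    if 'id' in df.columns:
        df = df.rename(columns={'id': 'id_db'})
    df.columns = df.columns.map(str.lower)

    print(df.columns.tolist())  # ['id_db', 'edge_name']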
+ NOTE: This isn't an orm definition + + engine: :py:mod:`sqlalchemy.engine.base.Engine` + Sqlalchemy database engine + + schema: str + The schema in which the tables are to be created + + geom_type: str + Prameter for handling data frames with different geometry types + SRID: int + The current srid provided by the ding0 network + """ + + # rename data frame column DB like + if 'id' in dataframe.columns: + dataframe.rename(columns={'id':'id_db'}, inplace=True) + sql_write_df = dataframe.copy() + sql_write_df.columns = sql_write_df.columns.map(str.lower) + # sql_write_df = sql_write_df.set_index('id_db') + + # Insert pd data frame with geom column + if 'geom' in dataframe.columns: + if geom_type is 'POINT': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('POINT', srid=int(SRID))}) + + elif geom_type is 'POLYGON': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('POLYGON', srid=int(SRID))}) + + elif geom_type is 'MULTIPOLYGON': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('MULTIPOLYGON', srid=int(SRID))}) + + elif geom_type is 'LINESTRING': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('LINESTRING', srid=int(SRID))}) + + elif geom_type is 'GEOMETRY': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('GEOMETRY', srid=int(SRID))}) + + else: + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) + # If the Dataframe does not contain id named column (like already named id_db) + else: + sql_write_df = dataframe.copy() + sql_write_df.columns = sql_write_df.columns.map(str.lower) + # sql_write_df = sql_write_df.set_index('id') + + if 'geom' in dataframe.columns: + # Insert pd Dataframe with geom column + if geom_type is 'POINT': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('POINT', srid=int(SRID))}) + + elif geom_type is 'POLYGON': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('POLYGON', srid=int(SRID))}) + + elif geom_type is 'MULTIPOLYGON': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('MULTIPOLYGON', srid=int(SRID))}) + + elif geom_type is 'LINESTRING': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None, + dtype={'geom': Geometry('LINESTRING', srid=int(SRID))}) + + elif geom_type is 'GEOMETRY': + sql_write_df['geom'] = sql_write_df['geom'].apply(create_wkt_element) + sql_write_df.to_sql(db_table, con=engine, schema=schema, 
if_exists='append', index=None, + dtype={'geom': Geometry('GEOMETRY', srid=int(SRID))}) + + else: + sql_write_df.to_sql(db_table, con=engine, schema=schema, if_exists='append', index=None) + + +def export_df_to_db(engine, schema, df, tabletype, srid=None): + """ + Writes values to the connected DB. Values from Pandas data frame. + Decides which table by tabletype + + Parameters + ---------- + engine: sqlalchemy.engine.base.Engine` + Sqlalchemy database engine + schema : str + The schema in which the tables are to be created + pandas.DataFrame + df : pandas data frame + tabletype : str + Set the destination table where the pd data frame will be stored in + srid: int + The current srid provided by the ding0 network + """ + print("Exporting table type : {}".format(tabletype)) + if tabletype is 'line': + df_sql_write(engine, schema, DING0_TABLES['line'], df, srid, 'LINESTRING') + + elif tabletype is 'lv_cd': + df = df.drop(['lv_grid_id'], axis=1) + df_sql_write(engine, schema, DING0_TABLES['lv_branchtee'], df, srid, 'POINT') + + elif tabletype is 'lv_gen': + df_sql_write(engine, schema, DING0_TABLES['lv_generator'], df, srid, 'POINT') + + elif tabletype is 'lv_load': + df_sql_write(engine, schema, DING0_TABLES['lv_load'], df, srid, 'POINT') + + elif tabletype is 'lv_grid': + df_sql_write(engine, schema, DING0_TABLES['lv_grid'], df, srid, 'GEOMETRY') + + elif tabletype is 'lv_station': + df_sql_write(engine, schema, DING0_TABLES['lv_station'], df, srid, 'POINT') + + elif tabletype is 'mvlv_trafo': + df_sql_write(engine, schema, DING0_TABLES['mvlv_transformer'], df, srid, 'POINT') + + elif tabletype is 'mvlv_mapping': + df_sql_write(engine, schema, DING0_TABLES['mvlv_mapping'], df, srid) + + elif tabletype is 'mv_cd': + df_sql_write(engine, schema, DING0_TABLES['mv_branchtee'], df, srid, 'POINT') + + elif tabletype is 'mv_cb': + df_sql_write(engine, schema, DING0_TABLES['mv_circuitbreaker'], df, srid, 'POINT') + + elif tabletype is 'mv_gen': + df_sql_write(engine, schema, DING0_TABLES['mv_generator'], df, srid, 'POINT') + + elif tabletype is 'mv_load': + df_sql_write(engine, schema, DING0_TABLES['mv_load'], df, srid, 'GEOMETRY') + + elif tabletype is 'mv_grid': + df_sql_write(engine, schema, DING0_TABLES['mv_grid'], df, srid, 'MULTIPOLYGON') + + elif tabletype is 'mv_station': + df_sql_write(engine, schema, DING0_TABLES['mv_station'], df, srid, 'POINT') + + elif tabletype is 'hvmv_trafo': + df_sql_write(engine, schema, DING0_TABLES['hvmv_transformer'], df, srid, 'POINT') + + +# ToDo: function works but throws unexpected error (versioning tbl dosent exists) +def drop_ding0_db_tables(engine): + """ + Instructions: In order to drop tables all tables need to be stored in METADATA (create tables before dropping them) + Drops the tables in the schema where they have been created. + Parameters + ---------- + engine: sqlalchemy.engine.base.Engine` + Sqlalchemy database engine + """ + tables = METADATA.sorted_tables + reversed_tables = reversed(tables) + + print("Please confirm that you would like to drop the following tables:") + for n, tab in enumerate(tables): + print("{: 3d}. 
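Each geometry branch of df_sql_write() above boils down to the same pandas-to-PostGIS pattern; a minimal stand-alone sketch (the table name, the 'oedb' connection section and SRID 4326 are placeholders, not part of this patch):

    import pandas as pd
    from egoio.tools.db import connection
    from geoalchemy2 import Geometry
    from geoalchemy2.elements import WKTElement

    engine = connection(section='oedb')

    df = pd.DataFrame({'id_db': [1], 'name': ['example'],
                       'geom': ['POINT (10.0 52.5)']})  # WKT strings
    df['geom'] = df['geom'].apply(lambda g: WKTElement(g, srid=4326))

    df.to_sql('some_point_table', con=engine, schema='model_draft',
              if_exists='append', index=False,
              dtype={'geom': Geometry('POINT', srid=4326)})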
{}".format(n, tab)) + + print("Please confirm with either of the choices below:\n" + + "- yes\n" + + "- no\n" + + "- the indexes to drop in the format 0, 2, 3, 5") + confirmation = input( + "Please type the choice completely as there is no default choice.") + if re.fullmatch('[Yy]es', confirmation): + for tab in reversed_tables: + tab.drop(engine, checkfirst=True) + elif re.fullmatch('[Nn]o', confirmation): + print("Cancelled dropping of tables") + else: + try: + indlist = confirmation.split(',') + indlist = list(map(int, indlist)) + print("Please confirm deletion of the following tables:") + tablist = np.array(reversed_tables)[indlist].tolist() + for n, tab in enumerate(tablist): + print("{: 3d}. {}".format(n, tab)) + con2 = input("Please confirm with either of the choices below:\n" + + "- yes\n" + + "- no") + if re.fullmatch('[Yy]es', con2): + for tab in tablist: + tab.drop(engine, checkfirst=True) + elif re.fullmatch('[Nn]o', con2): + print("Cancelled dropping of tables") + else: + print("The input is unclear, no action taken") + except ValueError: + print("Confirmation unclear, no action taken") + + +def db_tables_change_owner(engine, schema): + tables = METADATA.sorted_tables + + def change_owner(engine, table, role, schema): + """ + Gives access to database users/ groups + + Parameters + ---------- + engine: sqlalchemy session object + A valid connection to a database + schema: The schema in which the tables are to be created + table : sqlalchmy Table class definition + The database table + role : str + database role that access is granted to + """ + tablename = table + + grant_str = """ALTER TABLE {schema}.{table} + OWNER TO {role};""".format(schema=schema, table=tablename.name, + role=role) + + # engine.execute(grant_str) + engine.execution_options(autocommit=True).execute(grant_str) + + # engine.echo=True + + for tab in tables: + change_owner(engine, tab, 'oeuser', schema) + + +def export_all_dataframes_to_db(engine, schema, network=None, srid=None, grid_no=None): + """ + exports all data frames from func. export_network() to the db tables + This works with a completely generated ding0 network(all grid districts have to be generated at once), + all provided DataFrames will be uploaded. + + Note_1: Executing this script for all GridDistricts available in Ding0 the export appears to be + quit time consuming. Plan carefully if you want to export more then a couple 100 GridDistricts at once. + + Note_2: Ding0 creates a dataset that is about 20GB large when running on all available (3608) GridDistricts. + Not using a for-loop when running ding0 + exporting ding0 would use too much memory capacity on most local + machines. Example usage with for-loop see script ding0_pkl2db.py. + + Instructions: + 1. Create a database connection/engine to the "OEDB" for example use the "from egoio.tools.db import + connection" function + 2. Create a SA session: session = sessionmaker(bind=oedb_engine)() + 3. SET the SCHEMA you want to use destination for table creation and data export. + One can set the SCHEMA within the exporter_config.cfg file located in ding0/config folder + 4. Create a ding0 network instance: nw = NetworkDing0(name='network') + 5. SET the srid from network-object config: SRID = str(int(nw.config['geo']['srid'])) + 6. Choose the grid_districts for the ding0 run (nothing chosen all grid_districts will be imported) + mv_grid_districts = [3040, 3045], see Note_2. + 7. run ding0 on selected mv_grid_district + 8. 
call function export_network from export.py -> this provides the network metadata + as json and all ding0 result data as pandas data frames + 9. Create the ding0 sql tables: create_ding0_sql_tables(engine, SCHEMA) + 10. Call the function: export_all_dataframes_to_db(engine, SCHEMA) with your destination database and SCHEMA + + Additionally, if you use the "OEDB/OEP" as destination database: + 11. change the table owner using the function: + db_tables_change_owner(engine, schema) + 12. If you need to drop the table call the function drop_ding0_db_tables(engine, schema) immediately after + the called create function: create_ding0_sql_tables(oedb_engine, SCHEMA) + drop_ding0_db_tables(oedb_engine, SCHEMA) + + Validate: + 13. Check if all metadata strings are present to the current folder and added as SQL comment on table + 14. Check if the export worked as expected and filled the tables with data + + Parameters + ---------- + engine : sqlalchemy.engine.base.Engine + Sqlalchemy database engine + schema : str + The schema in which the tables are to be created + network: namedtuple + All the return values(Data Frames) from export_network() + srid: int + The current srid provided by the ding0 network + grid_no: int + Optional: not implemented yet. ID of currently exported GridDistrict. + This is used to get further information while exporting a range of grids using a for-loop. + Usage example see export_all_pkl_to_db(). + """ + + if engine.dialect.has_table(engine, DING0_TABLES["versioning"], schema=schema): + + db_versioning = pd.read_sql_table(DING0_TABLES['versioning'], engine, schema, + columns=['run_id', 'description']) + # Use for another run with different run_id + # if metadata_json['run_id'] not in db_versioning['run_id']: + # Use if just one run_id should be present to the DB table + if db_versioning.empty: + + metadata_json = json.loads(network.metadata_json) + metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], + 'description': str(metadata_json)}, index=[0]) + df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) + # except: + # print(metadata_json['run_id']) + # metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], + # 'description': str(metadata_json)}, index=[0]) + # df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) + + # ToDo: use DING0_TABLES from ding0_db_tables.py and the df label (name of df) to loop over + # Use a loop based on a list that is compared with the df name for insert the df in the corresponding table + # 1 + export_df_to_db(engine, schema, network.lines, "line", srid) + # 2 + export_df_to_db(engine, schema, network.lv_cd, "lv_cd", srid) + # 3 + export_df_to_db(engine, schema, network.lv_gen, "lv_gen", srid) + # 4 + export_df_to_db(engine, schema, network.lv_stations, "lv_station", srid) + # 5 + export_df_to_db(engine, schema, network.lv_loads, "lv_load", srid) + # 6 + export_df_to_db(engine, schema, network.lv_grid, "lv_grid", srid) + # 7 + export_df_to_db(engine, schema, network.mv_cb, "mv_cb", srid) + # 8 + export_df_to_db(engine, schema, network.mv_cd, "mv_cd", srid) + # 9 + export_df_to_db(engine, schema, network.mv_gen, "mv_gen", srid) + # 10 + export_df_to_db(engine, schema, network.mv_stations, "mv_station", srid) + # 11 + export_df_to_db(engine, schema, network.mv_loads, "mv_load", srid) + # 12 + export_df_to_db(engine, schema, network.mv_grid, "mv_grid", srid) + # 13 + export_df_to_db(engine, schema, network.mvlv_trafos, "mvlv_trafo", srid) + # 14 + export_df_to_db(engine, schema, 
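Steps 1 to 10 from the instructions above, condensed into a single runnable sketch (the 'oedb' section name, the schema and the district list are examples):

    from egoio.tools.db import connection
    from sqlalchemy.orm import sessionmaker

    from ding0.core import NetworkDing0
    from ding0.io.export import export_network
    from ding0.io.ding0_db_tables import create_ding0_sql_tables
    from ding0.io.db_export import export_all_dataframes_to_db

    engine = connection(section='oedb')              # 1. database engine
    session = sessionmaker(bind=engine)()            # 2. SQLAlchemy session
    SCHEMA = 'model_draft'                           # 3. destination schema

    nw = NetworkDing0(name='network')                # 4. ding0 network instance
    SRID = int(nw.config['geo']['srid'])             # 5. srid from the network config

    mv_grid_districts = [3040, 3045]                 # 6. districts for this run
    nw.run_ding0(session=session,                    # 7. run ding0
                 mv_grid_districts_no=mv_grid_districts)

    network = export_network(nw)                     # 8. metadata + result DataFrames
    create_ding0_sql_tables(engine, SCHEMA)          # 9. create the ding0 tables
    export_all_dataframes_to_db(engine, SCHEMA,      # 10. upload everything
                                network=network, srid=SRID)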
network.hvmv_trafos, "hvmv_trafo", srid) + # 15 + export_df_to_db(engine, schema, network.mvlv_mapping, "mvlv_mapping", srid) + + else: + raise KeyError("a run_id already present! No tables are input!") + + else: + print("WARNING: There is no " + DING0_TABLES["versioning"] + " table in the schema: " + schema) + + +def export_all_pkl_to_db(engine, schema, network, srid, grid_no=None): + """ + This function basically works the same way export_all_dataframes_to_db() does. + It is implemented to handel the diffrent ways of executing the functions: + If grids are loaded form pickle files a for loop is included and every grid district will be uploaded one after + another. This changes the requirements for the export to db functionality. + + Parameters + ---------- + engine : sqlalchemy.engine.base.Engine + Sqlalchemy database engine + schema : str + The schema in which the tables are to be created + network: namedtuple + All the return values(Data Frames) from export_network() + srid: int + The current srid provided by the ding0 network + grid_no: int + The Griddistrict number + """ + + if engine.dialect.has_table(engine, DING0_TABLES["versioning"], schema=schema): + + db_versioning = pd.read_sql_table(DING0_TABLES['versioning'], engine, schema, + columns=['run_id', 'description']) + + metadata_json = json.loads(network.metadata_json) + + if db_versioning.empty: + print("run_id: " + str(metadata_json['run_id'])) + + metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'], + 'description': str(metadata_json)}, index=[0]) + df_sql_write(engine, schema, DING0_TABLES['versioning'], metadata_df) + + # ToDo: use DING0_TABLES from ding0_db_tables.py and the df label (name of df) to loop over + # Use a loop based on a list that is compared with the df name for insert the df in the corresponding table + # 1 + export_df_to_db(engine, schema, network.lines, "line", srid) + # 2 + export_df_to_db(engine, schema, network.lv_cd, "lv_cd", srid) + # 3 + export_df_to_db(engine, schema, network.lv_gen, "lv_gen", srid) + # 4 + export_df_to_db(engine, schema, network.lv_stations, "lv_station", srid) + # 5 + export_df_to_db(engine, schema, network.lv_loads, "lv_load", srid) + # 6 + export_df_to_db(engine, schema, network.lv_grid, "lv_grid", srid) + # 7 + export_df_to_db(engine, schema, network.mv_cb, "mv_cb", srid) + # 8 + export_df_to_db(engine, schema, network.mv_cd, "mv_cd", srid) + # 9 + export_df_to_db(engine, schema, network.mv_gen, "mv_gen", srid) + # 10 + export_df_to_db(engine, schema, network.mv_stations, "mv_station", srid) + # 11 + export_df_to_db(engine, schema, network.mv_loads, "mv_load", srid) + # 12 + export_df_to_db(engine, schema, network.mv_grid, "mv_grid", srid) + # 13 + export_df_to_db(engine, schema, network.mvlv_trafos, "mvlv_trafo", srid) + # 14 + export_df_to_db(engine, schema, network.hvmv_trafos, "hvmv_trafo", srid) + # 15 + export_df_to_db(engine, schema, network.mvlv_mapping, "mvlv_mapping", srid) + + print('Griddistrict_' + str(grid_no) + '_has been exported to the database') + else: + print("run_id: " + str(metadata_json['run_id'])) + # 1 + export_df_to_db(engine, schema, network.lines, "line", srid) + # 2 + export_df_to_db(engine, schema, network.lv_cd, "lv_cd", srid) + # 3 + export_df_to_db(engine, schema, network.lv_gen, "lv_gen", srid) + # 4 + export_df_to_db(engine, schema, network.lv_stations, "lv_station", srid) + # 5 + export_df_to_db(engine, schema, network.lv_loads, "lv_load", srid) + # 6 + export_df_to_db(engine, schema, network.lv_grid, "lv_grid", srid) + # 7 + 
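The ToDo above asks for a loop over the DataFrames instead of fifteen numbered calls; a possible sketch, with field names taken from the Network namedtuple returned by export_network() (ding0/io/export.py, later in this patch) and the order of the calls preserved:

    # (attribute on the Network namedtuple, tabletype key used by export_df_to_db)
    EXPORT_SEQUENCE = [
        ('lines', 'line'), ('lv_cd', 'lv_cd'), ('lv_gen', 'lv_gen'),
        ('lv_stations', 'lv_station'), ('lv_loads', 'lv_load'), ('lv_grid', 'lv_grid'),
        ('mv_cb', 'mv_cb'), ('mv_cd', 'mv_cd'), ('mv_gen', 'mv_gen'),
        ('mv_stations', 'mv_station'), ('mv_loads', 'mv_load'), ('mv_grid', 'mv_grid'),
        ('mvlv_trafos', 'mvlv_trafo'), ('hvmv_trafos', 'hvmv_trafo'),
        ('mvlv_mapping', 'mvlv_mapping'),
    ]

    for attr, tabletype in EXPORT_SEQUENCE:
        export_df_to_db(engine, schema, getattr(network, attr), tabletype, srid)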
export_df_to_db(engine, schema, network.mv_cb, "mv_cb", srid) + # 8 + export_df_to_db(engine, schema, network.mv_cd, "mv_cd", srid) + # 9 + export_df_to_db(engine, schema, network.mv_gen, "mv_gen", srid) + # 10 + export_df_to_db(engine, schema, network.mv_stations, "mv_station", srid) + # 11 + export_df_to_db(engine, schema, network.mv_loads, "mv_load", srid) + # 12 + export_df_to_db(engine, schema, network.mv_grid, "mv_grid", srid) + # 13 + export_df_to_db(engine, schema, network.mvlv_trafos, "mvlv_trafo", srid) + # 14 + export_df_to_db(engine, schema, network.hvmv_trafos, "hvmv_trafo", srid) + # 15 + export_df_to_db(engine, schema, network.mvlv_mapping, "mvlv_mapping", srid) + + print('Griddistrict_' + str(grid_no) + '_has been exported to the database') + + else: + print("WARNING: There is no " + DING0_TABLES["versioning"] + " table in the schema: " + schema) + + +if __name__ == "__main__": + + # #########SQLAlchemy and DB table################ + # provide a config-file with valid connection credentials to access a Database. + # the config-file should be located in your user directory within a folder named '.config'. + oedb_engine = connection(section='vpn_oedb') + session = sessionmaker(bind=oedb_engine)() + + # Set the Database schema which you want to add the tables to. + # Configure the SCHEMA in config file located in: ding0/config/exporter_config.cfg . + SCHEMA = exporter_config['EXPORTER_DB']['SCHEMA'] + # hardset for testing + # SCHEMA = "public" + + # #########Ding0 Network################ + # create ding0 Network instance + nw = NetworkDing0(name='network') + + # srid + # ToDo: Check why converted to int and string + # SRID = str(int(nw.config['geo']['srid'])) + SRID = int(nw.config['geo']['srid']) + + # choose MV Grid Districts to import, use list of integers + # Multiple grids f. e.: grids = list(range(1, 3609)) - 1 to 3608(all of the existing) + # Single grids f. e.: grids = [2] + mv_grid_districts = list(range(2, 6)) + + # ToDo: Add for-loop here. Exporting ding0 the GridDistricts should be created and exported incrementally. + # run DING0 on selected MV Grid District + nw.run_ding0(session=session, + mv_grid_districts_no=mv_grid_districts) + + # return values from export_network() as tupels + network = export_network(nw) + + ##################################################### + # Creates all defined tables + create_ding0_sql_tables(oedb_engine, SCHEMA) + drop_ding0_db_tables(oedb_engine) + # db_tables_change_owner(oedb_engine, SCHEMA) + + # ########################### !!! Mind existing tables in DB SCHEMA!!! 
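The ToDo in the __main__ block above (create and export the grid districts incrementally) could look roughly like the loop below; a sketch reusing the objects defined above, with export_all_pkl_to_db() because it tolerates an already filled versioning table:

    # run and export one MV grid district at a time to keep memory usage bounded
    for district in mv_grid_districts:
        nw = NetworkDing0(name='network')
        nw.run_ding0(session=session, mv_grid_districts_no=[district])

        network = export_network(nw)
        export_all_pkl_to_db(oedb_engine, SCHEMA, network, SRID, grid_no=district)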
####################################### + # Export all Dataframes returned form export_network(nw) to DB + # export_all_dataframes_to_db(oedb_engine, SCHEMA, network=network, srid=SRID) diff --git a/ding0/io/ding0_db_tables.py b/ding0/io/ding0_db_tables.py new file mode 100644 index 00000000..102284a4 --- /dev/null +++ b/ding0/io/ding0_db_tables.py @@ -0,0 +1,339 @@ +import json +import os +import ding0 +from sqlalchemy import BigInteger, Boolean, Column, Float, ForeignKey, Integer, String, Table +from geoalchemy2.types import Geometry +from sqlalchemy.ext.declarative import declarative_base + + +DECLARATIVE_BASE = declarative_base() +METADATA = DECLARATIVE_BASE.metadata + +# Set the Database schema which you want to add the tables to +# SCHEMA = "model_draft" + +# Metadata folder Path +METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'io', 'metadatastrings') + +# set your Table names +DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning', + 'line': 'ego_grid_ding0_line', + 'lv_branchtee': 'ego_grid_ding0_lv_branchtee', + 'lv_generator': 'ego_grid_ding0_lv_generator', + 'lv_load': 'ego_grid_ding0_lv_load', + 'lv_grid': 'ego_grid_ding0_lv_grid', + 'lv_station': 'ego_grid_ding0_lv_station', + 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer', + 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping', + 'mv_branchtee': 'ego_grid_ding0_mv_branchtee', + 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker', + 'mv_generator': 'ego_grid_ding0_mv_generator', + 'mv_load': 'ego_grid_ding0_mv_load', + 'mv_grid': 'ego_grid_ding0_mv_grid', + 'mv_station': 'ego_grid_ding0_mv_station', + 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer'} + +# # set your Table names +# DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning_test', +# 'line': 'ego_grid_ding0_line_test', +# 'lv_branchtee': 'ego_grid_ding0_lv_branchtee_test', +# 'lv_generator': 'ego_grid_ding0_lv_generator_test', +# 'lv_load': 'ego_grid_ding0_lv_load_test', +# 'lv_grid': 'ego_grid_ding0_lv_grid_test', +# 'lv_station': 'ego_grid_ding0_lv_station_test', +# 'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer_test', +# 'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping_test', +# 'mv_branchtee': 'ego_grid_ding0_mv_branchtee_test', +# 'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker_test', +# 'mv_generator': 'ego_grid_ding0_mv_generator_test', +# 'mv_load': 'ego_grid_ding0_mv_load_test', +# 'mv_grid': 'ego_grid_ding0_mv_grid_test', +# 'mv_station': 'ego_grid_ding0_mv_station_test', +# 'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer_test'} + + + +def load_json_files(): + """ + Creats a list of all .json files in METADATA_STRING_FOLDER + + Parameters + ---------- + + Returns + ------- + jsonmetadata : dict + contains all .json file names from the folder + """ + + full_dir = os.walk(str(METADATA_STRING_FOLDER)) + jsonmetadata = [] + + for jsonfiles in full_dir: + for jsonfile in jsonfiles: + jsonmetadata = jsonfile + + return jsonmetadata + + +def prepare_metadatastring_fordb(table): + """ + Prepares the JSON String for the sql comment on table + + Required: The .json file names must contain the table name (for example from create_ding0_sql_tables()) + Instruction: Check the SQL "comment on table" for each table (e.g. 
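prepare_metadatastring_fordb() attaches the JSON files as PostgreSQL table comments; one way to spot-check the result from Python instead of pgAdmin, assuming an engine named oedb_engine and the table/schema names used in this patch:

    from sqlalchemy import text

    with oedb_engine.connect() as con:
        comment = con.execute(text(
            "SELECT obj_description('model_draft.ego_grid_ding0_line'::regclass, 'pg_class')"
        )).scalar()
    print(comment)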
use pgAdmin) + + Parameters + ---------- + table: str + table name of the sqlAlchemy table + + Returns + ------- + mdsstring:str + Contains the .json file as string + """ + + for json_file in load_json_files(): + json_file_path = os.path.join(METADATA_STRING_FOLDER, json_file) + with open(json_file_path, encoding='UTF-8') as jf: + if table in json_file: + # included for testing / or logging + # print("Comment on table: " + table + "\nusing this metadata string file: " + file + "\n") + mds = json.load(jf) + mdsstring = json.dumps(mds, indent=4, ensure_ascii=False) + return mdsstring + + +def create_ding0_sql_tables(engine, ding0_schema): + """ + Create the 16 ding0 tables + + Parameters + ---------- + engine: :py:mod:`sqlalchemy.engine.base.Engine` + Sqlalchemy database engine + + ding0_schema : str + The schema in which the tables are to be created + Default: static SCHEMA + """ + + # 1 versioning table + versioning = Table(DING0_TABLES['versioning'], METADATA, + Column('run_id', BigInteger, primary_key=True, autoincrement=False, nullable=False), + Column('description', String(6000)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('versioning') + ) + + # 2 ding0 lines table + ding0_line = Table(DING0_TABLES['line'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('edge_name', String(100)), + Column('grid_name', String(100)), + Column('node1', String(100)), + Column('node2', String(100)), + Column('type_kind', String(100)), + Column('type_name', String(100)), + Column('length', Float(10)), + Column('u_n', Float(10)), + Column('c', Float(10)), + Column('l', Float(10)), + Column('r', Float(10)), + Column('i_max_th', Float(10)), + Column('geom', Geometry('LINESTRING', 4326)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('line') + ) + + # 3 ding0 lv_branchtee table + ding0_lv_branchtee = Table(DING0_TABLES['lv_branchtee'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('lv_branchtee') + ) + + # 4 ding0 lv_generator table + ding0_lv_generator = Table(DING0_TABLES['lv_generator'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('la_id', BigInteger), + Column('name', String(100)), + Column('lv_grid_id', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('type', String(22)), + Column('subtype', String(30)), + Column('v_level', Integer), + Column('nominal_capacity', Float(10)), + Column('weather_cell_id', BigInteger), + Column('is_aggregated', Boolean), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('lv_generator') + ) + # 5 ding0 lv_load table + ding0_lv_load = Table(DING0_TABLES['lv_load'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('name', String(100)), + Column('lv_grid_id', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('consumption', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('lv_load') + ) + + # 6 + ding0_lv_grid = 
Table(DING0_TABLES['lv_grid'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('name', String(100)), + # Column('geom', Geometry('MULTIPOLYGON', 4326)), + Column('geom', Geometry('GEOMETRY', 4326)), + Column('population', BigInteger), + Column('voltage_nom', Float(10)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('lv_grid') + ) + + # 7 ding0 lv_station table + ding0_lv_station = Table(DING0_TABLES['lv_station'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb('lv_station') + ) + + # 8 ding0 mvlv_transformer table + ding0_mvlv_transformer = Table(DING0_TABLES['mvlv_transformer'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + Column('voltage_op', Float(10)), + Column('s_nom', Float(10)), + Column('x', Float(10)), + Column('r', Float(10)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mvlv_transformer") + ) + + # 9 ding0 mvlv_mapping table + ding0_mvlv_mapping = Table(DING0_TABLES['mvlv_mapping'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('lv_grid_id', BigInteger), + Column('lv_grid_name', String(100)), + Column('mv_grid_id', BigInteger), + Column('mv_grid_name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mvlv_mapping") + ) + + # 10 ding0 mv_branchtee table + ding0_mv_branchtee = Table(DING0_TABLES['mv_branchtee'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_branchtee") + ) + + # 11 ding0 mv_circuitbreaker table + ding0_mv_circuitbreaker = Table(DING0_TABLES['mv_circuitbreaker'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + Column('status', String(10)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_circuitbreaker") + ) + + # 12 ding0 mv_generator table + ding0_mv_generator = Table(DING0_TABLES['mv_generator'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('name', String(100)), + Column('geom', Geometry('POINT', 4326)), + Column('type', String(22)), + Column('subtype', String(30)), + Column('v_level', Integer), + Column('nominal_capacity', Float(10)), + Column('weather_cell_id', BigInteger), + Column('is_aggregated', Boolean), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_generator") + ) + + # 13 ding0 mv_load table + ding0_mv_load = Table(DING0_TABLES['mv_load'], METADATA, + Column('id', Integer, 
primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('name', String(100)), + Column('geom', Geometry('GEOMETRY', 4326)), + Column('is_aggregated', Boolean), + Column('consumption', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_load") + ) + + # 14 ding0 mv_grid table + ding0_mv_grid = Table(DING0_TABLES['mv_grid'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('MULTIPOLYGON', 4326)), + Column('name', String(100)), + Column('population', BigInteger), + Column('voltage_nom', Float(10)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_grid") + ) + + # 15 ding0 mv_station table + ding0_mv_station = Table(DING0_TABLES['mv_station'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("mv_station") + ) + + # 16 ding0 hvmv_transformer table + ding0_hvmv_transformer = Table(DING0_TABLES['hvmv_transformer'], METADATA, + Column('id', Integer, primary_key=True), + Column('run_id', BigInteger, ForeignKey(versioning.columns.run_id), nullable=False), + Column('id_db', BigInteger), + Column('geom', Geometry('POINT', 4326)), + Column('name', String(100)), + Column('voltage_op', Float(10)), + Column('s_nom', Float(10)), + Column('x', Float(10)), + Column('r', Float(10)), + schema=ding0_schema, + comment=prepare_metadatastring_fordb("hvmv_transformer") + ) + + # create all the tables + METADATA.create_all(engine, checkfirst=True) \ No newline at end of file diff --git a/ding0/io/ding0_pkl2db.py b/ding0/io/ding0_pkl2db.py new file mode 100644 index 00000000..3c6501be --- /dev/null +++ b/ding0/io/ding0_pkl2db.py @@ -0,0 +1,75 @@ +# coding: utf-8 +"""This file is part of DINGO, the DIstribution Network GeneratOr. +DINGO is a tool to generate synthetic medium and low voltage power +distribution grids based on open data. 
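Once METADATA.create_all() above has run, the presence of all 16 tables can be verified against the same engine; a small sketch using SQLAlchemy's inspector (the engine name and schema are placeholders):

    from sqlalchemy import inspect

    insp = inspect(oedb_engine)
    existing = set(insp.get_table_names(schema='model_draft'))
    missing = [name for name in DING0_TABLES.values() if name not in existing]
    print('missing tables:', missing or 'none')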
+
+It is developed in the project open_eGo: https://openegoproject.wordpress.com
+
+DING0 lives at github: https://github.com/openego/ding0/
+The documentation is available on RTD: http://ding0.readthedocs.io"""
+
+__copyright__ = "Reiner Lemoine Institut gGmbH"
+__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
+__url__ = "https://github.com/openego/ding0/blob/master/LICENSE"
+__author__ = "jh-RLI"
+
+import os
+from egoio.tools import db
+from ding0.io.exporter_log import pickle_export_logger
+from ding0.tools.results import load_nd_from_pickle
+from ding0.io.export import export_network
+from ding0.io.db_export import METADATA, create_ding0_sql_tables, \
+    export_all_pkl_to_db, db_tables_change_owner, drop_ding0_db_tables
+from sqlalchemy.orm import sessionmaker
+
+##################################
+# LOG_FILE_PATH = 'pickle_log'
+LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log')
+pickle_export_logger(LOG_FILE_PATH)
+###################################
+
+# database connection/ session
+oedb_engine = db.connection(section='oedb')
+session = sessionmaker(bind=oedb_engine)()
+
+SCHEMA = "model_draft"
+
+create_ding0_sql_tables(oedb_engine, SCHEMA)
+db_tables_change_owner(oedb_engine, SCHEMA)
+# drop_ding0_db_tables(oedb_engine)
+
+# pickle file locations, path to the RLI_Daten_Flex01 mount
+pkl_filepath = "/home/local/RL-INSTITUT/jonas.huber/rli/Daten_flexibel_01/Ding0/20180823154014"
+
+
+# choose MV Grid Districts to import, use a list of integers
+# e.g. multiple grids: grids = list(range(1, 3609))
+grids = [1658]
+
+# generate all the grids and push them to oedb
+for grid_no in grids:
+
+    try:
+        nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no)))
+    except Exception:
+        print('Something went wrong, created log entry in: {}'.format(LOG_FILE_PATH))
+        with open(LOG_FILE_PATH, 'a') as log:
+            log.write('ding0_grids__{}.pkl not found in the pickle directory\n'.format(grid_no))
+        pass
+
+        continue
+
+    # Extract data from the network and put it into DataFrames for csv and for oedb
+    # run_id is provided manually -> the folder name or nw.metadata['run_id'] provide the run_id value
+    network = export_network(nw, run_id=20180823154014)
+
+    # set SRID from the pickle file
+    PICKLE_SRID = int(nw.config['geo']['srid'])
+
+
+    # provide run id for the pickle upload
+
+    export_all_pkl_to_db(oedb_engine, SCHEMA, network, PICKLE_SRID, grid_no)
+
+
+# db_tables_change_owner(oedb_engine, SCHEMA)
diff --git a/ding0/io/ego_dp_versioning.py b/ding0/io/ego_dp_versioning.py
new file mode 100644
index 00000000..ee0992bd
--- /dev/null
+++ b/ding0/io/ego_dp_versioning.py
@@ -0,0 +1,185 @@
+# """This file is part of DINGO, the DIstribution Network GeneratOr.
+# DINGO is a tool to generate synthetic medium and low voltage power
+# distribution grids based on open data.
+#
+# It is developed in the project open_eGo: https://openegoproject.wordpress.com
+#
+# DING0 lives at github: https://github.com/openego/ding0/
+# The documentation is available on RTD: http://ding0.readthedocs.io"""
+#
+# __copyright__ = "Reiner Lemoine Institut gGmbH"
+# __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
+# __url__ = "https://github.com/openego/ding0/blob/master/LICENSE"
+# __author__ = "jh-RLI"
+
+import os
+
+from sqlalchemy import create_engine, MetaData, Table, exc
+from sqlalchemy.orm import sessionmaker
+from sqlalchemy.ext.declarative import declarative_base
+
+from egoio.tools.db import connection
+
+import ding0
+from ding0.io.db_export import prepare_metadatastring_fordb
+from ding0.io.ego_scenario_log import write_scenario_log
+
+# set your Table names
+DING0_TABLES = {'versioning': 'ego_grid_ding0_versioning',
+                'line': 'ego_grid_ding0_line',
+                'lv_branchtee': 'ego_grid_ding0_lv_branchtee',
+                'lv_generator': 'ego_grid_ding0_lv_generator',
+                'lv_load': 'ego_grid_ding0_lv_load',
+                'lv_grid': 'ego_grid_ding0_lv_grid',
+                'lv_station': 'ego_grid_ding0_lv_station',
+                'mvlv_transformer': 'ego_grid_ding0_mvlv_transformer',
+                'mvlv_mapping': 'ego_grid_ding0_mvlv_mapping',
+                'mv_branchtee': 'ego_grid_ding0_mv_branchtee',
+                'mv_circuitbreaker': 'ego_grid_ding0_mv_circuitbreaker',
+                'mv_generator': 'ego_grid_ding0_mv_generator',
+                'mv_load': 'ego_grid_ding0_mv_load',
+                'mv_grid': 'ego_grid_ding0_mv_grid',
+                'mv_station': 'ego_grid_ding0_mv_station',
+                'hvmv_transformer': 'ego_grid_ding0_hvmv_transformer'}
+
+
+def get_table_names(t):
+    tables = []
+    for k, v in t.items():
+        tables.append(v)
+    return tables
+
+
+def make_session(engine):
+    Session = sessionmaker(bind=engine)
+    return Session(), engine
+
+
+def migrate_tables_to_destination(from_db, s_schema, to_db, d_schema, runid=None):
+    """
+    Note: This function will throw an exception caused by the already existing index.
+    Functionality is not affected by this.
+
+    Copies the tables from the source to the destination database/schema.
+
+    Step-by-Step:
+    1. Set up the connection using the egoio.tools.db -> connection() function
+    2. SET the SOURCE_SCHEMA and DESTINATION_SCHEMA
+    3. Insert your tables (key: name) into a dict like DING0_TABLES
+    4. Call the function get_table_names() with your table dictionary as parameter and save the result in a
+        variable "tables = get_table_names(dict)"
+    5. For ding0 data set the RUN_ID
+    6. Save the dynamic path to the metadata_string.json in METADATA_STRING_FOLDER
+        Note: The metadata_string file names need to contain the table name. See:
+        https://github.com/openego/ding0/tree/features/stats-export/ding0/io/metadatastrings
+    7. Call the function with parameters like:
+        migrate_tables_to_destination(oedb_engine, SOURCE_SCHEMA, oedb_engine, DESTINATION_SCHEMA, RUN_ID)
+    8. In function migrate_tables_to_destination() check the function write_scenario_log()
+    9.
Check if the tables in your source schema exist and named equally to the table dict like in DING0_TABLES{} + Parameters + ---------- + from_db: + s_schema: + to_db: + d_schema: + runid: + """ + source, sengine = make_session(from_db) + smeta = MetaData(bind=sengine, schema=s_schema) + destination, dengine = make_session(to_db) + + for table_name in get_table_names(DING0_TABLES): + print('Processing', table_name) + print('Pulling schema from source server') + table = Table(table_name, smeta, autoload=True) + print('Creating table on destination server or schema') + try: + table.schema = d_schema + table.metadata.create_all(dengine, checkfirst=True) + except exc.ProgrammingError: + print("WARNING: The Index on the table already exists, warning can be ignored.") + table.schema = s_schema + new_record = quick_mapper(table) + columns = table.columns.keys() + print('Transferring records') + for record in source.query(table).all(): + data = dict( + [(str(column), getattr(record, column)) for column in columns] + ) + table.schema = d_schema + destination.merge(new_record(**data)) + + print('Committing changes') + destination.commit() + + rows = destination.query(table.c.run_id).count() + json_tbl_name = [] + for k,v in DING0_TABLES.items(): + if v == table_name: + json_tbl_name.append(k) + metadata_string_json = prepare_metadatastring_fordb(json_tbl_name[0]) + write_scenario_log(oedb_engine, 'open_eGo', runid, 'output', s_schema, table_name, 'db_export.py', + entries=rows, comment='versioning', metadata=metadata_string_json) + + +def quick_mapper(table): + Base = declarative_base() + + class GenericMapper(Base): + __table__ = table + return GenericMapper + + +def db_tables_change_owner(engine, schema): + DECLARATIVE_BASE = declarative_base() + METADATA = DECLARATIVE_BASE.metadata + + tables = METADATA.sorted_tables + + def change_owner(engine, table, role, schema): + """ + Gives access to database users/ groups + + Parameters + ---------- + engine: sqlalchemy session object + A valid connection to a database + schema: The schema in which the tables are to be created + table : sqlalchmy Table class definition + The database table + role : str + database role that access is granted to + """ + tablename = table + + grant_str = """ALTER TABLE {schema}.{table} + OWNER TO {role};""".format(schema=schema, table=tablename.name, + role=role) + + # engine.execute(grant_str) + engine.execution_options(autocommit=True).execute(grant_str) + + # engine.echo=True + + for tab in tables: + change_owner(engine, tab, 'oeuser', schema) + + +if __name__ == '__main__': + # source + oedb_engine = connection(section='oedb') + # # Testing Database -> destination + # reiners_engine = connection(section='reiners_db') + + SOURCE_SCHEMA = 'model_draft' + DESTINATION_SCHEMA = 'grid' + tables = get_table_names(DING0_TABLES) + + # Enter the current run_id, Inserted in scenario_log + RUN_ID = '20180823154014' + + # Metadata folder Path + METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'io', 'metadatastrings') + + migrate_tables_to_destination(oedb_engine, SOURCE_SCHEMA, oedb_engine, DESTINATION_SCHEMA, RUN_ID) + # db_tables_change_owner(oedb_engine, DESTINATION_SCHEMA) diff --git a/ding0/io/ego_scenario_log.py b/ding0/io/ego_scenario_log.py new file mode 100644 index 00000000..3905b429 --- /dev/null +++ b/ding0/io/ego_scenario_log.py @@ -0,0 +1,84 @@ +""" +Write entry into scenario log table +""" + +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" 
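quick_mapper() above turns a reflected Table into a throw-away ORM class, which is what makes session.merge() on the destination side possible; a minimal sketch of that pattern for a single table (the engine name, schema and values are examples):

    from sqlalchemy import MetaData, Table

    smeta = MetaData(bind=oedb_engine, schema='model_draft')
    line_tbl = Table('ego_grid_ding0_line', smeta, autoload=True)  # reflect existing table

    LineRecord = quick_mapper(line_tbl)
    record = LineRecord(id=1, run_id=20180823154014, edge_name='example_line')
    # destination_session.merge(record); destination_session.commit()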
+__url__ = "https://github.com/openego/data_processing/blob/master/LICENSE" +__author__ = "nesnoj, Ludee" + + +from datetime import datetime +from sqlalchemy.orm import sessionmaker +from egoio.db_tables.model_draft import ScenarioLog as orm_scenario_log + + +def write_scenario_log(conn, project, version, io, schema, table, + script, entries=None, comment=None, metadata=None): + """ + Write entry into scenario log table + + Parameters + ---------- + conn: SQLAlchemy connection object + project: str + Project name + version: str + Version number + io: str + IO-type (input, output, temp) + schema: str + Database schema + table: str + Database table + script: str + Script name + entries: int + Number of entries + comment: str + Comment + metadata: str + Meta data + + Example + ------- + write_scenario_log(conn=conn, + project='eGoDP' + version='v0.3.0', + io='output', + schema='model_draft', + table='ego_demand_loadarea_peak_load', + script='peak_load_per_load_area.py', + entries=1000) + """ + + Session = sessionmaker(bind=conn) + session = Session() + + # extract user from connection details + # is there a better way? + try: + conn_details = conn.connection.connection.dsn + for entry in conn_details.split(' '): + if entry.split('=')[0] == 'user': + user = entry.split('=')[1] + break + except: + user = 'unknown' + + # Add data to orm object + log_entry = orm_scenario_log(project=project, + version=version, + io=io, + schema_name=schema, + table_name=table, + script_name=script, + entries=entries, + comment=comment, + user_name=user, + timestamp=datetime.now(), + meta_data=metadata) + + # Commit to DB + session.add(log_entry) + session.commit() diff --git a/ding0/io/env.txt b/ding0/io/env.txt new file mode 100644 index 00000000..06632671 --- /dev/null +++ b/ding0/io/env.txt @@ -0,0 +1,9 @@ +This is a short introduction to the anaconda environment that was used to develop +the export functionality. + +- open skelleton.yml file in this directory and change the "name" to whatever you + would like to call your environment +- use the command "conda env create -f skeleton.yml" +- run the command "pip install -U -e ego.io\" +- run the command "pip install -U -e ding0\" +- run the command "pip install -U -e eDisGo\" \ No newline at end of file diff --git a/ding0/io/export.py b/ding0/io/export.py new file mode 100644 index 00000000..b93cecd1 --- /dev/null +++ b/ding0/io/export.py @@ -0,0 +1,627 @@ +"""This file is part of DINGO, the DIstribution Network GeneratOr. +DINGO is a tool to generate synthetic medium and low voltage power +distribution grids based on open data. 
+ +It is developed in the project open_eGo: https://openegoproject.wordpress.com + +DING0 lives at github: https://github.com/openego/ding0/ +The documentation is available on RTD: http://ding0.readthedocs.io""" + +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" +__author__ = "nesnoj, gplssm" + +import os +import numpy as np +import pandas as pd +from collections import namedtuple + +import json + +from ding0.core import NetworkDing0 +from ding0.core import GeneratorDing0, GeneratorFluctuatingDing0 +from ding0.core import LVCableDistributorDing0, MVCableDistributorDing0 +from ding0.core import MVStationDing0, LVStationDing0 +from ding0.core import CircuitBreakerDing0 +from ding0.core.network.loads import LVLoadDing0, MVLoadDing0 +from ding0.core import LVLoadAreaCentreDing0 + +from shapely.geometry import Point, MultiPoint, MultiLineString, LineString, MultiPolygon, shape, mapping + +os.environ['PROJ_LIB']=r"B:\Anaconda3\Library\share" +#ToDo: python 3.5??? yes was new in version 2.6 +Network = namedtuple( + 'Network', + [ + 'run_id', 'metadata_json', 'lv_grid', 'lv_gen', 'lv_cd', 'lv_stations', 'mvlv_trafos', 'lv_loads', + 'mv_grid', 'mv_gen', 'mv_cb', 'mv_cd', 'mv_stations', 'hvmv_trafos', 'mv_loads', 'lines', 'mvlv_mapping' + ] +) + + +def export_network(nw, mode='', run_id=None): + """ + Export all nodes and lines of the network nw as DataFrames + + Parameters + ---------- + nw: :any:`list` of NetworkDing0 + The MV grid(s) to be studied + mode: str + If 'MV' export only medium voltage nodes and lines + If 'LV' export only low voltage nodes and lines + else, exports MV and LV nodes and lines + + Returns + ------- + pandas.DataFrame + nodes_df : Dataframe containing nodes and its attributes + pandas.DataFrame + lines_df : Dataframe containing lines and its attributes + """ + + # close circuit breakers + nw.control_circuit_breakers(mode='close') + # srid + srid = str(int(nw.config['geo']['srid'])) + ############################## + # check what to do + lv_info = True + mv_info = True + if mode == 'LV': + mv_info = False + if mode == 'MV': + lv_info = False + ############################## + # from datetime import datetime + if not run_id: + run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") + metadata_json = json.dumps(nw.metadata) + else: + # ToDo: This seems to have no effect so check why + # nw.metadata['run_id'] = run_id + + metadata_json = json.dumps(nw.metadata) + metadata_json = json.loads(metadata_json) + metadata_json['run_id'] = run_id + metadata_json = json.dumps(metadata_json) + + ############################## + ############################# + # go through the grid collecting info + lvgrid_idx = 0 + lv_grid_dict = {} + lvloads_idx = 0 + lv_loads_dict = {} + mvgrid_idx = 0 + mv_grid_dict = {} + mvloads_idx = 0 + mv_loads_dict = {} + mvgen_idx = 0 + mv_gen_dict = {} + mvcb_idx = 0 + mvcb_dict = {} + mvcd_idx = 0 + mv_cd_dict = {} + # mvstations_idx = 0 + mv_stations_dict = {} + mvtrafos_idx = 0 + hvmv_trafos_dict = {} + lvgen_idx = 0 + lv_gen_dict = {} + lvcd_idx = 0 + lv_cd_dict = {} + lvstations_idx = 0 + lv_stations_dict = {} + lvtrafos_idx = 0 + mvlv_trafos_dict = {} + areacenter_idx = 0 + areacenter_dict = {} + lines_idx = 0 + lines_dict = {} + LVMVmapping_idx = 0 + mvlv_mapping_dict = {} + + def aggregate_loads(la_center, aggr): + """Aggregate consumption in load area per sector + Parameters + ---------- + 
la_center: LVLoadAreaCentreDing0 + Load area center object from Ding0 + Returns + ------- + """ + for s in ['retail', 'industrial', 'agricultural', 'residential']: + if s not in aggr['load']: + aggr['load'][s] = {} + + for t in ['nominal', 'peak']: + if t not in aggr['load'][s]: + aggr['load'][s][t] = 0 + + aggr['load']['retail']['nominal'] += sum( + [_.sector_consumption_retail + for _ in la_center.lv_load_area._lv_grid_districts]) + aggr['load']['industrial']['nominal'] += sum( + [_.sector_consumption_industrial + for _ in la_center.lv_load_area._lv_grid_districts]) + aggr['load']['agricultural']['nominal'] += sum( + [_.sector_consumption_agricultural + for _ in la_center.lv_load_area._lv_grid_districts]) + aggr['load']['residential']['nominal'] += sum( + [_.sector_consumption_residential + for _ in la_center.lv_load_area._lv_grid_districts]) + + aggr['load']['retail']['peak'] += sum( + [_.peak_load_retail + for _ in la_center.lv_load_area._lv_grid_districts]) + aggr['load']['industrial']['peak'] += sum( + [_.peak_load_industrial + for _ in la_center.lv_load_area._lv_grid_districts]) + aggr['load']['agricultural']['peak'] += sum( + [_.peak_load_agricultural + for _ in la_center.lv_load_area._lv_grid_districts]) + aggr['load']['residential']['peak'] += sum( + [_.peak_load_residential + for _ in la_center.lv_load_area._lv_grid_districts]) + + return aggr + + for mv_district in nw.mv_grid_districts(): + from shapely.wkt import dumps as wkt_dumps + mv_grid_id = mv_district.mv_grid.id_db + mv_grid_name = '_'.join( + [str(mv_district.mv_grid.__class__.__name__), 'MV', str(mv_grid_id), + str(mv_district.mv_grid.id_db)]) + + if mv_info: + lv_grid_id = 0 + + # MV-grid + # ToDo: geom <- Polygon + mvgrid_idx += 1 + mv_grid_dict[mvgrid_idx] = { + 'id': mv_grid_id, + 'name': mv_grid_name, + 'geom': wkt_dumps(mv_district.geo_data), + 'population': + sum([_.zensus_sum + for _ in + mv_district._lv_load_areas + #ToDo: Check if this returns any value -> changed np.inan to pd.isnull, fixes type error for the export + if not pd.isnull(_.zensus_sum)]), + 'voltage_nom': mv_district.mv_grid.v_level, # in kV + 'run_id': run_id + } + + # MV station + mv_station = mv_district.mv_grid._station + mv_station_name = '_'.join( + ['MVStationDing0', 'MV', str(mv_station.id_db), + str(mv_station.id_db)]) + mv_stations_dict[0] = { + 'id': mv_district.mv_grid.id_db, + 'name': mv_station_name, + 'geom': wkt_dumps(mv_station.geo_data), + 'run_id': run_id} + + # Trafos MV + for t in mv_station.transformers(): + mvtrafos_idx += 1 + hvmv_trafos_dict[mvtrafos_idx] = { + 'id': mv_station.id_db, + 'geom': wkt_dumps(mv_station.geo_data), + 'name': '_'.join( + ['MVTransformerDing0', 'MV', str(mv_station.id_db), + str(mv_station.id_db)]), + 'voltage_op': t.v_level, + 'S_nom': t.s_max_a, + 'X': t.x, + 'R': t.r, + 'run_id': run_id, + } + + # MV grid components + for node in mv_district.mv_grid.graph_nodes_sorted(): + geom = wkt_dumps(node.geo_data) + + # LVStation + if isinstance(node, LVStationDing0): + if not node.lv_load_area.is_aggregated: + lvstations_idx += 1 + lv_grid_name = '_'.join( + ['LVGridDing0', 'LV', str(node.id_db), + str(node.id_db)]) + lv_stations_dict[lvstations_idx] = { + 'id': node.id_db, + 'name': '_'.join([node.__class__.__name__, + 'MV', str(mv_grid_id), + str(node.id_db)]), + 'geom': geom, + 'run_id': run_id, + } + + # LV-MV mapping + LVMVmapping_idx += 1 + mvlv_mapping_dict[LVMVmapping_idx] = { + 'mv_grid_id': mv_grid_id, + 'mv_grid_name': mv_grid_name, + 'lv_grid_id': node.id_db, + 'lv_grid_name': 
lv_grid_name, + 'run_id': run_id, + } + + # Trafos LV + for t in node.transformers(): + lvtrafos_idx += 1 + mvlv_trafos_dict[lvtrafos_idx] = { + 'id': node.id_db, + 'geom': geom, + 'name': '_'.join(['LVTransformerDing0', 'LV', + str(node.id_db), + str(node.id_db)]), + 'voltage_op': t.v_level, + 'S_nom': t.s_max_a, + 'X': t.x, + 'R': t.r, + 'run_id': run_id, + } + + # MVGenerator + elif isinstance(node, (GeneratorDing0, GeneratorFluctuatingDing0)): + if node.subtype == None: + subtype = 'other' + else: + subtype = node.subtype + if isinstance(node, GeneratorFluctuatingDing0): + type = node.type + mvgen_idx += 1 + mv_gen_dict[mvgen_idx] = { + 'id': node.id_db, + 'name': '_'.join(['GeneratorFluctuatingDing0', 'MV', + str(mv_grid_id), + str(node.id_db)]), + 'geom': geom, + 'type': type, + 'subtype': subtype, + 'v_level': node.v_level, + 'nominal_capacity': node.capacity, + 'run_id': run_id, + 'is_aggregated': False, + 'weather_cell_id': node.weather_cell_id + } + else: + type = node.type + mvgen_idx += 1 + mv_gen_dict[mvgen_idx] = { + 'id': node.id_db, + 'name': '_'.join( + ['GeneratorDing0', 'MV', str(mv_grid_id), + str(node.id_db)]), + 'geom': geom, + 'type': type, + 'subtype': subtype, + 'v_level': node.v_level, + 'nominal_capacity': node.capacity, + 'run_id': run_id, + 'is_aggregated': False, + 'weather_cell_id': np.nan + } + + # MVBranchTees + elif isinstance(node, MVCableDistributorDing0): + mvcd_idx += 1 + mv_cd_dict[mvcd_idx] = { + 'id': node.id_db, + 'name': '_'.join( + [str(node.__class__.__name__), 'MV', + str(mv_grid_id), str(node.id_db)]), + 'geom': geom, + 'run_id': run_id, + } + + # LoadAreaCentre + elif isinstance(node, LVLoadAreaCentreDing0): + + # type = 'Load area center of aggregated load area' + + areacenter_idx += 1 + aggr_lines = 0 + + aggr = {'generation': {}, 'load': {}, 'aggregates': []} + + # Determine aggregated load in MV grid + # -> Implement once loads in Ding0 MV grids exist + + # Determine aggregated load in LV grid + aggr = aggregate_loads(node, aggr) + + # Collect metadata of aggregated load areas + aggr['aggregates'] = { + 'population': node.lv_load_area.zensus_sum, + 'geom': wkt_dumps(node.lv_load_area.geo_area)} + aggr_line_type = nw._static_data['MV_cables'].iloc[ + nw._static_data['MV_cables']['I_max_th'].idxmax()] + geom = wkt_dumps(node.geo_data) + + for aggr_node in aggr: + if aggr_node == 'generation': + pass + + elif aggr_node == 'load': + for type in aggr['load']: + mvloads_idx += 1 + aggr_line_id = 100 * node.lv_load_area.id_db + mvloads_idx + 1 + mv_aggr_load_name = '_'.join( + ['Load_aggregated', str(type), + repr(mv_district.mv_grid), + # str(node.lv_load_area.id_db)]) + str(aggr_line_id)]) + mv_loads_dict[mvloads_idx] = { + # Exception: aggregated loads get a string as id + 'id': aggr_line_id, #node.lv_load_area.id_db, #mv_aggr_load_name, + 'name': mv_aggr_load_name, + 'geom': geom, + 'consumption': json.dumps( + {type: aggr['load'][type]['nominal']}), + 'is_aggregated': True, + 'run_id': run_id, + } + + lines_idx += 1 + aggr_lines += 1 + edge_name = '_'.join( + ['line_aggr_load_la', + str(node.lv_load_area.id_db), str(type), + # str(node.lv_load_area.id_db)]) + str(aggr_line_id)]) + lines_dict[lines_idx] = { + 'id': aggr_line_id, #node.lv_load_area.id_db, + 'edge_name': edge_name, + 'grid_name': mv_grid_name, + 'type_name': aggr_line_type.name, + 'type_kind': 'cable', + 'length': 1e-3, # in km + 'U_n': aggr_line_type.U_n, + 'I_max_th': aggr_line_type.I_max_th, + 'R': aggr_line_type.R, + 'L': aggr_line_type.L, + 'C': aggr_line_type.C, + 
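+                                # The aggregated load is connected to the MV station by a
+                                # synthetic 1 m cable (length stored in km) of the MV cable
+                                # type with the highest I_max_th; the geom below degenerates
+                                # to a point because both ends use mv_station.geo_data.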
'node1': mv_aggr_load_name, + 'node2': mv_station_name, + 'run_id': run_id, + 'geom': LineString([mv_station.geo_data, mv_station.geo_data]) + } + + # TODO: eventually remove export of DisconnectingPoints from export + # DisconnectingPoints + elif isinstance(node, CircuitBreakerDing0): + mvcb_idx += 1 + mvcb_dict[mvcb_idx] = { + 'id': node.id_db, + 'name': '_'.join([str(node.__class__.__name__), 'MV', + str(mv_grid_id), str(node.id_db)]), + 'geom': geom, + 'status': node.status, + 'run_id': run_id, + } + else: + type = 'Unknown' + + # MVedges + for branch in mv_district.mv_grid.graph_edges(): + # geom_string = from_shape(LineString([branch['adj_nodes'][0].geo_data, + # branch['adj_nodes'][1].geo_data]), + # srid=srid) + # geom = wkt_dumps(geom_string) + + if not any([isinstance(branch['adj_nodes'][0], + LVLoadAreaCentreDing0), + isinstance(branch['adj_nodes'][1], + LVLoadAreaCentreDing0)]): + lines_idx += 1 + lines_dict[lines_idx] = { + 'id': branch['branch'].id_db, + 'edge_name': '_'.join( + [branch['branch'].__class__.__name__, + str(branch['branch'].id_db)]), + 'grid_name': mv_grid_name, + 'type_name': branch['branch'].type['name'], + 'type_kind': branch['branch'].kind, + 'length': branch['branch'].length / 1e3, + 'U_n': branch['branch'].type['U_n'], + 'I_max_th': branch['branch'].type['I_max_th'], + 'R': branch['branch'].type['R'], + 'L': branch['branch'].type['L'], + 'C': branch['branch'].type['C'], + 'node1': '_'.join( + [str(branch['adj_nodes'][0].__class__.__name__), + 'MV', str(mv_grid_id), + str(branch['adj_nodes'][0].id_db)]), + 'node2': '_'.join( + [str(branch['adj_nodes'][1].__class__.__name__), + 'MV', str(mv_grid_id), + str(branch['adj_nodes'][1].id_db)]), + 'run_id': run_id, + 'geom': LineString([branch['adj_nodes'][0].geo_data, branch['adj_nodes'][1].geo_data]) + } + + if lv_info: + for LA in mv_district.lv_load_areas(): + for lv_district in LA.lv_grid_districts(): + + if not lv_district.lv_grid.grid_district.lv_load_area.is_aggregated: + lv_grid_id = lv_district.lv_grid.id_db + lv_grid_name = '_'.join( + [str(lv_district.lv_grid.__class__.__name__), 'LV', + str(lv_district.lv_grid.id_db), + str(lv_district.lv_grid.id_db)]) + + lvgrid_idx += 1 + lv_grid_dict[lvgrid_idx] = { + 'id': lv_district.lv_grid.id_db, + 'name': lv_grid_name, + 'geom': wkt_dumps(lv_district.geo_data), + 'population': lv_district.population, + 'voltage_nom': lv_district.lv_grid.v_level / 1e3, + 'run_id': run_id + } + + # geom = from_shape(Point(lv_district.lv_grid.station().geo_data), srid=srid) + # geom = wkt_dumps(lv_district.geo_data)# lv_grid.station() #ding0_lv_grid.grid_district.geo_data + for node in lv_district.lv_grid.graph_nodes_sorted(): + # geom = wkt_dumps(node.geo_data) + + # LVGenerator + if isinstance(node, (GeneratorDing0, GeneratorFluctuatingDing0)): + if node.subtype == None: + subtype = 'other' + else: + subtype = node.subtype + if isinstance(node, GeneratorFluctuatingDing0): + type = node.type + lvgen_idx += 1 + lv_gen_dict[lvgen_idx] = { + 'id': node.id_db, + 'la_id': LA.id_db, + 'name': '_'.join( + ['GeneratorFluctuatingDing0', 'LV', + str(lv_grid_id), + str(node.id_db)]), + 'lv_grid_id': lv_grid_id, + 'geom': wkt_dumps(node.geo_data), + 'type': type, + 'subtype': subtype, + 'v_level': node.v_level, + 'nominal_capacity': node.capacity, + 'run_id': run_id, + 'is_aggregated': node.lv_load_area.is_aggregated, + 'weather_cell_id': node.weather_cell_id, + } + else: + type = node.type + lvgen_idx += 1 + lv_gen_dict[lvgen_idx] = { + 'id': node.id_db, + 'name': '_'.join( + 
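+                                    # Non-fluctuating LV generators get the same fields as the
+                                    # fluctuating ones above, but no weather cell is attached,
+                                    # so weather_cell_id is set to np.nan below.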
['GeneratorDing0', 'LV', + str(lv_grid_id), + str(node.id_db)]), + 'la_id': LA.id_db, + 'lv_grid_id': lv_grid_id, + 'geom': wkt_dumps(node.geo_data), + 'type': type, + 'subtype': subtype, + 'v_level': node.v_level, + 'nominal_capacity': node.capacity, + 'run_id': run_id, + 'is_aggregated': node.lv_load_area.is_aggregated, + 'weather_cell_id': np.nan + } + + # LVcd + elif isinstance(node, LVCableDistributorDing0): + if not node.grid.grid_district.lv_load_area.is_aggregated: + lvcd_idx += 1 + lv_cd_dict[lvcd_idx] = { + 'name': '_'.join( + [str(node.__class__.__name__), 'LV', + str(lv_grid_id), str(node.id_db)]), + 'id': node.id_db, + 'lv_grid_id': lv_grid_id, + 'geom': None, + # wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? + 'run_id': run_id, + } + + # LVload + elif isinstance(node, LVLoadDing0): + if not node.grid.grid_district.lv_load_area.is_aggregated: + lvloads_idx += 1 + lv_loads_dict[lvloads_idx] = { + 'id': node.id_db, + 'name': '_'.join( + [str(node.__class__.__name__), 'LV', + str(lv_grid_id), str(node.id_db)]), + 'lv_grid_id': lv_grid_id, + 'geom': None, + # wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? + 'consumption': json.dumps(node.consumption), + 'run_id': run_id, + } + + # LVedges + for branch in lv_district.lv_grid.graph_edges(): + if not branch['branch'].connects_aggregated: + if not any([isinstance(branch['adj_nodes'][0], + LVLoadAreaCentreDing0), + isinstance(branch['adj_nodes'][1], + LVLoadAreaCentreDing0)]): + lines_idx += 1 + lines_dict[lines_idx] = { + 'id': branch['branch'].id_db, + 'edge_name': '_'.join( + [branch['branch'].__class__.__name__, + str(branch['branch'].id_db)]), + 'grid_name': lv_grid_name, + 'type_name': branch[ + 'branch'].type.to_frame().columns[0], + 'type_kind': branch['branch'].kind, + 'length': branch['branch'].length / 1e3, + # length in km + 'U_n': branch['branch'].type['U_n'] / 1e3, + # U_n in kV + 'I_max_th': branch['branch'].type[ + 'I_max_th'], + 'R': branch['branch'].type['R'], + 'L': branch['branch'].type['L'], + 'C': branch['branch'].type['C'], + 'node1': '_'.join( + [str(branch['adj_nodes'][ + 0].__class__.__name__), 'LV', + str(lv_grid_id), + str(branch['adj_nodes'][0].id_db)]) + if not isinstance(branch['adj_nodes'][0], + LVStationDing0) else '_'.join( + [str(branch['adj_nodes'][ + 0].__class__.__name__), 'MV', + str(mv_grid_id), + str(branch['adj_nodes'][0].id_db)]), + 'node2': '_'.join( + [str(branch['adj_nodes'][ + 1].__class__.__name__), 'LV', + str(lv_grid_id), + str(branch['adj_nodes'][1].id_db)]) + if not isinstance(branch['adj_nodes'][1], + LVStationDing0) else '_'.join( + [str(branch['adj_nodes'][ + 1].__class__.__name__), 'MV', + str(mv_grid_id), + str(branch['adj_nodes'][1].id_db)]), + 'run_id': run_id, + 'geom': None + } + + lv_grid = pd.DataFrame.from_dict(lv_grid_dict, orient='index') + lv_gen = pd.DataFrame.from_dict(lv_gen_dict, orient='index') + lv_cd = pd.DataFrame.from_dict(lv_cd_dict, orient='index') + lv_stations = pd.DataFrame.from_dict(lv_stations_dict, orient='index') + mvlv_trafos = pd.DataFrame.from_dict(mvlv_trafos_dict, orient='index') + lv_loads = pd.DataFrame.from_dict(lv_loads_dict, orient='index') + mv_grid = pd.DataFrame.from_dict(mv_grid_dict, orient='index') + mv_gen = pd.DataFrame.from_dict(mv_gen_dict, orient='index') + mv_cb = pd.DataFrame.from_dict(mvcb_dict, orient='index') + mv_cd = pd.DataFrame.from_dict(mv_cd_dict, orient='index') + mv_stations = pd.DataFrame.from_dict(mv_stations_dict, orient='index') + hvmv_trafos = 
pd.DataFrame.from_dict(hvmv_trafos_dict, orient='index') + mv_loads = pd.DataFrame.from_dict(mv_loads_dict, orient='index') + lines = pd.DataFrame.from_dict(lines_dict, orient='index') + mvlv_mapping = pd.DataFrame.from_dict(mvlv_mapping_dict, orient='index') + + lines = lines[sorted(lines.columns.tolist())] + + return Network( + run_id, metadata_json, lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, mv_grid, mv_gen, mv_cb, + mv_cd, mv_stations, hvmv_trafos, mv_loads, lines, mvlv_mapping + ) diff --git a/ding0/io/exporter_log.py b/ding0/io/exporter_log.py new file mode 100644 index 00000000..0c58bfe6 --- /dev/null +++ b/ding0/io/exporter_log.py @@ -0,0 +1,46 @@ +"""This file is part of DINGO, the DIstribution Network GeneratOr. +DINGO is a tool to generate synthetic medium and low voltage power +distribution grids based on open data. + +It is developed in the project open_eGo: https://openegoproject.wordpress.com + +DING0 lives at github: https://github.com/openego/ding0/ +The documentation is available on RTD: http://ding0.readthedocs.io""" + +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" +__author__ = "jh-RLI" + +import os + +# LOG_FILE_PATH = 'pickle_log' +# LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log') + +def pickle_export_logger(log_file_path): + """ + Creates a list for pickle files that are missing for some reason. + Most likely the file does not exists @ the pickle file path dir. + + Log missing ding0 GridDistricts: + The export_log provides functionality that set ups a logging file. One need to provide a path to the destination + the logfile shall be stored in. + The file can be opened at any code line and input can be provided. This logger is mainly used within a exception + to log the missing GridDistricts that could not be created by ding0. + + :param log_file_path: + :return: + """ + # does the file exist? + if not os.path.isfile(log_file_path): + print('ding0 log-file {file} not found. ' + 'This might be the first run of the tool. ' + .format(file=log_file_path)) + base_path = os.path.split(log_file_path)[0] + if not os.path.isdir(base_path): + os.mkdir(base_path) + print('The directory {path} was created.'.format(path=base_path)) + + with open(log_file_path, 'a') as log: + log.write("List of missing grid districts:") + pass diff --git a/ding0/io/file_export.py b/ding0/io/file_export.py new file mode 100644 index 00000000..a8d7f4c9 --- /dev/null +++ b/ding0/io/file_export.py @@ -0,0 +1,99 @@ +"""This file is part of DINGO, the DIstribution Network GeneratOr. +DINGO is a tool to generate synthetic medium and low voltage power +distribution grids based on open data. 
+ +It is developed in the project open_eGo: https://openegoproject.wordpress.com + +DING0 lives at github: https://github.com/openego/ding0/ +The documentation is available on RTD: http://ding0.readthedocs.io""" + +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" +__author__ = "nesnoj, gplssm, jh-RLI" + +import os + +import json + +from ding0.tools.results import load_nd_from_pickle +from ding0.io.export import export_network +from ding0.io.exporter_log import pickle_export_logger + + +def create_destination_dir(): + pass + + +def export_data_tocsv(path, network, csv_sep=','): + # put a text file with the metadata + metadata = json.loads(network.metadata_json) + # canĀ“t test this -> no permission for my user + with open(os.path.join(path, 'metadata.json'), 'w') as metafile: + json.dump(metadata, metafile) + + # Exports data to csv + def export_network_tocsv(path, table, tablename): + return table.to_csv(os.path.join(path, tablename + '.csv'), sep=csv_sep) + + export_network_tocsv(path, network.lv_grid, 'lv_grid') + export_network_tocsv(path, network.lv_gen, 'lv_generator') + export_network_tocsv(path, network.lv_cd, 'lv_branchtee') + export_network_tocsv(path, network.lv_stations, 'lv_station') + export_network_tocsv(path, network.mvlv_trafos, 'mvlv_transformer') + export_network_tocsv(path, network.lv_loads, 'lv_load') + export_network_tocsv(path, network.mv_grid, 'mv_grid') + export_network_tocsv(path, network.mv_gen, 'mv_generator') + export_network_tocsv(path, network.mv_cd, 'mv_branchtee') + export_network_tocsv(path, network.mv_stations, 'mv_station') + export_network_tocsv(path, network.hvmv_trafos, 'hvmv_transformer') + export_network_tocsv(path, network.mv_cb, 'mv_circuitbreaker') + export_network_tocsv(path, network.mv_loads, 'mv_load') + export_network_tocsv(path, network.lines, 'line') + export_network_tocsv(path, network.mvlv_mapping, 'mvlv_mapping') + # export_network_tocsv(path, areacenter, 'areacenter') + + +if __name__ == '__main__': + """ + Advise: + First off check for existing .csv files in your destination folder. + Existing files will be extended. + Multiple grids will be stored all in one file. + """ + + # Path to user dir, Log file for missing Grid_Districts, Will be crated if not existing + LOG_FILE_PATH = os.path.join(os.path.expanduser("~"), '.ding0_log', 'pickle_log') + pickle_export_logger(LOG_FILE_PATH) + + # static path + # Insert your own path + pkl_filepath = "/home/local/RL-INSTITUT/jonas.huber/rli/Daten_flexibel_01/Ding0/20180823154014" + + # static path, .csv will be stored here + # change this to your own destination folder + destination_path = pkl_filepath + + # choose MV Grid Districts to import use list of integers + # Multiple grids f. e.: grids = list(range(1, 3609)) - 1 to 3608(all of the existing) + # Single grids f. 
e.: grids = [2] + grids = list(range(1, 3)) + + # Loop over all selected Grids, exports every singele one to file like .csv + for grid_no in grids: + + try: + nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no))) + except: + print('Something went wrong, created log entry in: {}'.format(LOG_FILE_PATH)) + with open(LOG_FILE_PATH, 'a') as log: + log.write('ding0_grids__{}.pkl not present to the current directory\n'.format(grid_no)) + pass + + continue + + # Extract data from network and create DataFrames + # pandas DataFrames will be exported as .csv file + network_tupels = export_network(nw, run_id=nw.metadata['run_id']) + export_data_tocsv(destination_path, network_tupels) + diff --git a/ding0/io/io_settings.py b/ding0/io/io_settings.py new file mode 100644 index 00000000..87a41776 --- /dev/null +++ b/ding0/io/io_settings.py @@ -0,0 +1,13 @@ +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" +__author__ = "jh-RLI" + +import os +from configobj import ConfigObj + + +io_base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + +ding0_config_path = os.path.join(io_base_dir, 'config', 'exporter_config.cfg') +exporter_config = ConfigObj(ding0_config_path) diff --git a/ding0/io/metadatastrings/hvmv_transformer.json b/ding0/io/metadatastrings/hvmv_transformer.json new file mode 100644 index 00000000..e44eac67 --- /dev/null +++ b/ding0/io/metadatastrings/hvmv_transformer.json @@ -0,0 +1,49 @@ +{"title": "DING0 - Result data for hvmv transformer", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "Ā© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_hvmv_transformer", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + 
{"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","description": "FIXME","unit": "string"}, + {"name": "voltage_op","description": "FIXME","unit": "float"}, + {"name": "s_nom","description": "nominal apparent power as float","unit": "kVA"}, + {"name": "x","description": "as float","unit": "Ohm"}, + {"name": "r","description": "as float","unit": "Ohm"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } \ No newline at end of file diff --git a/ding0/io/metadatastrings/line.json b/ding0/io/metadatastrings/line.json new file mode 100644 index 00000000..1f2c8449 --- /dev/null +++ b/ding0/io/metadatastrings/line.json @@ -0,0 +1,57 @@ +{"title": "DING0 - Result data for line(cable)", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "Ā© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_line", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge","unit": "integer"}, + {"name": "edge_name","description": "unambiguous name of edge","unit": "string"}, + {"name": "grid_name","description": "unambiguous name of grid","unit": "string"}, + {"name": "node1","description": 
"id_db of first node","unit": "string"}, + {"name": "node2","description": "id_db of second node","unit": "string"}, + {"name": "type_kind","description": "n/a","unit": "string"}, + {"name": "type_name","description": "n/a","unit": "string"}, + {"name": "length","description": "length of line as float","unit": "km"}, + {"name": "u_n","description": "nominal voltage as float","unit": "kV"}, + {"name": "c","description": "inductive resistance at 50Hz as float","unit": "uF/km"}, + {"name": "l","description": "stored as float","unit": " mH/km"}, + {"name": "r","description": "stored as float","unit": "Ohm/km"}, + {"name": "i_max_th","description": "stored as float","unit": "A"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 LINESTRING"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } diff --git a/ding0/io/metadatastrings/lv_branchtee.json b/ding0/io/metadatastrings/lv_branchtee.json new file mode 100644 index 00000000..3b29c6ca --- /dev/null +++ b/ding0/io/metadatastrings/lv_branchtee.json @@ -0,0 +1,46 @@ +{"title": "DING0 - Result data for lv branchtee", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "Ā© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_lv_branchtee", + "format": "PostgreSQL", + "fields": [ + {"name": "id","discription": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","discription": "time and date of table 
generation","unit": "yyyyMMddhhmmss"}, + {"name": "geom","discription": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "id_db","discription": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "name","discription": "unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber","unit": "string "} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } \ No newline at end of file diff --git a/ding0/io/metadatastrings/lv_generator.json b/ding0/io/metadatastrings/lv_generator.json new file mode 100644 index 00000000..769164e2 --- /dev/null +++ b/ding0/io/metadatastrings/lv_generator.json @@ -0,0 +1,54 @@ +{"title": "DING0 - Result data for lv generator", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "Ā© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_lv_generator", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "la_id","description": "FIXME","unit": "integer"}, + {"name": "name","description": "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "lv_grid_id","description": "unambiguous id_db of LV-Grid","unit": "integer"}, + {"name": 
"geom","description": "geometric coordinates","unit": "WGS84, POINT"}, + {"name": "type","description": "type of generation {solar; biomass}","unit": "string"}, + {"name": "subtype","description": "subtype of generation: {solar_roof_mounted, unknown; biomass}","unit": "string"}, + {"name": "v_level","description": "voltage level of generator as integer","unit": "FIXME"}, + {"name": "nominal_capacity","description": "nominal capacity as float","unit": "FIXME"}, + {"name": "is_aggregated","description": "True if load is aggregated load, else False","unit": "boolean"}, + {"name": "weather_cell_id","description": "unambiguous number of the corresponding weather cell","unit": "integer"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } diff --git a/ding0/io/metadatastrings/lv_grid.json b/ding0/io/metadatastrings/lv_grid.json new file mode 100644 index 00000000..263d8a1d --- /dev/null +++ b/ding0/io/metadatastrings/lv_grid.json @@ -0,0 +1,46 @@ +{"title": "DING0 - Result data lv grid", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "Ā© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_lv_grid", "format": "PostgreSQL", "fields": [ + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of LV-Grid","unit": 
"integer"}, + {"name": "name","description": "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#","unit": "string"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 MULTIPOLYGON"}, + {"name": "population","description": "population in LV-Grid","unit": "integer"}, + {"name": "voltage_nom","description": "voltage level of grid as float","unit": "kV"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } \ No newline at end of file diff --git a/ding0/io/metadatastrings/lv_load.json b/ding0/io/metadatastrings/lv_load.json new file mode 100644 index 00000000..7276db63 --- /dev/null +++ b/ding0/io/metadatastrings/lv_load.json @@ -0,0 +1,48 @@ +{"title": "DING0 - Result data for lv load areas", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "Ā© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_lv_load", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "name","description": "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "lv_grid_id","description": "unambiguous id_db of LV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric 
coordinates","unit": "WGS84 POINT"}, + {"name": "consumption","description": "type of load {residential, agricultural, industrial} and corresponding consumption","unit": "string "} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } diff --git a/ding0/io/metadatastrings/lv_station.json b/ding0/io/metadatastrings/lv_station.json new file mode 100644 index 00000000..b57b2bc3 --- /dev/null +++ b/ding0/io/metadatastrings/lv_station.json @@ -0,0 +1,46 @@ +{"title": "DING0 - Result data for lv station", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": + {"id": "", + "name": "", + "version": "", + "url": "", + "instruction": "", + "copyright": ""}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_lv_station", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, + {"name": "id_db","description": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","description": "FIXME","unit": "string"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": 
"Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } diff --git a/ding0/io/metadatastrings/mv_branchtee.json b/ding0/io/metadatastrings/mv_branchtee.json new file mode 100644 index 00000000..7663db04 --- /dev/null +++ b/ding0/io/metadatastrings/mv_branchtee.json @@ -0,0 +1,46 @@ +{"title": "DING0 - Result data for mv branchtee", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "Ā© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_mv_branchtee", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","description": "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } diff --git a/ding0/io/metadatastrings/mv_circuitbreaker.json b/ding0/io/metadatastrings/mv_circuitbreaker.json new file mode 100644 index 00000000..81b45a77 --- /dev/null +++ b/ding0/io/metadatastrings/mv_circuitbreaker.json @@ -0,0 +1,47 @@ +{"title": "DING0 - Result data for mv circuitbreaker", +"description": "DIstribution Network Generat0r - 
A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "Ā© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_mv_circuitbreaker", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","description": "FIXME","unit": "string"}, + {"name": "status","description": "FIXME","unit": "string "} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } diff --git a/ding0/io/metadatastrings/mv_generator.json b/ding0/io/metadatastrings/mv_generator.json new file mode 100644 index 00000000..a13ce158 --- /dev/null +++ b/ding0/io/metadatastrings/mv_generator.json @@ -0,0 +1,52 @@ +{"title": "DING0 - Result data for mv generator", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": 
"https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "Ā© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_mv_generator", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "name","description": "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "type","description": "type of generation: {solar; biomass}","unit": "string"}, + {"name": "subtype","description": "subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas}","unit": "string"}, + {"name": "v_level","description": "voltage level of generator as integer","unit": "FIXME"}, + {"name": "nominal_capacity","description": "nominal capacity as float","unit": "FIXME"}, + {"name": "weather_cell_id","description": "unambiguous number of the corresponding weather cell","unit": "integer"}, + {"name": "is_aggregated","description": "True if load is aggregated load, else False","unit": "boolean"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } diff --git a/ding0/io/metadatastrings/mv_grid.json b/ding0/io/metadatastrings/mv_grid.json new file mode 100644 index 00000000..e7632581 --- /dev/null +++ b/ding0/io/metadatastrings/mv_grid.json @@ -0,0 +1,48 @@ +{"title": "DING0 - Result data for mv grid area", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + 
"resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "Ā© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_mv_grid", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 MULTIPOLYGON"}, + {"name": "name","description": "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'","unit": "string"}, + {"name": "population","description": "population in MV-Grid","unit": "integer"}, + {"name": "voltage_nom","description": "voltage level of grid as float","unit": "kV" } ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } diff --git a/ding0/io/metadatastrings/mv_load.json b/ding0/io/metadatastrings/mv_load.json new file mode 100644 index 00000000..5b246425 --- /dev/null +++ b/ding0/io/metadatastrings/mv_load.json @@ -0,0 +1,47 @@ +{"title": "DING0 - Result data for mv load area", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 
2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": +{"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "Ā© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_mv_load", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "name","description": "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'","unit": "string"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 GEOMETRY"}, + {"name": "is_aggregated","description": "True if load is aggregated load, else False","unit": "boolean"}, + {"name": "consumption","description": "type of load {retail, residential, agricultural, industrial} and corresponding consumption","unit": "string" } ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } \ No newline at end of file diff --git a/ding0/io/metadatastrings/mv_station.json b/ding0/io/metadatastrings/mv_station.json new file mode 100644 index 00000000..a28d6a3f --- /dev/null +++ b/ding0/io/metadatastrings/mv_station.json @@ -0,0 +1,46 @@ +{"title": "DING0 - Result data for mv station", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": 
"zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "Ā© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_mv_station", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "id_db","description": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","description": "unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid#","unit": "string"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } \ No newline at end of file diff --git a/ding0/io/metadatastrings/mvlv_mapping.json b/ding0/io/metadatastrings/mvlv_mapping.json new file mode 100644 index 00000000..831f4bb2 --- /dev/null +++ b/ding0/io/metadatastrings/mvlv_mapping.json @@ -0,0 +1,47 @@ +{"title": "DING0 - Result data for mvlv mapping", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as 
you: Attribute, Share-Alike, Keep open!", + "copyright": "Ā© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_mvlv_mapping", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, + {"name": "lv_grid_id","description": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "lv_grid_name","description": "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'","unit": "string"}, + {"name": "mv_grid_id","description": "unambiguous number of MV-Grid","unit": "integer"}, + {"name": "mv_grid_name","description": "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'","unit": "string"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } diff --git a/ding0/io/metadatastrings/mvlv_transformer.json b/ding0/io/metadatastrings/mvlv_transformer.json new file mode 100644 index 00000000..ff09a4fb --- /dev/null +++ b/ding0/io/metadatastrings/mvlv_transformer.json @@ -0,0 +1,50 @@ +{"title": "DING0 - Result data for mvlv transformer", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "Ā© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", 
"date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_mvlv_transformer", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": "unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation in yyyyMMddhhmmss","unit": "integer"}, + {"name": "id_db","description": "unambiguous number of LV-Grid","unit": "integer"}, + {"name": "geom","description": "geometric coordinates","unit": "WGS84 POINT"}, + {"name": "name","description": "FIXME","unit": "string"}, + {"name": "voltage_op","description": "as float ","unit": "kV"}, + {"name": "s_nom","description": "nominal apparent power as float ","unit": "kVA"}, + {"name": "x","description": "as float","unit": "Ohm"}, + {"name": "r","description": "as float","unit": "Ohm"}] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": " http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } \ No newline at end of file diff --git a/ding0/io/metadatastrings/versioning.json b/ding0/io/metadatastrings/versioning.json new file mode 100644 index 00000000..f67b5bcc --- /dev/null +++ b/ding0/io/metadatastrings/versioning.json @@ -0,0 +1,44 @@ +{"title": "DING0 - Result data for ding0 versioning", +"description": "DIstribution Network Generat0r - A tool to generate synthetic medium and low voltage power distribution grids based on open (or at least accessible) data.", +"language": [ "eng","ger "], +"spatial": + {"location": "none", + "extent": "none", + "resolution": "none"}, +"temporal": + {"reference_date": "none", + "start": "none", + "end": "none", + "resolution": "none"}, +"sources": [ + {"name": "ding0","description": "Ding0 reference","url": "https://github.com/openego/ding0","license": "GNU Affero General Public License-3.0","copyright": "Ā© 2017 openego project group"}, + {"name": "readthedocs.io","description": "Ding0 documentation","url": "https://dingo.readthedocs.io/en/dev/","license": "GNU Affero General Public License-3.0","copyright": "Ā© Copyright 2015-2018, open_eGo-Team"}, + {"name": "zenodo","description": "Distribution grid data generated by DINGO","url": "https://doi.org/10.5281/zenodo.890479","license": "CC BY-SA 4.0","copyright": "Ā© 2017 openego project group"} ], +"license": + {"id": "ODbL-1.0", + "name": "Open Data Commons Open Database License 1.0", + "version": "1", + "url": "https://opendatacommons.org/licenses/odbl/1.0/", + "instruction": "You are free: To Share, To Create, To Adapt; As long as you: Attribute, Share-Alike, Keep open!", + "copyright": "Ā© Reiner Lemoine Institut"}, +"contributors": [ + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-08-17", "comment": "Create metadata"}, + {"name": "Jonas Huber", "email": "jonas.huber@rl-institut.de", "date": "2018-09-12", "comment": "Update metadata section source"} ], +"resources": [ + {"name": "grid.ego_grid_ding0_versioning", + "format": "PostgreSQL", + "fields": [ + {"name": "id","description": 
"unambiguous unique numer","unit": "integer"}, + {"name": "run_id","description": "time and date of table generation","unit": "yyyyMMddhhmmss"}, + {"name": "description","description": "Used parameters for this run","unit": "string"} ] } ], +"metadata_version": "1.3", +"_comment": { + "_url": "https://github.com/OpenEnergyPlatform/examples/tree/master/metadata", + "_copyright": "Ā© Reiner Lemoine Institut", + "_metadata_license": "Creative Commons Zero v1.0 Universal (CC0-1.0)", + "_metadata_license_url": "https://creativecommons.org/publicdomain/zero/1.0/", + "_contains": "http://www.json.org/; http://stackoverflow.com/questions/383692/what-is-json-and-why-would-i-use-it", + "_additional_information": { + "_dates": "Dates must follow the ISO8601 (JJJJ-MM-TT)", + "_units": "Use a space between Numbers and units (100 m)", + "_none": "If not applicable use: none"} } } \ No newline at end of file diff --git a/ding0/io/skeleton.yml b/ding0/io/skeleton.yml new file mode 100644 index 00000000..20298126 --- /dev/null +++ b/ding0/io/skeleton.yml @@ -0,0 +1,36 @@ +name: exporter +channels: + - conda-forge + - defaults +dependencies: + - geoalchemy2=0.4.1 + - geopy=1.11.0 + - jupyter + - keyring + - matplotlib=1.5.3 + - multiprocess + - nb_conda + - networkx=1.11 + - numpy=1.11.3 + - openpyxl + - pandas=0.20.3 + - psycopg2 + - pyomo=5.5.0 + - pyproj=1.9.5.1 + - python=3.6.5 + - seaborn + - shapely=1.6.3 + - sphinx + - sphinx_rtd_theme + - sqlite + - xlrd + - sqlalchemy=1.2.0 + - unittest2 + - pip: + - demandlib + - keyrings.alt + - oedialect + - pypsa==0.11.0 + - workalendar + - ConfigObj + diff --git a/ding0/tools/results.py b/ding0/tools/results.py index 094e9903..103a431c 100644 --- a/ding0/tools/results.py +++ b/ding0/tools/results.py @@ -7,11 +7,10 @@ DING0 lives at github: https://github.com/openego/ding0/ The documentation is available on RTD: http://ding0.readthedocs.io""" -__copyright__ = "Reiner Lemoine Institut gGmbH" -__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" -__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" -__author__ = "nesnoj, gplssm" - +__copyright__ = "Reiner Lemoine Institut gGmbH" +__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" +__url__ = "https://github.com/openego/ding0/blob/master/LICENSE" +__author__ = "nesnoj, gplssm" import pickle import numpy as np @@ -25,7 +24,7 @@ from egoio.tools import db from ding0.core import NetworkDing0 -from ding0.core import GeneratorDing0 +from ding0.core import GeneratorDing0, GeneratorFluctuatingDing0 from ding0.core import LVCableDistributorDing0, MVCableDistributorDing0 from ding0.core import MVStationDing0, LVStationDing0 from ding0.core import CircuitBreakerDing0 @@ -114,7 +113,8 @@ def save_nd_to_pickle(nd, path='', filename=None): number=nd._mv_grid_districts[0].id_db, number2=nd._mv_grid_districts[-1].id_db) else: - name_extension = '_{number}'.format(number=nd._mv_grid_districts[0].id_db) + name_extension = '_{number}'.format( + number=nd._mv_grid_districts[0].id_db) if filename is None: filename = "ding0_grids_{ext}.pkl".format( @@ -318,7 +318,9 @@ def calculate_lvgd_stats(nw): for LA in mv_district.lv_load_areas(): for lv_district in LA.lv_grid_districts(): lv_dist_idx += 1 - branches_from_station = len(lv_district.lv_grid.graph_branches_from_node(lv_district.lv_grid.station())) + branches_from_station = len( + lv_district.lv_grid.graph_branches_from_node( + lv_district.lv_grid.station())) lv_dist_dict[lv_dist_idx] = { 'MV_grid_id': mv_district.mv_grid.id_db, 
'LV_grid_id': lv_district.lv_grid.id_db, @@ -377,7 +379,8 @@ def calculate_lvgd_stats(nw): branches_dict[branch_idx] = { 'LV_grid_id': lv_district.lv_grid.id_db, 'length': branch['branch'].length / 1e3, - 'type_name': branch['branch'].type.to_frame().columns[0], + 'type_name': branch['branch'].type.to_frame().columns[ + 0], 'type_kind': branch['branch'].kind, } # Transformers @@ -392,7 +395,8 @@ def calculate_lvgd_stats(nw): district_geo = transform(proj, lv_district.geo_data) lv_dist_dict[lv_dist_idx].update({'Area': district_geo.area}) - lvgd_stats = pd.DataFrame.from_dict(lv_dist_dict, orient='index').set_index('LV_grid_id') + lvgd_stats = pd.DataFrame.from_dict(lv_dist_dict, orient='index').set_index( + 'LV_grid_id') # generate partial dataframes gen_df = pd.DataFrame.from_dict(lv_gen_dict, orient='index') load_df = pd.DataFrame.from_dict(lv_load_dict, orient='index') @@ -402,62 +406,87 @@ def calculate_lvgd_stats(nw): # resque desired data if not gen_df.empty: # generation by voltage level - lv_generation = gen_df.groupby(['LV_grid_id', 'v_level'])['GenCap'].sum().to_frame().unstack(level=-1) - lv_generation.columns = ['Gen. Cap. v_level ' + str(_[1]) if isinstance(_, tuple) else str(_) for _ in - lv_generation.columns] + lv_generation = gen_df.groupby(['LV_grid_id', 'v_level'])[ + 'GenCap'].sum().to_frame().unstack(level=-1) + lv_generation.columns = [ + 'Gen. Cap. v_level ' + str(_[1]) if isinstance(_, tuple) else str(_) + for _ in + lv_generation.columns] lvgd_stats = pd.concat([lvgd_stats, lv_generation], axis=1) # generation by type/subtype - lv_generation = gen_df.groupby(['LV_grid_id', 'subtype'])['GenCap'].sum().to_frame().unstack(level=-1) - lv_generation.columns = ['Gen. Cap. type ' + str(_[1]) if isinstance(_, tuple) else str(_) for _ in - lv_generation.columns] + lv_generation = gen_df.groupby(['LV_grid_id', 'subtype'])[ + 'GenCap'].sum().to_frame().unstack(level=-1) + lv_generation.columns = [ + 'Gen. Cap. 
type ' + str(_[1]) if isinstance(_, tuple) else str(_) + for _ in + lv_generation.columns] lvgd_stats = pd.concat([lvgd_stats, lv_generation], axis=1) if not load_df.empty: # number of residential loads - lv_loads = load_df[load_df['load_type'] == 'residential'].groupby(['LV_grid_id'])[ + lv_loads = \ + load_df[load_df['load_type'] == 'residential'].groupby(['LV_grid_id'])[ 'load_type'].count().to_frame() # .unstack(level=-1) lv_loads.columns = ['NĀ° of loads residential'] lvgd_stats = pd.concat([lvgd_stats, lv_loads], axis=1) # number of agricultural loads - lv_loads = load_df[load_df['load_type'] == 'agricultural'].groupby(['LV_grid_id'])[ + lv_loads = \ + load_df[load_df['load_type'] == 'agricultural'].groupby(['LV_grid_id'])[ 'load_type'].count().to_frame() # .unstack(level=-1) lv_loads.columns = ['NĀ° of loads agricultural'] lvgd_stats = pd.concat([lvgd_stats, lv_loads], axis=1) # number of mixed industrial / retail loads - lv_loads = load_df[load_df['load_type'] == 'ind_ret'].groupby(['LV_grid_id'])[ + lv_loads = \ + load_df[load_df['load_type'] == 'ind_ret'].groupby(['LV_grid_id'])[ 'load_type'].count().to_frame() # .unstack(level=-1) lv_loads.columns = ['NĀ° of loads mixed industrial/retail'] lvgd_stats = pd.concat([lvgd_stats, lv_loads], axis=1) if not branch_df.empty: # branches by type name - lv_branches = branch_df.groupby(['LV_grid_id', 'type_name'])['length'].sum().to_frame().unstack(level=-1) - lv_branches.columns = ['Length Type ' + _[1] if isinstance(_, tuple) else _ for _ in lv_branches.columns] + lv_branches = branch_df.groupby(['LV_grid_id', 'type_name'])[ + 'length'].sum().to_frame().unstack(level=-1) + lv_branches.columns = [ + 'Length Type ' + _[1] if isinstance(_, tuple) else _ for _ in + lv_branches.columns] lvgd_stats = pd.concat([lvgd_stats, lv_branches], axis=1) # branches by kind - lv_branches = branch_df[branch_df['type_kind'] == 'line'].groupby(['LV_grid_id'])['length'].sum().to_frame() + lv_branches = \ + branch_df[branch_df['type_kind'] == 'line'].groupby(['LV_grid_id'])[ + 'length'].sum().to_frame() lv_branches.columns = ['Length of overhead lines'] lvgd_stats = pd.concat([lvgd_stats, lv_branches], axis=1) - lv_branches = branch_df[branch_df['type_kind'] == 'cable'].groupby(['LV_grid_id'])['length'].sum().to_frame() + lv_branches = \ + branch_df[branch_df['type_kind'] == 'cable'].groupby(['LV_grid_id'])[ + 'length'].sum().to_frame() lv_branches.columns = ['Length of underground cables'] lvgd_stats = pd.concat([lvgd_stats, lv_branches], axis=1) # NĀ°of branches - lv_branches = branch_df.groupby(['LV_grid_id', 'type_name'])['length'].count().to_frame().unstack(level=-1) - lv_branches.columns = ['NĀ° of branches Type ' + _[1] if isinstance(_, tuple) else _ for _ in - lv_branches.columns] + lv_branches = branch_df.groupby(['LV_grid_id', 'type_name'])[ + 'length'].count().to_frame().unstack(level=-1) + lv_branches.columns = [ + 'NĀ° of branches Type ' + _[1] if isinstance(_, tuple) else _ for _ + in + lv_branches.columns] lvgd_stats = pd.concat([lvgd_stats, lv_branches], axis=1) - lv_branches = branch_df[branch_df['type_kind'] == 'line'].groupby(['LV_grid_id'])['length'].count().to_frame() + lv_branches = \ + branch_df[branch_df['type_kind'] == 'line'].groupby(['LV_grid_id'])[ + 'length'].count().to_frame() lv_branches.columns = ['NĀ° of branches overhead lines'] lvgd_stats = pd.concat([lvgd_stats, lv_branches], axis=1) - lv_branches = branch_df[branch_df['type_kind'] == 'cable'].groupby(['LV_grid_id'])['length'].count().to_frame() + lv_branches = \ + 
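Most of the rewrapped statistics code above relies on the same pandas idiom: group by grid id plus a category column, sum, and unstack the category into one column per value. A small self-contained illustration with invented values:

import pandas as pd

# Toy frame shaped like gen_df in calculate_lvgd_stats (values invented).
gen_df = pd.DataFrame({'LV_grid_id': [1, 1, 2],
                       'v_level':    [6, 7, 7],
                       'GenCap':     [30.0, 10.0, 5.0]})

# Group by grid id and voltage level, sum, then pivot v_level into columns.
lv_generation = gen_df.groupby(['LV_grid_id', 'v_level'])['GenCap'] \
    .sum().to_frame().unstack(level=-1)

# After unstack the columns are tuples like ('GenCap', 6); flatten them
# the same way the code above does.
lv_generation.columns = ['Gen. Cap. v_level ' + str(c[1]) if isinstance(c, tuple) else str(c)
                         for c in lv_generation.columns]
print(lv_generation)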
branch_df[branch_df['type_kind'] == 'cable'].groupby(['LV_grid_id'])[ + 'length'].count().to_frame() lv_branches.columns = ['NĀ° of branches underground cables'] lvgd_stats = pd.concat([lvgd_stats, lv_branches], axis=1) if not trafos_df.empty: # N of trafos - lv_trafos = trafos_df.groupby(['LV_grid_id'])['s_max_a'].count().to_frame() + lv_trafos = trafos_df.groupby(['LV_grid_id'])[ + 's_max_a'].count().to_frame() lv_trafos.columns = ['NĀ° of MV/LV Trafos'] lvgd_stats = pd.concat([lvgd_stats, lv_trafos], axis=1) # Capacity of trafos - lv_trafos = trafos_df.groupby(['LV_grid_id'])['s_max_a'].sum().to_frame() + lv_trafos = trafos_df.groupby(['LV_grid_id'])[ + 's_max_a'].sum().to_frame() lv_trafos.columns = ['Accumulated s_max_a in MVLV trafos'] lvgd_stats = pd.concat([lvgd_stats, lv_trafos], axis=1) @@ -540,10 +569,11 @@ def calculate_mvgd_stats(nw): continue mv_impedance = 0 mv_path_length = 0 - if not isinstance(node, MVCableDistributorDing0) and not isinstance(node, CircuitBreakerDing0): + if not isinstance(node, MVCableDistributorDing0) and not isinstance( + node, CircuitBreakerDing0): if not nx.has_path(G, root, node): continue - #print(node, node.lv_load_area.is_aggregated) # only debug + # print(node, node.lv_load_area.is_aggregated) # only debug else: path = nx.shortest_path(G, root, node) for i in range(len(path) - 1): @@ -561,14 +591,17 @@ def calculate_mvgd_stats(nw): mv_impedances[node] = mv_impedance mv_path_lengths[node] = mv_path_length + mv_thermal_limit = G.adj[path[0]][path[1]]['branch'].type['I_max_th'] + mv_thermal_limits[node] = mv_thermal_limit if isinstance(node, LVStationDing0): # add impedance of transformers in LV station lvstation_impedance = 0. for trafo in node.transformers(): - lvstation_impedance += 1. / np.hypot(trafo.r,trafo.x) # transformers operating in parallel + lvstation_impedance += 1. / np.hypot(trafo.r, + trafo.x) # transformers operating in parallel if lvstation_impedance > 0.: # avoid dividing by zero lvstation_impedance = 1. / lvstation_impedance else: @@ -580,10 +613,14 @@ def calculate_mvgd_stats(nw): G_lv = lv_dist.lv_grid._graph # loop over all LV terminal nodes belonging to LV station for lv_node in G_lv.nodes(): - if isinstance(lv_node, GeneratorDing0) or isinstance(lv_node, LVLoadDing0): - path = nx.shortest_path(G_lv, node, lv_node) + if isinstance(lv_node, + GeneratorDing0) or isinstance( + lv_node, LVLoadDing0): + path = nx.shortest_path(G_lv, node, + lv_node) lv_impedance = lvstation_impedance lv_path_length = 0. + for i in range(len(path)-1): lv_impedance += np.sqrt((G_lv.adj[path[i]][path[i+1]]['branch'].type['L'] * 1e-3 * omega * \ G_lv.adj[path[i]][path[i+1]]['branch'].length)**2. 
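The impedance statistics above walk the shortest path from the MV station to each terminal node and accumulate the impedance and length of every branch on that path (the new mv_thermal_limit line additionally records the rating of the path's first segment). A simplified sketch of that idea on a toy networkx graph, with assumed edge attributes r, x and length instead of Ding0 branch objects:

import networkx as nx
import numpy as np

# Toy graph; edge attributes r, x (Ohm) and length (km) are assumptions,
# the real code derives them from the Ding0 branch type and length.
G = nx.Graph()
G.add_edge('station', 'n1', r=0.10, x=0.20, length=0.5)
G.add_edge('n1', 'n2', r=0.05, x=0.10, length=0.3)

def path_impedance_and_length(graph, source, target):
    path = nx.shortest_path(graph, source, target)
    z, km = 0.0, 0.0
    for u, v in zip(path[:-1], path[1:]):
        edge = graph.adj[u][v]
        z += np.hypot(edge['r'], edge['x'])   # impedance magnitude of this segment
        km += edge['length']
    return z, km

print(path_impedance_and_length(G, 'station', 'n2'))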
+ \ @@ -609,7 +646,8 @@ def calculate_mvgd_stats(nw): # terminal nodes on MV for terminal_node in mv_impedances.keys(): # neglect LVStations here because already part of MVLV paths below - if not isinstance(terminal_node, LVStationDing0) and not isinstance(terminal_node, MVStationDing0): + if not isinstance(terminal_node, LVStationDing0) and not isinstance( + terminal_node, MVStationDing0): sum_impedances += mv_impedances[terminal_node] sum_thermal_limits += mv_thermal_limits[terminal_node] sum_path_lengths += mv_path_lengths[terminal_node] @@ -713,7 +751,8 @@ def calculate_mvgd_stats(nw): elif isinstance(node, LVStationDing0): LVs_count += 1 lv_trafo_count += len([trafo for trafo in node.transformers()]) - lv_trafo_cap += np.sum([trafo.s_max_a for trafo in node.transformers()]) + lv_trafo_cap += np.sum( + [trafo.s_max_a for trafo in node.transformers()]) if not node.lv_load_area.is_aggregated: mv_path_length = district.mv_grid.graph_path_length( @@ -727,7 +766,8 @@ def calculate_mvgd_stats(nw): lv_path_length = lv_dist.lv_grid.graph_path_length( node_source=node, node_target=lv_node) - max_lv_path = max(max_lv_path, lv_path_length) + max_lv_path = max(max_lv_path, + lv_path_length) mvlv_path_length = mv_path_length + max_lv_path elif isinstance(node, CircuitBreakerDing0): @@ -799,7 +839,8 @@ def calculate_mvgd_stats(nw): lv_branches_dict[lv_branches_idx] = { 'grid_id': district.mv_grid.id_db, 'length': br['branch'].length / 1e3, - 'type_name': br['branch'].type.to_frame().columns[0], # why is it different as for MV grids? + 'type_name': br['branch'].type.to_frame().columns[0], + # why is it different as for MV grids? 'type_kind': br['branch'].kind, } @@ -825,7 +866,8 @@ def calculate_mvgd_stats(nw): pyproj.Proj(init='epsg:4326'), # source coordinate system pyproj.Proj(init='epsg:3035')) # destination coordinate system district_geo = transform(proj, district.geo_data) - other_nodes_dict[district.mv_grid.id_db].update({'Dist_area': district_geo.area}) + other_nodes_dict[district.mv_grid.id_db].update( + {'Dist_area': district_geo.area}) mvgd_stats = pd.DataFrame.from_dict({}, orient='index') ################################### @@ -841,22 +883,29 @@ def calculate_mvgd_stats(nw): ################################### # Aggregated data HV/MV Trafos if not trafos_df.empty: - mvgd_stats = pd.concat([mvgd_stats, trafos_df.groupby('grid_id').count()['s_max_a']], axis=1) - mvgd_stats = pd.concat([mvgd_stats, trafos_df.groupby('grid_id').sum()[['s_max_a']]], axis=1) + mvgd_stats = pd.concat( + [mvgd_stats, trafos_df.groupby('grid_id').count()['s_max_a']], + axis=1) + mvgd_stats = pd.concat( + [mvgd_stats, trafos_df.groupby('grid_id').sum()[['s_max_a']]], + axis=1) mvgd_stats.columns = ['NĀ° of HV/MV Trafos', 'Trafos HV/MV Acc s_max_a'] ################################### # Aggregated data Generators if not generators_df.empty: # MV generation per sub_type - mv_generation = generators_df.groupby(['grid_id', 'sub_type'])['gen_cap'].sum().to_frame().unstack(level=-1) - mv_generation.columns = ['Gen. Cap. of MV ' + _[1] if isinstance(_, tuple) else _ - for _ in mv_generation.columns] + mv_generation = generators_df.groupby(['grid_id', 'sub_type'])[ + 'gen_cap'].sum().to_frame().unstack(level=-1) + mv_generation.columns = [ + 'Gen. Cap. 
of MV ' + _[1] if isinstance(_, tuple) else _ + for _ in mv_generation.columns] mvgd_stats = pd.concat([mvgd_stats, mv_generation], axis=1) # MV generation at V levels mv_generation = generators_df.groupby( - ['grid_id', 'v_level'])['gen_cap'].sum().to_frame().unstack(level=-1) + ['grid_id', 'v_level'])['gen_cap'].sum().to_frame().unstack( + level=-1) mv_generation.columns = ['Gen. Cap. of MV at v_level ' + str(_[1]) if isinstance(_, tuple) else _ for _ in mv_generation.columns] @@ -871,21 +920,31 @@ def calculate_mvgd_stats(nw): # Aggregated data of other nodes if not other_nodes_df.empty: # print(other_nodes_df['CD_count'].to_frame()) - mvgd_stats['NĀ° of Cable Distr'] = other_nodes_df['CD_count'].to_frame().astype(int) - mvgd_stats['NĀ° of LV Stations'] = other_nodes_df['LV_count'].to_frame().astype(int) - mvgd_stats['NĀ° of Circuit Breakers'] = other_nodes_df['CB_count'].to_frame().astype(int) + mvgd_stats['NĀ° of Cable Distr'] = other_nodes_df[ + 'CD_count'].to_frame().astype(int) + mvgd_stats['NĀ° of LV Stations'] = other_nodes_df[ + 'LV_count'].to_frame().astype(int) + mvgd_stats['NĀ° of Circuit Breakers'] = other_nodes_df[ + 'CB_count'].to_frame().astype(int) mvgd_stats['District Area'] = other_nodes_df['Dist_area'].to_frame() - mvgd_stats['NĀ° of MV/LV Trafos'] = other_nodes_df['MVLV_trafo_count'].to_frame().astype(int) - mvgd_stats['Trafos MV/LV Acc s_max_a'] = other_nodes_df['MVLV_trafo_cap'].to_frame() - mvgd_stats['Length of MV max path'] = other_nodes_df['max_mv_path'].to_frame() - mvgd_stats['Length of MVLV max path'] = other_nodes_df['max_mvlv_path'].to_frame() + mvgd_stats['NĀ° of MV/LV Trafos'] = other_nodes_df[ + 'MVLV_trafo_count'].to_frame().astype(int) + mvgd_stats['Trafos MV/LV Acc s_max_a'] = other_nodes_df[ + 'MVLV_trafo_cap'].to_frame() + mvgd_stats['Length of MV max path'] = other_nodes_df[ + 'max_mv_path'].to_frame() + mvgd_stats['Length of MVLV max path'] = other_nodes_df[ + 'max_mvlv_path'].to_frame() mvgd_stats['Impedance Z of path to terminal node (mean value)'] = \ other_nodes_df['mean_impedance'].to_frame() - mvgd_stats['I_max of first segment of path from MV station to terminal node (mean value)'] = \ + mvgd_stats[ + 'I_max of first segment of path from MV station to terminal node (mean value)'] = \ other_nodes_df['mean_thermal_limit'].to_frame() - mvgd_stats['I_max of first segment of path from LV station to terminal node (mean value)'] = \ + mvgd_stats[ + 'I_max of first segment of path from LV station to terminal node (mean value)'] = \ other_nodes_df['mean_thermal_limit_LV'].to_frame() - mvgd_stats['Length of path from MV station to terminal node (mean value)'] = \ + mvgd_stats[ + 'Length of path from MV station to terminal node (mean value)'] = \ other_nodes_df['mean_path_length'].to_frame() mvgd_stats['Number of lines and cables going out from LV stations'] = \ other_nodes_df['number_outgoing_LV'].to_frame() @@ -896,7 +955,8 @@ def calculate_mvgd_stats(nw): # Aggregated data of MV Branches if not branches_df.empty: # km of underground cable - branches_data = branches_df[branches_df['type_kind'] == 'cable'].groupby( + branches_data = \ + branches_df[branches_df['type_kind'] == 'cable'].groupby( ['grid_id'])['length'].sum().to_frame() branches_data.columns = ['Length of MV underground cables'] mvgd_stats = pd.concat([mvgd_stats, branches_data], axis=1) @@ -909,9 +969,11 @@ def calculate_mvgd_stats(nw): # km of different wire types branches_data = branches_df.groupby( - ['grid_id', 'type_name'])['length'].sum().to_frame().unstack(level=-1) - 
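Each aggregation block above ends with pd.concat([mvgd_stats, <partial frame>], axis=1); because every partial frame is indexed by grid id, the concatenation aligns columns on that index rather than on row position. Toy illustration with invented column names and values:

import pandas as pd

mvgd_stats = pd.DataFrame({'trafo_count': [1, 2]},
                          index=pd.Index([5, 8], name='grid_id'))
district_area = pd.DataFrame({'district_area_km2': [120.5, 98.1]},
                             index=pd.Index([8, 5], name='grid_id'))

# Columns are joined on the shared grid_id index, not on row order.
mvgd_stats = pd.concat([mvgd_stats, district_area], axis=1)
print(mvgd_stats)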
branches_data.columns = ['Length of MV type ' + _[1] if isinstance(_, tuple) else _ - for _ in branches_data.columns] + ['grid_id', 'type_name'])['length'].sum().to_frame().unstack( + level=-1) + branches_data.columns = [ + 'Length of MV type ' + _[1] if isinstance(_, tuple) else _ + for _ in branches_data.columns] mvgd_stats = pd.concat([mvgd_stats, branches_data], axis=1) # branches not in ring @@ -928,26 +990,31 @@ def calculate_mvgd_stats(nw): # Aggregated data of LV Branches if not lv_branches_df.empty: # km of underground cable - lv_branches_data = lv_branches_df[lv_branches_df['type_kind'] == 'cable'].groupby( + lv_branches_data = \ + lv_branches_df[lv_branches_df['type_kind'] == 'cable'].groupby( ['grid_id'])['length'].sum().to_frame() lv_branches_data.columns = ['Length of LV underground cables'] mvgd_stats = pd.concat([mvgd_stats, lv_branches_data], axis=1) # km of overhead lines - lv_branches_data = lv_branches_df[lv_branches_df['type_kind'] == 'line'].groupby( + lv_branches_data = \ + lv_branches_df[lv_branches_df['type_kind'] == 'line'].groupby( ['grid_id'])['length'].sum().to_frame() lv_branches_data.columns = ['Length of LV overhead lines'] mvgd_stats = pd.concat([mvgd_stats, lv_branches_data], axis=1) # km of different wire types lv_branches_data = lv_branches_df.groupby( - ['grid_id', 'type_name'])['length'].sum().to_frame().unstack(level=-1) - lv_branches_data.columns = ['Length of LV type ' + _[1] if isinstance(_, tuple) else _ - for _ in lv_branches_data.columns] + ['grid_id', 'type_name'])['length'].sum().to_frame().unstack( + level=-1) + lv_branches_data.columns = [ + 'Length of LV type ' + _[1] if isinstance(_, tuple) else _ + for _ in lv_branches_data.columns] mvgd_stats = pd.concat([mvgd_stats, lv_branches_data], axis=1) # nĀ° of branches - total_lv_br = lv_branches_df.groupby(['grid_id'])['length'].count().to_frame() + total_lv_br = lv_branches_df.groupby(['grid_id'])[ + 'length'].count().to_frame() total_lv_br.columns = ['NĀ° of LV branches'] mvgd_stats = pd.concat([mvgd_stats, total_lv_br], axis=1) @@ -966,7 +1033,8 @@ def calculate_mvgd_stats(nw): ring_data = ring_df.groupby(['grid_id'])['ring_length'].max().to_frame() ring_data.columns = ['Length of MV Ring max'] mvgd_stats = pd.concat([mvgd_stats, ring_data], axis=1) - ring_data = ring_df.groupby(['grid_id'])['ring_length'].mean().to_frame() + ring_data = ring_df.groupby(['grid_id'])[ + 'ring_length'].mean().to_frame() ring_data.columns = ['Length of MV Ring mean'] mvgd_stats = pd.concat([mvgd_stats, ring_data], axis=1) @@ -976,14 +1044,16 @@ def calculate_mvgd_stats(nw): mvgd_stats = pd.concat([mvgd_stats, ring_data], axis=1) # km of non-ring - non_ring_data = branches_df.groupby(['grid_id'])['length'].sum().to_frame() + non_ring_data = branches_df.groupby(['grid_id'])[ + 'length'].sum().to_frame() non_ring_data.columns = ['Length of MV Rings total'] ring_data = non_ring_data - ring_data ring_data.columns = ['Length of MV Non-Rings total'] mvgd_stats = pd.concat([mvgd_stats, ring_data.round(1).abs()], axis=1) # rings generation capacity - ring_data = ring_df.groupby(['grid_id'])['ring_capacity'].sum().to_frame() + ring_data = ring_df.groupby(['grid_id'])[ + 'ring_capacity'].sum().to_frame() ring_data.columns = ['Gen. Cap. Connected to MV Rings'] mvgd_stats = pd.concat([mvgd_stats, ring_data], axis=1) ################################### @@ -1033,7 +1103,8 @@ def calculate_mvgd_stats(nw): 'lv_generation', 'total_peak_load'].sum() agg_LA_data.columns = ['LA Aggregated Population', - 'LA Aggregated LV Gen. 
Cap.', 'LA Aggregated LV Peak Load total' + 'LA Aggregated LV Gen. Cap.', + 'LA Aggregated LV Peak Load total' ] mvgd_stats = pd.concat([mvgd_stats, agg_LA_data], axis=1) @@ -1155,7 +1226,8 @@ def calculate_lvgd_voltage_current_stats(nw): if not LA.is_aggregated: for lv_district in LA.lv_grid_districts(): # nodes voltage - crit_nodes = get_critical_voltage_at_nodes(lv_district.lv_grid) + crit_nodes = get_critical_voltage_at_nodes( + lv_district.lv_grid) for node in crit_nodes: nodes_idx += 1 nodes_dict[nodes_idx] = { @@ -1170,7 +1242,8 @@ def calculate_lvgd_voltage_current_stats(nw): 'V nominal': lv_district.lv_grid.v_level, } # branches currents - critical_branches, critical_stations = get_critical_line_loading(lv_district.lv_grid) + critical_branches, critical_stations = get_critical_line_loading( + lv_district.lv_grid) for branch in critical_branches: branches_idx += 1 branches_dict[branches_idx] = { @@ -1322,10 +1395,11 @@ def process_stats(mv_districts, calc_mv = True calc_lv = True ####################################################################### - clusters = [mv_districts[x:x + n_of_districts] for x in range(0, len(mv_districts), n_of_districts)] + clusters = [mv_districts[x:x + n_of_districts] for x in + range(0, len(mv_districts), n_of_districts)] - mv_stats = [] - lv_stats = [] + mv_stats = [] + lv_stats = [] mv_crit_nodes = [] mv_crit_edges = [] lv_crit_nodes = [] @@ -1378,7 +1452,8 @@ def process_stats(mv_districts, lv_crit_nodes.append(stats[0]) lv_crit_edges.append(stats[1]) ####################################################################### - salida = (mv_stats, lv_stats, mv_crit_nodes, mv_crit_edges, lv_crit_nodes, lv_crit_edges) + salida = (mv_stats, lv_stats, mv_crit_nodes, mv_crit_edges, lv_crit_nodes, + lv_crit_edges) output.put(salida) @@ -1466,7 +1541,8 @@ def parallel_running_stats(districts_list, processes = [] for districts in threats: - args = (districts, n_of_districts, source, mode, critical, nw_name, output_stats) + args = (districts, n_of_districts, source, mode, critical, nw_name, + output_stats) processes.append(mp.Process(target=process_stats, args=args)) ####################################################################### # Run processes @@ -1575,910 +1651,6 @@ def parallel_running_stats(districts_list, return mv_stats, lv_stats, mv_crit_nodes, mv_crit_edges, lv_crit_nodes, lv_crit_edges -######################################################## -def export_network(nw, mode=''): - """ - Export all nodes and lines of the network nw as DataFrames - - Parameters - ---------- - nw: :any:`list` of NetworkDing0 - The MV grid(s) to be studied - mode: str - If 'MV' export only medium voltage nodes and lines - If 'LV' export only low voltage nodes and lines - else, exports MV and LV nodes and lines - - Returns - ------- - pandas.DataFrame - nodes_df : Dataframe containing nodes and its attributes - pandas.DataFrame - lines_df : Dataframe containing lines and its attributes - """ - - # close circuit breakers - nw.control_circuit_breakers(mode='close') - # srid - srid = str(int(nw.config['geo']['srid'])) - ############################## - # check what to do - lv_info = True - mv_info = True - if mode == 'LV': - mv_info = False - if mode == 'MV': - lv_info = False - ############################## - # from datetime import datetime - run_id = nw.metadata['run_id'] # datetime.now().strftime("%Y%m%d%H%M%S") - ############################## - ############################# - # go through the grid collecting info - lvgrid_idx = 0 - lv_grid_dict = {} - lvloads_idx 
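process_stats and parallel_running_stats above split the district list into clusters of n_of_districts and hand each cluster to its own process, which puts its partial results on a shared queue. A minimal sketch of that pattern; the worker body is only a stand-in for the per-cluster statistics:

import multiprocessing as mp

def worker(districts, output):
    # Stand-in for the per-cluster statistics computed by process_stats.
    output.put([d * 2 for d in districts])

if __name__ == '__main__':
    districts_list = list(range(1, 11))
    n_of_districts = 3
    clusters = [districts_list[x:x + n_of_districts]
                for x in range(0, len(districts_list), n_of_districts)]

    output = mp.Queue()
    processes = [mp.Process(target=worker, args=(c, output)) for c in clusters]
    for p in processes:
        p.start()
    # Drain the queue before joining so no child blocks on a full queue.
    results = [output.get() for _ in processes]
    for p in processes:
        p.join()
    print(results)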
= 0 - lv_loads_dict = {} - mvgrid_idx = 0 - mv_grid_dict = {} - mvloads_idx = 0 - mv_loads_dict = {} - mvgen_idx = 0 - mv_gen_dict = {} - mvcb_idx = 0 - mvcb_dict = {} - mvcd_idx = 0 - mv_cd_dict = {} - mvstations_idx = 0 - hvmv_stations_dict = {} - mvtrafos_idx = 0 - hvmv_trafos_dict = {} - lvgen_idx = 0 - lv_gen_dict = {} - lvcd_idx = 0 - lv_cd_dict = {} - lvstations_idx = 0 - mvlv_stations_dict = {} - lvtrafos_idx = 0 - mvlv_trafos_dict = {} - areacenter_idx = 0 - areacenter_dict = {} - lines_idx = 0 - lines_dict = {} - LVMVmapping_idx = 0 - mvlv_mapping_dict = {} - - def aggregate_generators(gen, aggr): - """Aggregate generation capacity per voltage level - Parameters - ---------- - gen: ding0.core.GeneratorDing0 - Ding0 Generator object - aggr: dict - Aggregated generation capacity. For structure see - `_determine_aggregated_nodes()`. - Returns - ------- - """ - - if gen.v_level not in aggr['generation']: - aggr['generation'][gen.v_level] = {} - if gen.type not in aggr['generation'][gen.v_level]: - aggr['generation'][gen.v_level][gen.type] = {} - if gen.subtype not in aggr['generation'][gen.v_level][gen.type]: - aggr['generation'][gen.v_level][gen.type].update( - {gen.subtype: {'ids': [gen.id_db], - 'capacity': gen.capacity}}) - else: - aggr['generation'][gen.v_level][gen.type][gen.subtype][ - 'ids'].append(gen.id_db) - aggr['generation'][gen.v_level][gen.type][gen.subtype][ - 'capacity'] += gen.capacity - - return aggr - - def aggregate_loads(la_center, aggr): - """Aggregate consumption in load area per sector - Parameters - ---------- - la_center: LVLoadAreaCentreDing0 - Load area center object from Ding0 - Returns - ------- - """ - for s in ['retail', 'industrial', 'agricultural', 'residential']: - if s not in aggr['load']: - aggr['load'][s] = {} - - for t in ['nominal','peak']: - if t not in aggr['load'][s]: - aggr['load'][s][t] = 0 - - aggr['load']['retail']['nominal'] += sum( - [_.sector_consumption_retail - for _ in la_center.lv_load_area._lv_grid_districts]) - aggr['load']['industrial']['nominal'] += sum( - [_.sector_consumption_industrial - for _ in la_center.lv_load_area._lv_grid_districts]) - aggr['load']['agricultural']['nominal'] += sum( - [_.sector_consumption_agricultural - for _ in la_center.lv_load_area._lv_grid_districts]) - aggr['load']['residential']['nominal'] += sum( - [_.sector_consumption_residential - for _ in la_center.lv_load_area._lv_grid_districts]) - - aggr['load']['retail']['peak'] += sum( - [_.peak_load_retail - for _ in la_center.lv_load_area._lv_grid_districts]) - aggr['load']['industrial']['peak'] += sum( - [_.peak_load_industrial - for _ in la_center.lv_load_area._lv_grid_districts]) - aggr['load']['agricultural']['peak'] += sum( - [_.peak_load_agricultural - for _ in la_center.lv_load_area._lv_grid_districts]) - aggr['load']['residential']['peak'] += sum( - [_.peak_load_residential - for _ in la_center.lv_load_area._lv_grid_districts]) - - return aggr - - for mv_district in nw.mv_grid_districts(): - - mv_grid_id = mv_district.mv_grid.id_db - mv_grid_id_db = '_'.join( - [str(mv_district.mv_grid.__class__.__name__), 'MV', str(mv_grid_id), str(mv_district.mv_grid.id_db)]) - - if mv_info: - lv_grid_id = 0 - - # MV-grid - # ToDo: geom <- Polygon - mvgrid_idx += 1 - mv_grid_dict[mvgrid_idx] = { - 'MV_grid_id': mv_district.mv_grid.id_db, - 'id_db': '_'.join([str(mv_district.mv_grid.__class__.__name__), 'MV', str(mv_grid_id), - str(mv_district.mv_grid.id_db)]), - # 'network': mv_district.mv_grid.network, - 'geom': wkt_dumps(mv_district.geo_data), - 
'population': # None, - sum([_.zensus_sum - for _ in - mv_district._lv_load_areas # ding0_grid.grid_district._lv_load_areas - if not np.isnan(_.zensus_sum)]), - 'voltage_nom': mv_district.mv_grid.v_level, # in kV - 'run_id': run_id - } - - # id_db: Classname_MV/LV_mvgridid/lvgridid_id - # excemptions: class LVStations: LVStationDing0_MV_mvgridid_id(=lvgridid) - - # MVGrid - for node in mv_district.mv_grid.graph_nodes_sorted(): - geom = wkt_dumps(node.geo_data) - # geom = from_shape(Point(node.geo_data), srid=srid) - db_id = node.id_db - - # LVStation - if isinstance(node, LVStationDing0): - if not node.lv_load_area.is_aggregated: - lvstations_idx += 1 - mvlv_stations_dict[lvstations_idx] = { - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), - 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), - 'geom': geom, - 'run_id': run_id, - } - - # LV-MV mapping - LVMVmapping_idx += 1 - mvlv_mapping_dict[LVMVmapping_idx] = { - 'MV_grid_id': mv_grid_id, - 'MV_grid_id_db': mv_grid_id_db, - 'LV_grid_id': node.id_db, - 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), - 'run_id': run_id, - } - - # Trafos LV - for t in node.transformers(): - lvtrafos_idx += 1 - mvlv_trafos_dict[lvtrafos_idx] = { - 'id_db': '_'.join([str(t.__class__.__name__), 'LV', str(mv_grid_id), str(node.id_db)]), - 'geom': geom, - 'LV_grid_id_db': '_'.join(['LVGridDing0', 'LV', str(node.id_db), str(node.id_db)]), - 'voltage_op': t.v_level, - 'S_nom': t.s_max_a, - 'X': t.x, - 'R': t.r, - 'run_id': run_id, - } - - # MVStation - elif isinstance(node, MVStationDing0): - mvstations_idx += 1 - hvmv_stations_dict[mvstations_idx] = { - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), - 'MV_grid_id_db': mv_grid_id_db, - 'geom': geom, - 'run_id': run_id, - } - - # Trafos MV - for t in node.transformers(): - mvtrafos_idx += 1 - hvmv_trafos_dict[mvtrafos_idx] = { - 'id_db': '_'.join([str(t.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), - 'geom': geom, - 'MV_grid_id_db': mv_grid_id_db, - 'voltage_op': t.v_level, - 'S_nom': t.s_max_a, - 'X': t.x, - 'R': t.r, - 'run_id': run_id, - } - - # MVGenerator - elif isinstance(node, GeneratorDing0): - if node.subtype == None: - subtype = 'other' - else: - subtype = node.subtype - type = node.type - mvgen_idx += 1 - mv_gen_dict[mvgen_idx] = { - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), - 'MV_grid_id_db': mv_grid_id_db, - 'geom': geom, - 'type': type, - 'subtype': subtype, - 'v_level': node.v_level, - 'nominal_capacity': node.capacity, - 'run_id': run_id, - 'is_aggregated': False, - } - - # MVBranchTees - elif isinstance(node, MVCableDistributorDing0): - mvcd_idx += 1 - mv_cd_dict[mvcd_idx] = { - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), - 'MV_grid_id_db': mv_grid_id_db, - 'geom': geom, - 'run_id': run_id, - } - - # LoadAreaCentre - elif isinstance(node, LVLoadAreaCentreDing0): - - # type = 'Load area center of aggregated load area' - - areacenter_idx += 1 - aggr_lines = 0 - - aggr = {'generation': {}, 'load': {}, 'aggregates': []} - - # Determine aggregated generation in LV grid - for lvgd in node.lv_load_area._lv_grid_districts: - - for aggr_gen in lvgd.lv_grid.generators(): - aggr = aggregate_generators(aggr_gen, aggr) - - if aggr_gen.subtype == None: - subtype = 'other' - else: - subtype = aggr_gen.subtype - type = aggr_gen.type - - # Determine 
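aggregate_generators above accumulates installed capacity in a nested dict keyed by voltage level, type and subtype. The sketch below reproduces that grouping on plain dict records instead of Ding0 generator objects (field names and values are invented):

def aggregate_capacity(generators):
    # Group capacity by v_level -> type -> subtype, mirroring aggregate_generators.
    aggr = {'generation': {}}
    for gen in generators:
        level = aggr['generation'].setdefault(gen['v_level'], {})
        by_type = level.setdefault(gen['type'], {})
        slot = by_type.setdefault(gen['subtype'], {'ids': [], 'capacity': 0.0})
        slot['ids'].append(gen['id_db'])
        slot['capacity'] += gen['capacity']
    return aggr

toy_generators = [
    {'v_level': 6, 'type': 'solar', 'subtype': 'roof', 'id_db': 1, 'capacity': 30.0},
    {'v_level': 6, 'type': 'solar', 'subtype': 'roof', 'id_db': 2, 'capacity': 12.5},
    {'v_level': 7, 'type': 'wind', 'subtype': 'onshore', 'id_db': 3, 'capacity': 800.0},
]
print(aggregate_capacity(toy_generators))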
aggregated load in MV grid - # -> Implement once loads in Ding0 MV grids exist - - # Determine aggregated load in LV grid - aggr = aggregate_loads(node, aggr) - - # Collect metadata of aggregated load areas - aggr['aggregates'] = { - 'population': node.lv_load_area.zensus_sum, - 'geom': node.lv_load_area.geo_area} - aggr_line_type = nw._static_data['MV_cables'].iloc[ - nw._static_data['MV_cables']['I_max_th'].idxmax()] - geom = wkt_dumps(node.lv_load_area.geo_area) - - for aggr_node in aggr: - if aggr_node == 'generation': - mvgenaggr_idx = 0 - - for v_level in aggr['generation']: - for type in aggr['generation'][v_level]: - for subtype in aggr['generation'][v_level][type]: - mvgen_idx += 1 - mvgenaggr_idx += 1 - mv_gen_dict[mvgen_idx] = { - 'id_db': '_'.join( - [str(aggr_gen.__class__.__name__), 'MV', str(mv_grid_id), - str(aggr_gen.id_db), str(mvgenaggr_idx)]), # , str(mvgen_idx) - 'MV_grid_id_db': mv_grid_id_db, - 'geom': geom,#from_shape(Point(mv_district.mv_grid.station().geo_data), srid=srid),#lv_load_area.geo_area,#geom, #?? Polygon # hvmv_stations_dict[mvstations_idx]['geom'], # - 'type': type, - 'subtype': subtype, - 'v_level': v_level, - 'nominal_capacity': aggr['generation'][v_level][type][subtype]['capacity'], - 'is_aggregated': True, - 'run_id': run_id, - } - - lines_idx += 1 - aggr_lines += 1 - lines_dict[lines_idx] = { - # ToDo: Rename edge_name - 'edge_name': '_'.join( - [str(mv_grid_id), 'aggr', str(node.lv_load_area.id_db), - str(aggr_lines)]), - # , 'vlevel', str(v_level), 'subtype', str(subtype)]),#}'.format(v_level=v_level, subtype=subtype), - 'grid_id_db': mv_grid_id_db, - # ToDo: read type_name from aggr_line_type - 'type_name': 'NA2XS2Y 3x1x500 RM/35', # aggr_line_type.name, - 'type_kind': 'cable', # branch['branch'].kind, - 'length': 1, - 'U_n': aggr_line_type.U_n, - 'I_max_th': aggr_line_type.I_max_th, - 'R': aggr_line_type.R, - 'L': aggr_line_type.L, - 'C': aggr_line_type.C, - 'node1': '_'.join( - [str(aggr_gen.__class__.__name__), 'MV', str(mv_grid_id), - str(aggr_gen.id_db), str(mvgenaggr_idx)]), - 'node2': '_'.join([ - 'MVStationDing0', 'MV', str(mv_grid_id), str(mv_grid_id)]), - 'run_id': run_id, - } - - elif aggr_node == 'load': - for type in aggr['load']: - mvloads_idx += 1 - mv_loads_dict[mvloads_idx] = { - 'id_db': '_'.join( - ['AggregatedLoad', 'MV', str(mv_grid_id), str(mvloads_idx)]), - 'MV_grid_id_db': mv_grid_id_db, - 'geom': geom, - # from_shape(Point(mv_district.mv_grid.station().geo_data), srid=srid), - 'consumption_{}'.format(type): aggr['load'][type]['nominal'], - 'is_aggregated': True, - 'run_id': run_id, - } - - lines_idx += 1 - aggr_lines += 1 - lines_dict[lines_idx] = { - # ToDo: Rename edge_name - 'edge_name': '_'.join( - [str(mv_grid_id), 'aggr', str(node.lv_load_area.id_db), str(aggr_lines)]), - # 'edge_name': '_'.join( - # ['line_aggr_load', str(node.lv_load_area), 'vlevel', str(v_level), - # 'subtype', str(subtype)]), # }'.format(v_level=v_level, subtype=subtype), - 'grid_id_db': mv_grid_id_db, - # ToDo: read type_name from aggr_line_type - 'type_name': 'NA2XS2Y 3x1x500 RM/35', # aggr_line_type.name, - 'type_kind': 'cable', # branch['branch'].kind, - # 'type': aggr_line_type, - 'length': 1e-3, # in km - 'U_n': aggr_line_type.U_n, - 'I_max_th': aggr_line_type.I_max_th, - 'R': aggr_line_type.R, - 'L': aggr_line_type.L, - 'C': aggr_line_type.C, - 'node1': '_'.join( - ['AggregatedLoad', 'MV', str(mv_grid_id), str(mvloads_idx)]), - 'node2': '_'.join([ - 'MVStationDing0', 'MV', str(mv_grid_id), str(mv_grid_id)]), - 'run_id': run_id, - } - - # 
areacenter_dict[areacenter_idx] = { - # 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]),#node.id_db, - # 'MV_grid_id':node.grid, - # 'geom':node.geo_data, - # 'lv_load_area': node.lv_load_area, - # 'run_id': run_id,# - - # } - - # DisconnectingPoints - elif isinstance(node, CircuitBreakerDing0): - mvcb_idx += 1 - mvcb_dict[mvcb_idx] = { - 'id_db': '_'.join([str(node.__class__.__name__), 'MV', str(mv_grid_id), str(node.id_db)]), - 'MV_grid_id': mv_grid_id, - 'MV_grid_id_db': mv_grid_id_db, - 'geom': geom, - 'status': node.status, - 'run_id': run_id, - } - else: - type = 'Unknown' - - # MVedges - for branch in mv_district.mv_grid.graph_edges(): - # geom = wkt_dumps(node.geo_data) - geom = from_shape(LineString([branch['adj_nodes'][0].geo_data, branch['adj_nodes'][1].geo_data]), - srid=srid) - if not any([isinstance(branch['adj_nodes'][0], LVLoadAreaCentreDing0), - isinstance(branch['adj_nodes'][1], LVLoadAreaCentreDing0)]): - lines_idx += 1 - lines_dict[lines_idx] = { - 'edge_name': branch['branch'].id_db, - 'grid_id_db': mv_grid_id_db, - 'type_name': branch['branch'].type['name'], - 'type_kind': branch['branch'].kind, - 'length': branch['branch'].length / 1e3, - 'U_n': branch['branch'].type['U_n'], - 'I_max_th': branch['branch'].type['I_max_th'], - 'R': branch['branch'].type['R'], - 'L': branch['branch'].type['L'], - 'C': branch['branch'].type['C'], - 'node1': '_'.join([str(branch['adj_nodes'][0].__class__.__name__), 'MV', str(mv_grid_id), - str(branch['adj_nodes'][0].id_db)]), - 'node2': '_'.join([str(branch['adj_nodes'][1].__class__.__name__), 'MV', str(mv_grid_id), - str(branch['adj_nodes'][1].id_db)]), - 'run_id': run_id, - } - - if lv_info: - for LA in mv_district.lv_load_areas(): - for lv_district in LA.lv_grid_districts(): - if not lv_district.lv_grid.grid_district.lv_load_area.is_aggregated: - - # ding0_grid.grid_district._lv_load_areas._lv_grid_districts _.lv_grid - # LV-grid - # ToDo: geom <- Polygon - lvgrid_idx += 1 - lv_grid_dict[lvgrid_idx] = { - 'LV_grid_id': lv_district.lv_grid.id_db, - 'id_db': '_'.join( - [str(lv_district.lv_grid.__class__.__name__), 'LV', str(lv_district.lv_grid.id_db), - str(lv_district.lv_grid.id_db)]), - 'geom': wkt_dumps(lv_district.geo_data), - 'population': lv_district.population, - 'voltage_nom': lv_district.lv_grid.v_level / 1e3, - 'run_id': run_id - } - - lv_grid_id = lv_district.lv_grid.id_db - lv_grid_id_db = '_'.join( - [str(lv_district.lv_grid.__class__.__name__), 'LV', str(lv_district.lv_grid.id_db), - str(lv_district.lv_grid.id_db)]) - - # geom = from_shape(Point(lv_district.lv_grid.station().geo_data), srid=srid) - # geom = wkt_dumps(lv_district.geo_data)# lv_grid.station() #ding0_lv_grid.grid_district.geo_data - for node in lv_district.lv_grid.graph_nodes_sorted(): - # geom = wkt_dumps(node.geo_data) - - # LVGenerator - if isinstance(node, GeneratorDing0): - if node.subtype == None: - subtype = 'other' - else: - subtype = node.subtype - type = node.type - lvgen_idx += 1 - lv_gen_dict[lvgen_idx] = { - 'id_db': '_'.join( - [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), - 'LV_grid_id_db': lv_grid_id_db, - 'geom': wkt_dumps(node.geo_data), - 'type': type, - 'subtype': subtype, - 'v_level': node.v_level, - 'nominal_capacity': node.capacity, - 'run_id': run_id, - } - - # LVcd - elif isinstance(node, LVCableDistributorDing0): - lvcd_idx += 1 - lv_cd_dict[lvcd_idx] = { - 'id_db': '_'.join( - [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), - 
'LV_grid_id_db': lv_grid_id_db, - 'geom': None, - # wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? - 'run_id': run_id, - } - - # LVload - elif isinstance(node, LVLoadDing0): - consumption_dict = {} - for k in ['residential', 'retail', 'agricultural', 'industrial']: - if k in node.consumption.keys(): - consumption_dict[k] = node.consumption[k] - else: - consumption_dict[k] = None - lvloads_idx += 1 - lv_loads_dict[lvloads_idx] = { - 'id_db': '_'.join( - [str(node.__class__.__name__), 'LV', str(lv_grid_id), str(node.id_db)]), - 'LV_grid_id_db': lv_grid_id_db, - 'geom': None,#wkt_dumps(lv_district.geo_data),#wkt_dumps(node.geo_data), Todo: why no geo_data? - # 'consumption': json.dumps(node.consumption), - 'consumption_residential': consumption_dict['residential'], - 'consumption_retail': consumption_dict['retail'], - 'consumption_agricultural': consumption_dict['agricultural'], - 'consumption_industrial': consumption_dict['industrial'], - 'run_id': run_id, - } - del consumption_dict - - else: - type = 'Unknown' - - # LVedges - for branch in lv_district.lv_grid.graph_edges(): - # geom = from_shape( - # LineString([branch['adj_nodes'][0].geo_data, branch['adj_nodes'][1].geo_data]), srid=srid) - if not any([isinstance(branch['adj_nodes'][0], LVLoadAreaCentreDing0), - isinstance(branch['adj_nodes'][1], LVLoadAreaCentreDing0)]): - lines_idx += 1 - lines_dict[lines_idx] = { - 'edge_name': branch['branch'].id_db, - 'grid_id_db': lv_grid_id_db, - 'type_name': branch['branch'].type.to_frame().columns[0], - 'type_kind': branch['branch'].kind, - 'length': branch['branch'].length / 1e3, # length in km - 'U_n': branch['branch'].type['U_n'] / 1e3, # U_n in kV - 'I_max_th': branch['branch'].type['I_max_th'], - 'R': branch['branch'].type['R'], - 'L': branch['branch'].type['L'], - 'C': branch['branch'].type['C'], - 'node1': '_'.join( - [str(branch['adj_nodes'][0].__class__.__name__), 'LV', str(lv_grid_id), - str(branch['adj_nodes'][0].id_db)]) - if not isinstance(branch['adj_nodes'][0], LVStationDing0) else '_'.join( - [str(branch['adj_nodes'][0].__class__.__name__), 'MV', str(mv_grid_id), - str(branch['adj_nodes'][0].id_db)]), - 'node2': '_'.join( - [str(branch['adj_nodes'][1].__class__.__name__), 'LV', str(lv_grid_id), - str(branch['adj_nodes'][1].id_db)]) - if not isinstance(branch['adj_nodes'][1], LVStationDing0) else '_'.join( - [str(branch['adj_nodes'][1].__class__.__name__), 'MV', str(mv_grid_id), - str(branch['adj_nodes'][1].id_db)]), - 'run_id': run_id, - } - - lv_grid = pd.DataFrame.from_dict(lv_grid_dict, orient='index') - lv_gen = pd.DataFrame.from_dict(lv_gen_dict, orient='index') - lv_cd = pd.DataFrame.from_dict(lv_cd_dict, orient='index') - mvlv_stations = pd.DataFrame.from_dict(mvlv_stations_dict, orient='index') - mvlv_trafos = pd.DataFrame.from_dict(mvlv_trafos_dict, orient='index') - lv_loads = pd.DataFrame.from_dict(lv_loads_dict, orient='index') - mv_grid = pd.DataFrame.from_dict(mv_grid_dict, orient='index') - mv_gen = pd.DataFrame.from_dict(mv_gen_dict, orient='index') - # mv_cb = pd.DataFrame.from_dict(mvcb_dict, orient='index') - mv_cd = pd.DataFrame.from_dict(mv_cd_dict, orient='index') - hvmv_stations = pd.DataFrame.from_dict(hvmv_stations_dict, orient='index') - # mv_areacenter= pd.DataFrame.from_dict(areacenter_dict, orient='index') - hvmv_trafos = pd.DataFrame.from_dict(hvmv_trafos_dict, orient='index') - mv_loads = pd.DataFrame.from_dict(mv_loads_dict, orient='index') - lines = pd.DataFrame.from_dict(lines_dict, orient='index') - mvlv_mapping 
= pd.DataFrame.from_dict(mvlv_mapping_dict, orient='index') - - lines = lines[sorted(lines.columns.tolist())] - - return run_id, lv_grid, lv_gen, lv_cd, mvlv_stations, mvlv_trafos, lv_loads, mv_grid, mv_gen, mv_cd, \ - hvmv_stations, hvmv_trafos, mv_loads, lines, mvlv_mapping # mv_areacenter, - - -####################################################### - -def export_data_tocsv(path, run_id, lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, mv_grid, mv_gen, mv_cb, - mv_cd, mv_stations, hvmv_trafos, mv_loads, lines, mapping): - # Exports data to csv - def export_network_tocsv(path, table, tablename): - return table.to_csv(''.join([path, '/', run_id, '/', tablename, '.csv']), ';') - - export_network_tocsv(path, lv_grid, 'lv_grid') - export_network_tocsv(path, lv_gen, 'lv_generator') - export_network_tocsv(path, lv_cd, 'lv_branchtee') - export_network_tocsv(path, lv_stations, 'lvmv_station') - export_network_tocsv(path, mvlv_trafos, 'lv_transformer') - export_network_tocsv(path, lv_loads, 'lv_load') - export_network_tocsv(path, mv_grid, 'mv_grid') - export_network_tocsv(path, mv_gen, 'mv_generator') - export_network_tocsv(path, mv_cd, 'mv_branchtee') - export_network_tocsv(path, mv_stations, 'mvhv_station') - export_network_tocsv(path, hvmv_trafos, 'mv_transformer') - export_network_tocsv(path, mv_cb, 'mv_circuitbreaker') - export_network_tocsv(path, mv_loads, 'mv_load') - export_network_tocsv(path, lines, 'line') - export_network_tocsv(path, mapping, 'mvlv_mapping') - # export_network_tocsv(path, areacenter, 'areacenter') - - -######################################################## - -from sqlalchemy import create_engine -from egoio.db_tables import model_draft as md - - -def export_network_to_oedb(session, table, tabletype, srid): - dataset = [] - engine = create_engine("sqlite:///myexample.db") - print("Exporting table type : {}".format(tabletype)) - if tabletype == 'lines': - table.apply(lambda row: - session.add(md.EgoGridDing0Line( - run_id=row['run_id'], - edge_name=row['edge_name'], - grid_id_db=row['grid_id_db'], - node1=row['node1'], - node2=row['node2'], - type_kind=row['type_kind'], - type_name=row['type_name'], - length=row['length'], - U_n=row['U_n'], - C=row['C'], - L=row['L'], - R=row['R'], - I_max_th=row['I_max_th'], - )) - , axis=1) - - elif tabletype == 'lv_cd': - table.apply(lambda row: - session.add(md.EgoGridDing0LvBranchtee( - run_id=row['run_id'], - id_db=row['id_db'], - lv_grid_id_db=row['LV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, - )) - , axis=1) - - elif tabletype == 'lv_gen': - table.apply(lambda row: - session.add(md.EgoGridDing0LvGenerator( - run_id=row['run_id'], - id_db=row['id_db'], - lv_grid_id_db=str(row['LV_grid_id_db']), - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, - type=row['type'], - subtype=row['subtype'], - v_level=row['v_level'], - nominal_capacity=row['nominal_capacity'], - )) - , axis=1) - - elif tabletype == 'lv_loads': - table.apply(lambda row: - session.add(md.EgoGridDing0LvLoad( - run_id=row['run_id'], - id_db=row['id_db'], - lv_grid_id_db=row['LV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, - consumption_residential=row['consumption_residential'], - consumption_retail=row['consumption_retail'], - consumption_agricultural=row['consumption_agricultural'], - consumption_industrial=row['consumption_industrial'], - )) - , axis=1) - - elif tabletype == 'lv_grid': - table.apply(lambda row: - session.add(md.EgoGridDing0LvGrid( - 
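The ORM export above stores geometries as EWKT: the WKT dumped earlier is prefixed with the SRID, and None is passed through when a node has no geometry. A minimal shapely-based illustration (the sample point and SRID are made up):

from shapely.geometry import Point
from shapely.wkt import dumps as wkt_dumps

srid = 4326                               # assumed SRID, only for the illustration
geom_wkt = wkt_dumps(Point(10.0, 52.5))

# Same pattern as the lambdas above: prefix the WKT with the SRID (EWKT),
# pass None through when no geometry is available.
ewkt = "SRID={};{}".format(srid, geom_wkt) if geom_wkt else None
print(ewkt)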
run_id=row['run_id'], - id_db=row['id_db'], - lv_grid_id=row['LV_grid_id'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, - population=row['population'], - voltage_nom=row['voltage_nom'], - )) - , axis=1) - - elif tabletype == 'mvlv_stations': - table.apply(lambda row: - session.add(md.EgoGridDing0MvlvStation( - run_id=row['run_id'], - id_db=row['id_db'], - lv_grid_id_db=row['LV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, - )) - , axis=1) - - elif tabletype == 'mvlv_trafos': - table.apply(lambda row: - session.add(md.EgoGridDing0MvlvTransformer( - run_id=row['run_id'], - id_db=row['id_db'], - lv_grid_id_db=row['LV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, - voltage_op=row['voltage_op'], - S_nom=row['S_nom'], - X=row['X'], - R=row['R'], - )) - , axis=1) - - elif tabletype == 'mvlv_mapping': - table.apply(lambda row: - session.add(md.EgoGridDing0MvlvMapping( - run_id=row['run_id'], - lv_grid_id=row['LV_grid_id'], - lv_grid_id_db=row['LV_grid_id_db'], - mv_grid_id=row['MV_grid_id'], - mv_grid_id_db=row['MV_grid_id_db'], - )) - , axis=1) - - elif tabletype == 'mv_cd': - table.apply(lambda row: - session.add(md.EgoGridDing0MvBranchtee( - run_id=row['run_id'], - id_db=row['id_db'], - mv_grid_id_db=row['MV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, - )) - , axis=1) - - elif tabletype == 'mv_gen': - table.apply(lambda row: - session.add(md.EgoGridDing0MvGenerator( - run_id=row['run_id'], - id_db=row['id_db'], - mv_grid_id_db=row['MV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, - type=row['type'], - subtype=row['subtype'], - v_level=row['v_level'], - nominal_capacity=row['nominal_capacity'], - is_aggregated=row['is_aggregated'], - )) - , axis=1) - - elif tabletype == 'mv_loads': - table.apply(lambda row: - session.add(md.EgoGridDing0MvLoad( - run_id=row['run_id'], - id_db=row['id_db'], - mv_grid_id_db=row['MV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, - is_aggregated=row['is_aggregated'], - consumption_residential=row['consumption_residential'], - consumption_retail=row['consumption_retail'], - consumption_agricultural=row['consumption_agricultural'], - consumption_industrial=row['consumption_industrial'], - )) - , axis=1) - - elif tabletype == 'mv_grid': - table.apply(lambda row: - session.add(md.EgoGridDing0MvGrid( - run_id=row['run_id'], - id_db=row['id_db'], - mv_grid_id=row['MV_grid_id'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, - population=row['population'], - voltage_nom=row['voltage_nom'], - )) - , axis=1) - - elif tabletype == 'hvmv_stations': - table.apply(lambda row: - session.add(md.EgoGridDing0HvmvStation( - run_id=row['run_id'], - id_db=row['id_db'], - mv_grid_id_db=row['MV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, - )) - , axis=1) - - elif tabletype == 'hvmv_trafos': - table.apply(lambda row: - session.add(md.EgoGridDing0HvmvTransformer( - run_id=row['run_id'], - id_db=row['id_db'], - mv_grid_id_db=row['MV_grid_id_db'], - geom="SRID={};{}".format(srid, row['geom']) if row['geom'] else None, - voltage_op=row['voltage_op'], - S_nom=row['S_nom'], - X=row['X'], - R=row['R'], - )) - , axis=1) - # if not engine.dialect.has_table(engine, 'ego_grid_mv_transformer'): - # print('helloworld') - - session.commit() - - -def export_data_to_oedb(session, srid, lv_grid, lv_gen, lv_cd, 
mvlv_stations, mvlv_trafos, lv_loads, mv_grid, mv_gen, - mv_cd, hvmv_stations, hvmv_trafos, mv_loads, lines, mvlv_mapping): - # only for testing - # engine = create_engine('sqlite:///:memory:') - export_network_to_oedb(session, lv_grid, 'lv_grid', srid) - export_network_to_oedb(session, lv_gen, 'lv_gen', srid) - export_network_to_oedb(session, lv_cd, 'lv_cd', srid) - export_network_to_oedb(session, mvlv_stations, 'mvlv_stations', srid) - export_network_to_oedb(session, mvlv_trafos, 'mvlv_trafos', srid) - export_network_to_oedb(session, lv_loads, 'lv_loads', srid) - export_network_to_oedb(session, mv_grid, 'mv_grid', srid) - export_network_to_oedb(session, mv_gen, 'mv_gen', srid) - export_network_to_oedb(session, mv_cd, 'mv_cd', srid) - export_network_to_oedb(session, hvmv_stations, 'hvmv_stations', srid) - export_network_to_oedb(session, hvmv_trafos, 'hvmv_trafos', srid) - export_network_to_oedb(session, mv_loads, 'mv_loads', srid) - export_network_to_oedb(session, lines, 'lines', srid) - export_network_to_oedb(session, mvlv_mapping, 'mvlv_mapping', srid) - - -def create_ding0_db_tables(engine): - tables = [md.EgoGridDing0Line, - md.EgoGridDing0LvBranchtee, - md.EgoGridDing0LvGenerator, - md.EgoGridDing0LvLoad, - md.EgoGridDing0LvGrid, - md.EgoGridDing0MvlvStation, - md.EgoGridDing0MvlvTransformer, - md.EgoGridDing0MvlvMapping, - md.EgoGridDing0MvBranchtee, - md.EgoGridDing0MvGenerator, - md.EgoGridDing0MvLoad, - md.EgoGridDing0MvGrid, - md.EgoGridDing0HvmvStation, - md.EgoGridDing0HvmvTransformer] - - for tab in tables: - tab().__table__.create(bind=engine, checkfirst=True) - - -def drop_ding0_db_tables(engine): - tables = [md.EgoGridDing0Line, - md.EgoGridDing0LvBranchtee, - md.EgoGridDing0LvGenerator, - md.EgoGridDing0LvLoad, - md.EgoGridDing0LvGrid, - md.EgoGridDing0MvlvStation, - md.EgoGridDing0MvlvTransformer, - md.EgoGridDing0MvlvMapping, - md.EgoGridDing0MvBranchtee, - md.EgoGridDing0MvGenerator, - md.EgoGridDing0MvLoad, - md.EgoGridDing0MvGrid, - md.EgoGridDing0HvmvStation, - md.EgoGridDing0HvmvTransformer] - - print("Please confirm that you would like to drop the following tables:") - for n, tab in enumerate(tables): - print("{: 3d}. {}".format(n, tab)) - - print("Please confirm with either of the choices below:\n" + \ - "- yes\n" +\ - "- no\n" +\ - "- the indexes to drop in the format 0, 2, 3, 5") - confirmation = input("Please type the choice completely as there is no default choice.") - if re.fullmatch('[Yy]es', confirmation): - for tab in tables: - tab().__table__.drop(bind=engine, checkfirst=True) - elif re.fullmatch('[Nn]o', confirmation): - print("Cancelled dropping of tables") - else: - try: - indlist = confirmation.split(',') - indlist = list(map(int, indlist)) - print("Please confirm deletion of the following tables:") - tablist = np.array(tables)[indlist].tolist() - for n, tab in enumerate(tablist): - print("{: 3d}. {}".format(n, tab)) - con2 = input("Please confirm with either of the choices below:\n" + \ - "- yes\n" + \ - "- no") - if re.fullmatch('[Yy]es', con2): - for tab in tablist: - tab().__table__.drop(bind=engine, checkfirst=True) - elif re.fullmatch('[Nn]o', con2): - print("Cancelled dropping of tables") - else: - print("The input is unclear, no action taken") - except ValueError: - print("Confirmation unclear, no action taken") - - ######################################################## if __name__ == "__main__": # nw = init_mv_grid(mv_grid_districts=[3544, 3545]) diff --git a/doc/usage_details.rst b/doc/usage_details.rst index 7d7e697e..d1ecff92 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -155,11 +155,455 @@ Parameter Description Unit km_cable Cumulative length of underground cables km ========= ======================================= ====
+Ding0 IO : Ding0 exporter
+=========================
+Introduction
+------------
+The ding0 exporter provides the ding0 network topology data in a structured format. The main component is the
+module export.py, which transforms the ding0 network into several pandas dataframes. The main purpose is to
+provide the data in a table-based format using a widely known technology. The following explains the main
+usage of export.py and introduces its inputs and possible outputs.
+
+The exporter contains the function export_network(). This function takes three parameters: a network
+object, the mode (currently not implemented) and the run_id.
+
+Note:
+The run_id must be set if the network is not created anew but imported from pickle files.
+
+What is the input?
+------------------
+A Ding0Network can be created for a single or for multiple grid districts. As mentioned, a Ding0Network can be
+created from a versioned ding0 "run" that has been stored in pickle files before. The other option is to
+initialize a new version by running ding0 again. The difference is most obvious when looking at the run_id.
+So the input is the ding0 network together with the corresponding run_id.
+
+.. code-block:: python
+
+    import os
+    from ding0.tools.results import load_nd_from_pickle
+
+    # 1.
+    # example pickle file path
+    pkl_filepath = "/home/local/user/Ding0/20180823154014"
+
+    # 2.
+    # choose MV Grid Districts to import, use a list of integers
+    grids = list(range(2, 6))
+
+    # 3.
+    # loop over the selected grids in the directory
+    for grid_no in grids:
+        try:
+            nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no)))
+        except Exception:
+            print('Could not load ding0_grids__{}.pkl'.format(grid_no))
+            continue
+
+What is the output?
+-------------------
+The export_network() function returns a namedtuple with 17 elements. It contains all data of a Ding0Network
+instance as well as the corresponding metadata: 15 elements store the Ding0Network data and 2 elements contain
+meta information, namely the current run_id and metadata_json. The metadata_json contains the assumptions that
+ding0 uses to create the network topology.
+
+Since pandas is a widely used Python package, the goal was to improve the usability of the IO functionality
+of ding0 by building on it. For further information see the pandas IO documentation.
+
+.. code-block:: python
+
+    # 4. inside the for-loop
+    # create 15 pandas dataframes and 2 metadata elements as a namedtuple
+    # use the run_id from the pickle files, in this case 20180823154014
+    network = export_network(nw, run_id=pickled_run_id_value)
+
+    # This is the structure of the output:
+    # available namedtuple
+    'Network',
+    [
+        'run_id', 'metadata_json', 'lv_grid', 'lv_gen', 'lv_cd', 'lv_stations', 'mvlv_trafos', 'lv_loads',
+        'mv_grid', 'mv_gen', 'mv_cb', 'mv_cd', 'mv_stations', 'hvmv_trafos', 'mv_loads', 'lines', 'mvlv_mapping'
+    ]
+
+What IO functionality is implemented?
+-------------------------------------
+Currently the Ding0Network can be serialized to pickle files in Python. It can also be stored in
+tables on a relational database or saved to CSV files.
+
+IO settings
+-----------
+The IO settings are provided within a config file that is located in the ding0/config folder. The file is
+named exporter_config.cfg. In its current state it only stores the database schema name that is used as the
+destination for any data export to a database. The config file is read as a config object using the package
+"ConfigObj". In the future, all static options should be stored in this file.
+
+
+Ding0 Table
+===========
+In order to export a pandas dataframe to a database, the corresponding tables must be created
+first. The table definitions and metadata (using metadata string version 1.3, see Ding0 Table Metadata) are
+provided within the module "ding0_db_tables.py".
+
+The table definition is implemented using SQLAlchemy and can be found here: ding0_sqla_tables_.
+
+.. _ding0_sqla_tables: https://github.com/openego/ding0/blob/features/stats-export/ding0/io/ding0_db_tables.py
+
+
+Ding0 Table Metadata
+--------------------
+The "ding0 metadata" JSON strings are located in the "metadatastrings" folder within the "ding0.io" folder.
+They are created from a versioned metadata string which is under continuous development. The provided metadata
+uses an OEP-specific JSON string in version1.3_.
+
+.. _version1.3: https://github.com/OpenEnergyPlatform/examples/blob/master/metadata/archiv/oep_metadata_template_v1.3.json
+
+
+Table specification
+-------------------
+In the following, a short description of the structure of all tables is given. Note that all tables reference
+the run_id column of the versioning table as a foreign key. All tables depend on the same value of run_id.
+
+The database schema is selected based on the topic for which the data provides information.
+
+Note: The following tables can be generated automatically from a metadata string.
+To create the tables this script_ is provided.
+
+..
_script: https://gist.github.com/gplssm/63f11276387875763f2bbc7f9a5fdb8f + +**OpenEnergyDatabase schema name: grid.tablename** + +versioning +---------- +=========== ================================= ============== +name description unit +=========== ================================= ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +description Used parameters for this run string +=========== ================================= ============== + +line +---- +========= ==================================================================================== ================ +name description unit +========= ==================================================================================== ================ +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge integer +edge_name unambiguous name of edge string +grid_name unambiguous name of grid string +node1 id_db of first node string +node2 id_db of second node string +type_kind n/a string +type_name n/a string +length length of line as float km +u_n nominal voltage as float kV +c inductive resistance at 50Hz as float uF/km +l stored as float mH/km +r stored as float Ohm/km +i_max_th stored as float A +geom geometric coordinates WGS84 LINESTRING +========= ==================================================================================== ================ + +lv_branchtee +----------------- +====== ========================================================================= ============== +name discription unit +====== ========================================================================= ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +geom geometric coordinates WGS84 POINT +id_db unambiguous number of LV-Grid integer +name unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber string +====== ========================================================================= ============== + +lv_generator +----------------- +================ ==================================================================== ============== +name description unit +================ ==================================================================== ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of LV-Grid integer +la_id FIXME integer +name unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#' string +lv_grid_id unambiguous id_db of LV-Grid integer +geom geometric coordinates WGS84, POINT +type type of generation {solar; biomass} string +subtype subtype of generation: {solar_roof_mounted, unknown; biomass} string +v_level voltage level of generator as integer FIXME +nominal_capacity nominal capacity as float FIXME +is_aggregated True if load is aggregated load, else False boolean +weather_cell_id unambiguous number of the corresponding weather cell integer +================ ==================================================================== ============== + +lv_grid +------------ +=========== ======================================================= ================== +name description unit +=========== ======================================================= ================== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of LV-Grid 
integer +name unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid# string +geom geometric coordinates WGS84 MULTIPOLYGON +population population in LV-Grid integer +voltage_nom voltage level of grid as float kV +=========== ======================================================= ================== + +lv_load +------------ +=========== ================================================================================== ============== +name description unit +=========== ================================================================================== ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of LV-Grid integer +name unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#' string +lv_grid_id unambiguous id_db of LV-Grid integer +geom geometric coordinates WGS84 POINT +consumption type of load {residential, agricultural, industrial} and corresponding consumption string +=========== ================================================================================== ============== + +lv_station +--------------- +====== =================================================== =========== +name description unit +====== =================================================== =========== +id unambiguous unique numer integer +run_id time and date of table generation in yyyyMMddhhmmss integer +id_db unambiguous number of LV-Grid integer +geom geometric coordinates WGS84 POINT +name FIXME string +====== =================================================== =========== + +mv_branchtee +----------------- +====== =========================================================================== ============== +name description unit +====== =========================================================================== ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of MV-Grid integer +geom geometric coordinates WGS84 POINT +name unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#' string +====== =========================================================================== ============== + +mv_circuitbreaker +----------------- +====== ================================= ============== +name description unit +====== ================================= ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of MV-Grid integer +geom geometric coordinates WGS84 POINT +name FIXME string +status FIXME string +====== ================================= ============== + +mv_generator +------------ +================ =========================================================================================== ============== +name description unit +================ =========================================================================================== ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of MV-Grid integer +name unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#' string +geom geometric coordinates WGS84 POINT +type type of generation: {solar; biomass} string +subtype subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas} string +v_level voltage level of generator as integer FIXME +nominal_capacity nominal capacity as float FIXME +weather_cell_id unambiguous number of the corresponding weather cell integer 
+is_aggregated True if load is aggregated load, else False boolean +================ =========================================================================================== ============== + +mv_grid +------- +=========== ======================================================== ================== +name description unit +=========== ======================================================== ================== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of MV-Grid integer +geom geometric coordinates WGS84 MULTIPOLYGON +name unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#' string +population population in MV-Grid integer +voltage_nom voltage level of grid as float kV +=========== ======================================================== ================== + +mv_load +------- +============= ========================================================================================== ============== +name description unit +============= ========================================================================================== ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +name unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#' string +geom geometric coordinates WGS84 GEOMETRY +is_aggregated True if load is aggregated load, else False boolean +consumption type of load {retail, residential, agricultural, industrial} and corresponding consumption string +============= ========================================================================================== ============== + +mv_station +---------- +====== ========================================================== ============== +name description unit +====== ========================================================== ============== +id unambiguous unique numer integer +run_id time and date of table generation yyyyMMddhhmmss +id_db unambiguous number of MV-Grid integer +geom geometric coordinates WGS84 POINT +name unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid# string +====== ========================================================== ============== + +mvlv_mapping +------------ +============ ======================================================== ======= +name description unit +============ ======================================================== ======= +id unambiguous unique numer integer +run_id time and date of table generation in yyyyMMddhhmmss integer +lv_grid_id unambiguous number of LV-Grid integer +lv_grid_name unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#' string +mv_grid_id unambiguous number of MV-Grid integer +mv_grid_name unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#' string +============ ======================================================== ======= + +mvlv_transformer +---------------- +========== =================================================== =========== +name description unit +========== =================================================== =========== +id unambiguous unique numer integer +run_id time and date of table generation in yyyyMMddhhmmss integer +id_db unambiguous number of LV-Grid integer +geom geometric coordinates WGS84 POINT +name FIXME string +voltage_op as float kV +s_nom nominal apparent power as float kVA +x as float Ohm +r as float Ohm +========== =================================================== =========== + +hvmv_transformer +--------------------- +========== ================================= ============== +name 
description unit +========== ================================= ============== +id unambiguous unique number integer +run_id time and date of table generation yyyyMMddhhmmss +geom geometric coordinates WGS84 POINT +name FIXME string +voltage_op FIXME float +s_nom nominal apparent power as float kVA +x as float Ohm +r as float Ohm +========== ================================= ==============
+
+
+Export ding0 to database
+========================
+
+Database export
+---------------
+This exporter depends on existing tables as described in the chapter "Ding0 Table".
+The functionality is implemented in db_export.py_. The module provides functions to establish a database
+connection, create the tables, drop the tables, and change the database-specific owner of each table.
+The core functionality is the data export itself, which is implemented using pandas dataframes and the
+pandas IO functionality.
+
+Note: The export to a database takes a lot of time (about one week) because of the amount of data provided
+by ding0. It is therefore not recommended to export all 3608 available grid districts at once, as this is
+error-prone due to connection timeouts or similar reasons. We are working on speeding up the export in the future.
+
+.. _db_export.py: https://github.com/openego/ding0/blob/features/stats-export/ding0/io/db_export.py
+
+Usage
+-----
+The module is implemented as a command-line based script. To run the script one needs to be able to create a
+ding0 network from pickle files or from a new ding0 run, as mentioned before. The ding0 network is used as
+input for the export_network function, which returns the pandas dataframes as a namedtuple
+(see Ding0 IO : Ding0 exporter).
+
+.. code-block:: python
+
+    # db_export.py
+
+    # create ding0 Network instance
+    nw = NetworkDing0(name='network')
+
+    # geo. ref. sys., e.g. 4326 == WGS84
+    SRID = int(nw.config['geo']['srid'])
+
+    # choose MV Grid Districts to import, use a list of integers
+    mv_grid_districts = list(range(2, 6))
+
+    # run DING0 on the selected MV Grid Districts
+    nw.run_ding0(session=session,
+                 mv_grid_districts_no=mv_grid_districts)
+
+    # return values from export_network() as a namedtuple
+    network = export_network(nw)
+
+
+Before the data can be exported, the tables must be created in the database.
+
+.. code-block:: python
+
+    # db_export.py
+
+    # Create tables
+    # creates all tables defined in "ding0_db_tables.py"
+    create_ding0_sql_tables(oedb_engine, SCHEMA)
+
+    # delete only the created tables
+    drop_ding0_db_tables(oedb_engine)
+
+    # change the owner (open-energy-database specific function)
+    db_tables_change_owner(oedb_engine, SCHEMA)
+
+The namedtuple is one of the inputs for the export functionality. Other inputs are a valid connection to the
+Open Energy Platform (the TCP-based connection is used here), the schema name that specifies the destination
+on the database, and the SRID.
+
+.. code-block:: python
+
+    # db_export.py
+
+    # establish database connection and SQLAlchemy session
+    # one needs a database user (OEP-API is not supported yet)
+    oedb_engine = connection(section='oedb')
+    session = sessionmaker(bind=oedb_engine)()
+
+    # set the database schema to which the tables are added
+    # configure the SCHEMA in the config file located at: ding0/config/exporter_config.cfg
+    SCHEMA = exporter_config['EXPORTER_DB']['SCHEMA']
+
+    # export all dataframes returned from export_network(nw) to the database
+    # example: export_all_dataframes_to_db(oedb_engine, my_schema_name, network=ding0_network_tuples, srid=4326)
+    export_all_dataframes_to_db(oedb_engine, SCHEMA, network=network, srid=SRID)
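+Putting these pieces together, the following sketch shows how a small range of grid districts could be exported
+from existing pickle files to the database. It is only an illustration: the pickle path, the run_id and the SRID
+are placeholders, and it is assumed that export_all_dataframes_to_db can be imported from ding0.io.db_export;
+the remaining functions are the ones introduced above.
+
+.. code-block:: python
+
+    import os
+
+    from egoio.tools.db import connection
+    from ding0.io.export import export_network
+    from ding0.io.db_export import export_all_dataframes_to_db  # assumed import path
+    from ding0.io.io_settings import exporter_config
+    from ding0.tools.results import load_nd_from_pickle
+
+    # placeholder values, adapt to your setup
+    pkl_filepath = "/path/to/Ding0/20180823154014"
+    pickle_run_id = 20180823154014
+    PICKLE_SRID = 4326  # must match the SRID of the pickled grids
+    grids = list(range(2, 6))
+
+    oedb_engine = connection(section='oedb')
+    SCHEMA = exporter_config['EXPORTER_DB']['SCHEMA']
+
+    # export one grid district at a time so that a single failure
+    # does not abort the whole run
+    for grid_no in grids:
+        try:
+            nw = load_nd_from_pickle(os.path.join(pkl_filepath, 'ding0_grids__{}.pkl'.format(grid_no)))
+        except Exception:
+            print('Could not load ding0_grids__{}.pkl'.format(grid_no))
+            continue
+        network = export_network(nw, run_id=pickle_run_id)
+        export_all_dataframes_to_db(oedb_engine, SCHEMA, network=network, srid=PICKLE_SRID)
+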
 CSV file export
 ===============
-Ding0 objects are exported in csv files.
+Ding0 objects can be exported to CSV files. The functionality is provided by pandas IO.
+
+Usage
+-----
+The export functionality is implemented here: file_export_.
+
+The CSV exporter is used as a command-line script. Its core functionality is realized using pandas IO functions.
+
+To use the CSV exporter, specify in the script the destination folder where the generated CSV files should be
+stored. The operator must also specify the range of grid districts to be exported. A minimal sketch of this
+workflow is given after the table specification at the end of this section.
+
+The prerequisite is that you are able to create (or have access to) a ding0 network from pickle files or from an
+actual ding0 run. The current state of the script assumes that you are using pickle files to create
+the ding0 network.
+
+.. _file_export: https://github.com/openego/ding0/blob/features/stats-export/ding0/io/file_export.py
+
+CSV Table specification
+-----------------------
Lines ----- @@ -167,19 +611,22 @@ Lines :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge", "n/a" "edge_name", "str", "unambiguous name of edge", "n/a" - "grid_id_db", "int","unambiguous id_db of corresponding grid (MVgrid-id if MV-edge, LVgrid-id if LV-edge", "n/a" - "type_kind","str","","n/a" - "type_name","str","","n/a" + "grid_name", "str", "unambiguous name of grid", "n/a" "node1","str","id_db of first node","n/a" "node2","str","id_db of second node","n/a" + "type_kind","str","","n/a" + "type_name","str","","n/a" "length","float","length of line","km" - "U_n","float","nominal voltage","kV" - "R","float","","Ohm/km" - "C","float","inductive resistance at 50Hz","uF/km" - "L","float","","mH/km" - "I_max_th","float","","A" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "u_n","float","nominal voltage","kV" + "c","float","inductive resistance at 50Hz","uF/km" + "l","float","","mH/km" + "r","float","","Ohm/km" + "i_max_th","float","","A" + "geom", "None","geometric coordinates", "n/a" + LV-Branchtees -------------- @@ -187,10 +634,11 @@ LV-Branchtees :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" - "geom", "None","geometric coordinates", "n/a" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of LV-Grid", "n/a" + "geom", "None","geometric coordinates", "WGS 84, POINT" + "name", "str", "unambiguous name: 'LVCableDistributorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" + LV-Generators ------------- @@ -198,14 +646,18 @@ LV-Generators :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POINT" + "run_id", "int", "time and date of table 
generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of LV-Grid", "n/a" + "la_id", "int", "", "" + "name", "str", "unambiguous name: 'LVGeneratorDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" + "lv_grid_id", "int","unambiguous id_db of LV-Grid", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, POINT" "type","str","type of generation","{solar; biomass}" "subtype","str","subtype of generation: {solar_roof_mounted, unknown; biomass}","n/a" "v_level","int","voltage level of generator","" "nominal_capacity","float","nominal capacity","" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "is_aggregated", "boolean", "True if load is aggregated load, else False", "n/a" + "weather_cell_id", "int", "unambiguous number of the corresponding weather cell", "n/a" LV-Grids ----------- @@ -213,12 +665,12 @@ LV-Grids :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" - "LV_grid_id", "int","unambiguous number of LV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 MULTIPOLYGON" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int", "unambiguous number of LV-Grid", "n/a" + "name", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, MULTIPOLYGON" "population","int","population in LV-Grid","?" "voltage_nom","float","voltage level of grid","kV" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" LV-Loads ----------- @@ -226,11 +678,13 @@ LV-Loads :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" - "geom", "None","geometric coordinates", "n/a" - "consumption","{''str'': float}","type of load {residential, agricultural, industrial} and corresponding consumption", "n/a" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int", "unambiguous number of LV-Grid", "n/a" + "name", "str", "unambiguous name: 'LVLoadDing0_LV_#lvgridid#_#ascendingnumber#'", "n/a" + "lv_grid_id", "int","unambiguous id_db of LV-Grid", "n/a" + "geom", "None", "geometric coordinates", "WGS84, POINT" + "consumption","{''str'': float}","type of load {residential, agricultural, industrial} and corresponding consumption", "n/a" + LV-Stations ----------- @@ -238,10 +692,11 @@ LV-Stations :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POINT" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int", "unambiguous number of LV-Grid", "n/a" + "geom", "wkt", "geometric coordinates", "WGS84, POINT" + "name", "str", "unambiguous name: 'LVStationDing0_MV_#mvgridid#_#lvgridid#'", "n/a" + LV-Transformers ---------------- @@ -249,14 +704,15 @@ LV-Transformers :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'TransformerDing0_LV_#mvgridid#_#lvgridid#'", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of LV-Grid", "n/a" + "name", "str", 
"unambiguous name: 'TransformerDing0_LV_#mvgridid#_#lvgridid#'", "n/a" "geom", "wkt","geometric coordinates", "WGS84 POINT" "voltage_op","float","","kV" - "S_nom","float","nominal apparent power","kVA" - "X","float","","Ohm" - "R","float","","Ohm" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "s_nom","float","nominal apparent power","kVA" + "x","float","","Ohm" + "r","float","","Ohm" + LV-Grids ----------- @@ -264,11 +720,12 @@ LV-Grids :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "LV_grid_id", "int","unambiguous number of LV-Grid", "n/a" - "MV_grid_id", "int","unambiguous number of MV-Grid", "n/a" - "LV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "lv_grid_id", "int","unambiguous number of LV-Grid", "n/a" + "lv_grid_name", "str", "unambiguous name: 'LVGridDing0_LV_#lvgridid#_#lvgridid#'", "n/a" + "mv_grid_id", "int","unambiguous number of MV-Grid", "n/a" + "mv_grid_name", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + MV-Branchtees -------------- @@ -276,10 +733,10 @@ MV-Branchtees :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POINT" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous id_db of MV-Grid", "n/a" + "name", "str", "unambiguous name: 'MVCableDistributorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, POINT" MV-Generators -------------- @@ -287,14 +744,17 @@ MV-Generators :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POINT" - "type","str","type of generation: {solar; biomass}","n/a" - "subtype","str","subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas}","n/a" - "v_level","int","voltage level of generator","" - "nominal_capacity","float","nominal capacity","" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int", "unambiguous number of MV-Grid", "n/a" + "name", "str", "unambiguous name: 'MVGeneratorDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "geom", "wkt", "geometric coordinates", "WGS84, POINT" + "type", "str", "type of generation: {solar; biomass}", "n/a" + "subtype", "str", "subtype of generation: {solar_ground_mounted, solar_roof_mounted, unknown; biomass, biogas}", "n/a" + "v_level", "int", "voltage level of generator", "" + "nominal_capacity", "float", "nominal capacity", "" + "is_aggregated", "boolean", "True if load is aggregated load, else False", "n/a" + "weather_cell_id", "int", "unambiguous number of the corresponding weather cell", "n/a" + MV-Grids ----------- @@ -302,12 +762,13 @@ MV-Grids :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "MV_grid_id", "int","unambiguous number of LV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 MULTIPOLYGON" - "population","int","population in 
LV-Grid","?" - "voltage_nom","float","voltage level of grid","kV" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of MV-Grid", "n/a" + "name", "str", "unambiguous name: 'MVGridDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, MULTIPOLYGON" + "population","int","population in MV-Grid","?" + "voltage_nom","float","voltage level of grid","kV" + MV-Loads ----------- @@ -315,12 +776,13 @@ MV-Loads :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POLYGON" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of MV-Grid", "n/a" + "name", "str", "unambiguous name: 'MVLoadDing0_MV_#mvgridid#_#ascendingnumber#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, POLYGON" "consumption","{''str'': float}","type of load {retail, residential, agricultural, industrial} and corresponding consumption","n/a" "is_aggregated", "boolean", "True if load is aggregated load, else False", "n/a" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + MV-Stations ----------- @@ -328,10 +790,11 @@ MV-Stations :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'MVStationDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of MV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POINT" "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of MV-Grid", "n/a" + "name", "str", "unambiguous name: 'MVStationDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, POINT" + MV-Transformers ---------------- @@ -339,11 +802,11 @@ MV-Transformers :header: "Field","type", "Description", "Unit" :widths: 15, 10, 10, 30 - "id_db", "str", "unambiguous name: 'TransformerDing0_MV_#mvgridid#_#mvgridid#'", "n/a" - "MV_grid_id_db", "int","unambiguous id_db of LV-Grid", "n/a" - "geom", "wkt","geometric coordinates", "WGS84 POINT" + "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" + "id_db", "int","unambiguous number of LV-Grid", "n/a" + "name", "str", "unambiguous name: 'TransformerDing0_MV_#mvgridid#_#mvgridid#'", "n/a" + "geom", "wkt","geometric coordinates", "WGS84, POINT" "voltage_op","float","","kV" - "S_nom","float","nominal apparent power","kVA" - "X","float","","Ohm" - "R","float","","Ohm" - "run_id", "int", "time and date of table generation", "yyyyMMddhhmmss" \ No newline at end of file + "s_nom","float","nominal apparent power","kVA" + "x","float","","Ohm" + "r","float","","Ohm"