diff --git a/README.rst b/README.rst
index 3a5f9bb4..e16b97a3 100644
--- a/README.rst
+++ b/README.rst
@@ -29,13 +29,19 @@ instead part of 'eGo' https://github.com/openego/eGo
 eTraGo is documented on `readthedocs `_.
 
-.. warning::
-  From now on eTraGo depends on a sector coupled data-model. This is not published on
-  the oedb yet, the data has to be created using
-  `eGon-data `_.
-  Not all functions and features are compatible to the sector coupled model yet.
-
-  When you want to use eTraGo for optimizations, please use the latest release 0.8.0.
+
+Input data
+==========
+The grid model data for eTraGo was created with the open source tool
+`eGon-data `_. The resulting data will
+be published on the `OpenEnergyPlatform `_.
+As long as the data is not published there, a local database is needed.
+We published a backup of the required tables and instructions on how to use it
+on Zenodo:
+
+.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.8376714.svg
+   :target: https://doi.org/10.5281/zenodo.8376714
+
 
 Installation
 ============
diff --git a/doc/about.rst b/doc/about.rst
index a7b91cf1..ad11ac9a 100644
--- a/doc/about.rst
+++ b/doc/about.rst
@@ -78,14 +78,6 @@ eGon-data is a further development of the `Data processing `_, `ding0 `_
 and `eDisGo `_ and delivers for example data on grid topologies, demands/demand
 curves and generation capacities in a high spatial resolution. The outputs of
 egon-data are published under open source and open data licenses.
 
-ego.io
-======
-
-The ego.io serves as a SQLAlchemy Interface to the OpenEnergy database (oedb). The
-oedb table ORM objects are defined here and small helpers for io tasks are contained.
-`Learn more here `_.
-
-
 Dingo
 =====
diff --git a/doc/installation.rst b/doc/installation.rst
index a62a615a..f10ef989 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -63,9 +63,8 @@ environments.
 Setup database connection
 =========================
 
-The package `ego.io `_ will be installed
-automatically when eTraGo is installed. The ``egoio``
-gives you python SQL-Alchemy representations of
+The eTraGo module `db `_
+gives you python SQL-Alchemy representations of
 the `OpenEnergy-Database(oedb) `_
 and access to it by using the `oedialect `_, which is a SQL-Alchemy binding
@@ -82,7 +81,7 @@ the oedialect enter the following connection parameter. For
 and you have to take your credentials which you obtained by registering at
 `openenergy-platform.org/login `_.
 
-Your API access / login data will be saved in the folder ``.egoio`` in the file
+Your API access / login data will be saved in the folder ``.etrago_database`` in the file
 ``config.ini``. Consequently, in the config.ini you can also change
 your connection parameters or add new ones.
 In the following you can see how the config.ini looks like when you use the
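
For orientation (illustrative sketch, not part of the patch): the renamed
credentials folder can be inspected directly from Python. This assumes the
folder lives in the user's home directory and uses the standard
``configparser`` layout referenced above.

.. code-block:: python

    # Sketch with assumed path and layout: list the connection parameters
    # that eTraGo stores in ~/.etrago_database/config.ini.
    import configparser
    from pathlib import Path

    config_path = Path.home() / ".etrago_database" / "config.ini"
    parser = configparser.ConfigParser()
    parser.read(config_path)

    for section in parser.sections():
        # hide the password/token when printing the stored parameters
        safe = {k: v for k, v in parser[section].items() if k != "password"}
        print(section, safe)
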
diff --git a/doc/theoretical_background.rst b/doc/theoretical_background.rst
index 6df61b82..62c8e2d9 100644
--- a/doc/theoretical_background.rst
+++ b/doc/theoretical_background.rst
@@ -39,7 +39,7 @@ With the argument ‘pf_post_lopf’, after the LOPF a non-linear power flow sim
 Complexity Reduction
 ---------------------
 
-The data model is characterised by a high spatial (abou 8,000 electrical and 600 gas nodes) and temporal resolution (8,760 timesteps). To reduce the complexity of the resulting optimisation problem, several methods can be applied.
+The data model is characterised by a high spatial (about 8,000 electrical and 600 gas nodes) and temporal resolution (8,760 timesteps). To reduce the complexity of the resulting optimisation problem, several methods can be applied.
 
 Reduction in spatial dimension:
diff --git a/etrago/appl.py b/etrago/appl.py
index 699897fe..fde0be7a 100644
--- a/etrago/appl.py
+++ b/etrago/appl.py
@@ -698,7 +698,7 @@ def run_etrago(args, json_path):
 
     # spatial disaggregation
     # needs to be adjusted for new sectors
-    # etrago.disaggregation()
+    etrago.disaggregation()
 
     # calculate central etrago results
     etrago.calc_results()
diff --git a/etrago/cluster/disaggregation.py b/etrago/cluster/disaggregation.py
index 13c811e1..babd0b3b 100644
--- a/etrago/cluster/disaggregation.py
+++ b/etrago/cluster/disaggregation.py
@@ -14,9 +14,7 @@ class Disaggregation:
-    def __init__(
-        self, original_network, clustered_network, clustering, skip=()
-    ):
+    def __init__(self, original_network, clustered_network, busmap, skip=()):
         """
         :param original_network: Initial (unclustered) network structure
         :param clustered_network: Clustered network used for the optimization
@@ -25,11 +23,11 @@ def __init__(
         """
         self.original_network = original_network
         self.clustered_network = clustered_network
-        self.clustering = clustering
+        self.busmap = busmap
 
         self.buses = pd.merge(
             original_network.buses,
-            self.clustering.busmap.to_frame(name="cluster"),
+            busmap.to_frame(name="cluster"),
             left_index=True,
             right_index=True,
         )
@@ -280,6 +278,7 @@ def solve(self, scenario, solver):
         }
         profile = cProfile.Profile()
         profile = noops
+
         for i, cluster in enumerate(sorted(clusters)):
             log.info(f"Decompose {cluster=} ({i + 1}/{n})")
             profile.enable()
@@ -287,6 +286,7 @@ def solve(self, scenario, solver):
             partial_network, externals = self.construct_partial_network(
                 cluster, scenario
             )
+
             profile.disable()
             self.stats["clusters"].loc[cluster, "decompose"] = time.time() - t
             log.info(
@@ -324,7 +324,9 @@ def solve(self, scenario, solver):
         ):
             log.info(f"Attribute sums, {bt}, clustered - disaggregated:")
             cnb = getattr(self.clustered_network, bt)
+            cnb = cnb[cnb.carrier != "DC"]
             onb = getattr(self.original_network, bt)
+            onb = onb[onb.carrier != "DC"]
             log.info(
                 "{:>{}}: {}".format(
                     "p_nom_opt",
@@ -622,6 +624,7 @@ def solve_partial_network(
                     f" & (bus1 in {index})"
                 )
                 pnb = pnb.query(query)
+
                 assert not pnb.empty or (
                     # In some cases, a district heating grid is connected to a
                     # substation only via a resistive_heater but not e.g. by a
@@ -649,6 +652,13 @@ def solve_partial_network(
                 if pnb.empty:
                     continue
 
+                # Exclude DC links from the disaggregation because it does not
+                # make sense to disaggregate them uniformly.
+                # A new power flow calculation in the high resolution would
+                # be required.
+                if pnb.carrier.iloc[0] == "DC":
+                    continue
+
                 if not (
                     pnb.loc[:, extendable_flag].all()
                     or not pnb.loc[:, extendable_flag].any()
@@ -718,7 +728,12 @@ def solve_partial_network(
                 for s in bustypes[bustype]["series"]:
                     if s in self.skip:
                         continue
+
                     filtered = pnb.loc[filters.get(s, slice(None))]
+
+                    if filtered.empty:
+                        continue
+
                     clt = cl_t[s].loc[:, clb.index[0]]
                     weight = reduce(
                         multiply,
@@ -744,6 +759,7 @@ def solve_partial_network(
                     )
                     delta = abs((new_columns.sum(axis=1) - clt).sum())
                     epsilon = 1e-5
+
                     assert delta < epsilon, (
                         "Sum of disaggregated time series does not match"
                         f" aggregated timeseries: {delta=} > {epsilon=}."
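
For orientation (toy illustration, not part of the patch and not eTraGo's API):
the ``delta < epsilon`` check above protects the core idea of the uniform
disaggregation: a clustered time series is split among the members of a
cluster by a static weight and has to sum back to the aggregated series.

.. code-block:: python

    # Toy uniform split by p_nom; all names and values are invented.
    import pandas as pd

    p_nom = pd.Series({"gen_a": 30.0, "gen_b": 10.0})        # members of one cluster
    clustered_p = pd.Series([20.0, 40.0], name="cluster_0")  # optimised dispatch

    weights = p_nom / p_nom.sum()
    disaggregated = pd.DataFrame(
        {name: clustered_p * w for name, w in weights.items()}
    )

    # the disaggregated series must sum back to the clustered one
    delta = abs((disaggregated.sum(axis=1) - clustered_p).sum())
    assert delta < 1e-5
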
@@ -752,12 +768,23 @@ def solve_partial_network(
 
     def transfer_results(self, *args, **kwargs):
         kwargs["bustypes"] = ["generators", "links", "storage_units", "stores"]
-        kwargs["series"] = {
-            "generators": {"p"},
-            "links": {"p0", "p1"},
-            "storage_units": {"p", "state_of_charge"},
-            "stores": {"e", "p"},
-        }
+
+        # Only disaggregate reactive power (q) if a pf_post_lopf was performed
+        # and there is data in the resulting q time series
+        if self.original_network.generators_t.q.empty:
+            kwargs["series"] = {
+                "generators": {"p"},
+                "links": {"p0", "p1"},
+                "storage_units": {"p", "state_of_charge"},
+                "stores": {"e", "p"},
+            }
+        else:
+            kwargs["series"] = {
+                "generators": {"p", "q"},
+                "links": {"p0", "p1"},
+                "storage_units": {"p", "q", "state_of_charge"},
+                "stores": {"e", "p"},
+            }
         return super().transfer_results(*args, **kwargs)
@@ -799,7 +826,7 @@ def update_constraints(network, externals):
 
 def run_disaggregation(self):
     log.debug("Running disaggregation.")
-    if self.clustering:
+    if self.args["network_clustering"]["active"]:
         disagg = self.args.get("disaggregation")
         skip = () if self.args["pf_post_lopf"]["active"] else ("q",)
         t = time.time()
@@ -808,14 +835,14 @@ def run_disaggregation(self):
             disaggregation = MiniSolverDisaggregation(
                 self.disaggregated_network,
                 self.network,
-                self.clustering,
+                self.busmap,
                 skip=skip,
             )
         elif disagg == "uniform":
             disaggregation = UniformDisaggregation(
                 original_network=self.disaggregated_network,
                 clustered_network=self.network,
-                clustering=self.clustering,
+                busmap=pd.Series(self.busmap["busmap"]),
                 skip=skip,
             )
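
For orientation (toy illustration, not part of the patch): the
``Disaggregation`` classes now receive a plain bus map instead of the full
clustering object, i.e. a pandas Series that maps original bus ids to cluster
bus ids. It is merged onto the original buses exactly as in ``__init__``
above; the ids below are invented.

.. code-block:: python

    import pandas as pd

    # Toy bus map: three original buses collapsed into two clusters.
    busmap = pd.Series({"bus_1": "0", "bus_2": "0", "bus_3": "1"}, name="busmap")

    original_buses = pd.DataFrame(index=["bus_1", "bus_2", "bus_3"])
    buses = pd.merge(
        original_buses,
        busmap.to_frame(name="cluster"),
        left_index=True,
        right_index=True,
    )
    print(buses)  # every original bus annotated with its cluster id
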
diff --git a/etrago/cluster/electrical.py b/etrago/cluster/electrical.py
index 4121e43e..d403f190 100755
--- a/etrago/cluster/electrical.py
+++ b/etrago/cluster/electrical.py
@@ -46,6 +46,7 @@
     strategies_generators,
     strategies_one_ports,
 )
+from etrago.tools.utilities import set_control_strategies
 
 logger = logging.getLogger(__name__)
@@ -326,7 +327,6 @@ def cluster_on_extra_high_voltage(etrago, busmap, with_time=True):
             io.import_series_from_dataframe(network_c, df, "Link", attr)
 
     # dealing with generators
-    network.generators.control = "PV"
     network.generators["weight"] = 1
 
     new_df, new_pnl = aggregategenerators(
@@ -443,8 +443,6 @@ def ehv_clustering(self):
     if self.args["network_clustering_ehv"]["active"]:
         logger.info("Start ehv clustering")
 
-        self.network.generators.control = "PV"
-
         delete_ehv_buses_no_lines(self.network)
 
         busmap = busmap_ehv_clustering(self)
@@ -607,6 +605,7 @@ def unify_foreign_buses(etrago):
                 axis=1,
             )
             n_clusters = (foreign_buses_load.country == country).sum()
+
             if n_clusters < len(df):
                 (
                     busmap_country,
@@ -651,13 +650,6 @@ def preprocessing(etrago):
     network = etrago.network
     settings = etrago.args["network_clustering"]
 
-    # prepare k-mean
-    # k-means clustering (first try)
-    network.generators.control = "PV"
-    network.storage_units.control[
-        network.storage_units.carrier == "extendable_storage"
-    ] = "PV"
-
     # problem our lines have no v_nom. this is implicitly defined by the
     # connected buses:
     network.lines["v_nom"] = network.lines.bus0.map(network.buses.v_nom)
@@ -1026,7 +1018,10 @@ def run_spatial_clustering(self):
     None
     """
     if self.args["network_clustering"]["active"]:
-        self.network.generators.control = "PV"
+        if self.args["disaggregation"] is not None:
+            self.disaggregated_network = self.network.copy()
+        else:
+            self.disaggregated_network = self.network.copy(with_time=False)
 
         elec_network, weight, n_clusters, busmap_foreign = preprocessing(self)
@@ -1058,25 +1053,22 @@ def run_spatial_clustering(self):
             busmap = pd.Series(dtype=str)
             medoid_idx = pd.Series(dtype=str)
 
-        self.clustering, busmap = postprocessing(
+        clustering, busmap = postprocessing(
             self, busmap, busmap_foreign, medoid_idx
         )
         self.update_busmap(busmap)
 
-        if self.args["disaggregation"] is not None:
-            self.disaggregated_network = self.network.copy()
-        else:
-            self.disaggregated_network = self.network.copy(with_time=False)
-
-        self.network = self.clustering.network
+        self.network = clustering.network
 
         self.buses_by_country()
         self.geolocation_buses()
 
-        self.network.generators.control[
-            self.network.generators.control == ""
-        ] = "PV"
+        # The control parameter is overwritten in pypsa's clustering.
+        # The function network.determine_network_topology is called,
+        # which sets slack bus(es).
+        set_control_strategies(self.network)
+
         logger.info(
             "Network clustered to {} buses with ".format(
                 self.args["network_clustering"]["n_clusters_AC"]
diff --git a/etrago/cluster/gas.py b/etrago/cluster/gas.py
index b25f64e6..cca92bbf 100644
--- a/etrago/cluster/gas.py
+++ b/etrago/cluster/gas.py
@@ -42,6 +42,7 @@
     kmedoids_dijkstra_clustering,
     sum_with_inf,
 )
+from etrago.tools.utilities import set_control_strategies
 
 logger = logging.getLogger(__name__)
@@ -948,7 +949,6 @@ def run_spatial_clustering_gas(self):
     settings = self.args["network_clustering"]
 
     if settings["active"]:
-        self.network.generators.control = "PV"
         method = settings["method_gas"]
         logger.info(f"Start {method} clustering GAS")
 
@@ -1000,6 +1000,11 @@ def run_spatial_clustering_gas(self):
 
         self.update_busmap(busmap)
 
+        # The control parameter is overwritten in pypsa's clustering.
+        # The function network.determine_network_topology is called,
+        # which sets slack bus(es).
+        set_control_strategies(self.network)
+
         logger.info(
             """GAS Network clustered to {} DE-buses and {} foreign buses
             with {} algorithm.""".format(
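
For orientation (illustrative sketch, not part of the patch): in PyPSA the
``control`` attribute determines the power flow behaviour of a generator or
storage unit. ``PQ`` fixes active and reactive power, ``PV`` fixes active
power and voltage magnitude, and ``Slack`` balances the sub-network. The toy
network below (invented component names) just shows where the attribute lives.

.. code-block:: python

    import pypsa

    network = pypsa.Network()
    network.add("Bus", "bus0", carrier="AC")
    network.add("Generator", "gen0", bus="bus0", carrier="wind", control="PV")
    network.add(
        "Generator", "shed0", bus="bus0", carrier="load shedding", control="PQ"
    )

    print(network.generators[["bus", "carrier", "control"]])
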
diff --git a/etrago/tools/execute.py b/etrago/tools/execute.py
index 5eef60fa..838a66a2 100755
--- a/etrago/tools/execute.py
+++ b/etrago/tools/execute.py
@@ -804,20 +804,6 @@ def drop_foreign_components(network):
         network
     )
 
-    # Assign generators control strategy
-    ac_bus = network.buses[network.buses.carrier == "AC"]
-    network.generators.control[
-        network.generators.bus.isin(ac_bus.index)
-    ] = "PV"
-    network.generators.control[
-        network.generators.carrier == "load shedding"
-    ] = "PQ"
-
-    # Assign storage units control strategy
-    network.storage_units.control[
-        network.storage_units.bus.isin(ac_bus.index)
-    ] = "PV"
-
     # Find out the name of the main subnetwork
     main_subnet = str(network.buses.sub_network.value_counts().argmax())
diff --git a/etrago/tools/network.py b/etrago/tools/network.py
index 88a1696e..1dfbb231 100644
--- a/etrago/tools/network.py
+++ b/etrago/tools/network.py
@@ -92,6 +92,7 @@
     load_shedding,
     manual_fixes_datamodel,
     set_branch_capacity,
+    set_control_strategies,
     set_line_costs,
     set_q_foreign_loads,
     set_q_national_loads,
@@ -405,5 +406,7 @@ def adjust_network(self):
 
         self.delete_dispensable_ac_buses()
 
+        set_control_strategies(self.network)
+
     def _ts_weighted(self, timeseries):
         return timeseries.mul(self.network.snapshot_weightings, axis=0)
diff --git a/etrago/tools/utilities.py b/etrago/tools/utilities.py
index 164b7c9a..47d05c64 100755
--- a/etrago/tools/utilities.py
+++ b/etrago/tools/utilities.py
@@ -541,7 +541,6 @@ def set_q_foreign_loads(self, cos_phi):
     ].values * math.tan(
         math.acos(cos_phi)
     )
-    network.generators.control[network.generators.control == "PQ"] = "PV"
 
     # To avoid a problem when the index of the load is the weather year,
     # the column names were temporarily set to `int` and changed back to
@@ -645,6 +644,7 @@ def load_shedding(self, temporal_disaggregation=False, **kwargs):
                     p_nom=p_nom,
                     carrier="load shedding",
                     bus=network.buses.index,
+                    control="PQ",
                 ),
                 index=index,
             ),
@@ -652,6 +652,41 @@ def load_shedding(self, temporal_disaggregation=False, **kwargs):
     )
 
 
+def set_control_strategies(network):
+    """Sets control strategies for AC generators and storage units
+
+    Parameters
+    ----------
+    network : :class:`pypsa.Network`
+        Overall container of PyPSA
+
+    Returns
+    -------
+    None.
+
+    """
+    # Assign generators control strategy
+    network.generators.loc[:, "control"] = "PV"
+
+    network.generators.loc[
+        network.generators.carrier.isin(
+            [
+                "load shedding",
+                "CH4",
+                "CH4_biogas",
+                "CH4_NG",
+                "central_biomass_CHP_heat",
+                "geo_thermal",
+                "solar_thermal_collector",
+            ]
+        ),
+        "control",
+    ] = "PQ"
+
+    # Assign storage units control strategy
+    network.storage_units.loc[:, "control"] = "PV"
+
+
 def data_manipulation_sh(network):
     """Adds missing components to run calculations with SH scenarios.
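
For orientation (illustrative usage sketch, not part of the patch): the new
helper can be applied to any pypsa network, assuming eTraGo and therefore
``etrago.tools.utilities`` is installed. It mirrors the calls added above in
``adjust_network()`` and in the spatial clustering routines; the toy component
names are invented.

.. code-block:: python

    import pypsa

    from etrago.tools.utilities import set_control_strategies

    network = pypsa.Network()
    network.add("Bus", "bus0", carrier="AC")
    network.add("Generator", "wind0", bus="bus0", carrier="wind_onshore")
    network.add("Generator", "shed0", bus="bus0", carrier="load shedding")
    network.add("StorageUnit", "battery0", bus="bus0", carrier="battery")

    set_control_strategies(network)
    print(network.generators.control)     # wind0 -> "PV", shed0 -> "PQ"
    print(network.storage_units.control)  # battery0 -> "PV"
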