From 7f2430aff10126b77814045f509e579211923a2f Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 16 Jul 2024 16:14:27 +0200 Subject: [PATCH 1/5] delete unused curtailment.py --- edisgo/flex_opt/curtailment.py | 782 --------------------------------- 1 file changed, 782 deletions(-) delete mode 100644 edisgo/flex_opt/curtailment.py diff --git a/edisgo/flex_opt/curtailment.py b/edisgo/flex_opt/curtailment.py deleted file mode 100644 index 484df444..00000000 --- a/edisgo/flex_opt/curtailment.py +++ /dev/null @@ -1,782 +0,0 @@ -import logging - -import pandas as pd - -from pyomo.environ import ( - ConcreteModel, - Constraint, - Objective, - Param, - Set, - Var, - minimize, -) -from pyomo.opt import SolverFactory - -from edisgo.io import pypsa_io - - -def voltage_based( - feedin, generators, curtailment_timeseries, edisgo, curtailment_key, **kwargs -): - """ - Implements curtailment methodology 'voltage-based'. - - ToDo: adapt to refactored code! - - The curtailment that has to be met in each time step is allocated depending - on the exceedance of the allowed voltage deviation at the nodes of the - generators. The higher the exceedance, the higher the curtailment. - - The optional parameter `voltage_threshold` specifies the threshold for the - exceedance of the allowed voltage deviation above which a generator is - curtailed. By default it is set to zero, meaning that all generators at - nodes with voltage deviations that exceed the allowed voltage deviation are - curtailed. Generators at nodes where the allowed voltage deviation is not - exceeded are not curtailed. In the case that the required curtailment - exceeds the weather-dependent availability of all generators with voltage - deviations above the specified threshold, the voltage threshold is lowered - in steps of 0.01 p.u. until the curtailment target can be met. - - Above the threshold, the curtailment is proportional to the exceedance of - the allowed voltage deviation. In order to find the linear relation between - the curtailment and the voltage difference a linear problem is formulated - and solved using the python package pyomo. See documentation for further - information. - - Parameters - ---------- - feedin : :pandas:`pandas.DataFrame` - Dataframe holding the feed-in of each generator in kW for the - technology (and weather cell) specified in `curtailment_key` parameter. - Index of the dataframe is a - :pandas:`pandas.DatetimeIndex`. Columns are the - representatives of the fluctuating generators. - generators : :pandas:`pandas.DataFrame` - Dataframe with all generators of the type (and in weather cell) - specified in `curtailment_key` parameter. See return value of - :func:`edisgo.network.tools.get_gen_info` for more information. - curtailment_timeseries : :pandas:`pandas.Series` - The curtailment in kW to be distributed amongst the generators in - `generators` parameter. Index of the series is a - :pandas:`pandas.DatetimeIndex`. - edisgo : :class:`~.edisgo.EDisGo` - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment is specified for. - voltage_threshold: :obj:`float` - The node voltage below which no curtailment is assigned to the - respective generator if not necessary. Default: 0.0. - solver: :obj:`str` - The solver used to optimize the curtailment assigned to the generator. 
- Possible options are: - - * 'cbc' - coin-or branch and cut solver - * 'glpk' - gnu linear programming kit solver - * any other available compatible with 'pyomo' like 'gurobi' - or 'cplex' - - Default: 'cbc' - - """ - - raise NotImplementedError - - voltage_threshold = pd.Series( - kwargs.get("voltage_threshold", 0.0), - index=curtailment_timeseries.index, - ) - solver = kwargs.get("solver", "cbc") - combined_analysis = kwargs.get("combined_analysis", False) - - # get the voltages at the generators - if not edisgo.network.pypsa.edisgo_mode: - voltages_lv_gens = edisgo.network.results.v_res( - nodes_df=generators.loc[(generators.voltage_level == "lv")].index, - level="lv", - ) - else: - # if only MV topology was analyzed (edisgo_mode = 'mv') all LV - # generators are assigned the voltage at the corresponding station's - # primary side - lv_gens = generators[generators.voltage_level == "lv"] - voltages_lv_stations = edisgo.network.results.v_res( - nodes_df=[_.station for _ in lv_gens.grid.unique()], level="mv" - ) - voltages_lv_gens = pd.DataFrame() - for lv_gen in lv_gens.index: - voltages_lv_gens[repr(lv_gen)] = voltages_lv_stations[ - repr(lv_gen.grid.station) - ] - voltages_mv_gens = edisgo.network.results.v_res( - nodes_df=generators.loc[(generators.voltage_level == "mv")].index, - level="mv", - ) - voltages_gens = voltages_lv_gens.join(voltages_mv_gens) - - # get allowed voltage deviations from config - if not combined_analysis: - allowed_voltage_dev_mv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["mv_feed-in_case_max_v_deviation"] - allowed_voltage_diff_lv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["lv_feed-in_case_max_v_deviation"] - else: - allowed_voltage_dev_mv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["mv_lv_feed-in_case_max_v_deviation"] - allowed_voltage_diff_lv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["mv_lv_feed-in_case_max_v_deviation"] - - # assign allowed voltage deviation to each generator - if not edisgo.network.pypsa.edisgo_mode: - # for edisgo_mode = None - - # get voltages at stations - grids = list(set(generators.grid)) - lv_stations = [_.station for _ in grids if "LVStation" in repr(_.station)] - voltage_lv_stations = edisgo.network.results.v_res( - nodes_df=lv_stations, level="lv" - ) - voltages_mv_station = edisgo.network.results.v_res( - nodes_df=[edisgo.network.mv_grid.station], level="mv" - ) - voltages_stations = voltage_lv_stations.join(voltages_mv_station) - - # assign allowed voltage deviation - generators["allowed_voltage_dev"] = generators.voltage_level.apply( - lambda _: allowed_voltage_diff_lv if _ == "lv" else allowed_voltage_dev_mv - ) - - # calculate voltage difference from generator node to station - voltage_gens_diff = pd.DataFrame() - for gen in voltages_gens.columns: - station = generators[generators.gen_repr == gen].grid.values[0].station - voltage_gens_diff[gen] = ( - voltages_gens.loc[:, gen] - - voltages_stations.loc[:, repr(station)] - - generators[generators.gen_repr == gen].allowed_voltage_dev.iloc[0] - ) - - else: - # for edisgo_mode = 'mv' - - station = edisgo.network.mv_grid.station - # get voltages at HV/MV station - voltages_station = edisgo.network.results.v_res(nodes_df=[station], level="mv") - - # assign allowed voltage deviation - generators["allowed_voltage_dev"] = allowed_voltage_dev_mv - - # calculate voltage difference from generator node to station - voltage_gens_diff = pd.DataFrame() - for gen in 
voltages_gens.columns: - voltage_gens_diff[gen] = ( - voltages_gens.loc[:, gen] - - voltages_station.loc[:, repr(station)] - - generators[generators.gen_repr == gen].allowed_voltage_dev.iloc[0] - ) - - # for every time step check if curtailment can be fulfilled, otherwise - # reduce voltage threshold; set feed-in of generators below voltage - # threshold to zero, so that they cannot be curtailed - for ts in curtailment_timeseries.index: - # get generators with voltage higher than threshold - gen_pool = voltage_gens_diff.loc[ - ts, voltage_gens_diff.loc[ts, :] > voltage_threshold.loc[ts] - ].index - # if curtailment cannot be fulfilled lower voltage threshold - while sum(feedin.loc[ts, gen_pool]) < curtailment_timeseries.loc[ts]: - voltage_threshold.loc[ts] = voltage_threshold.loc[ts] - 0.01 - gen_pool = voltage_gens_diff.loc[ - ts, voltage_gens_diff.loc[ts, :] > voltage_threshold.loc[ts] - ].index - # set feed-in of generators below voltage threshold to zero, so that - # they cannot be curtailed - gen_pool_out = voltage_gens_diff.loc[ - ts, voltage_gens_diff.loc[ts, :] <= voltage_threshold.loc[ts] - ].index - feedin.loc[ts, gen_pool_out] = 0 - - # only optimize for time steps where curtailment is greater than zero - timeindex = curtailment_timeseries[curtailment_timeseries > 0].index - if not timeindex.empty: - curtailment = _optimize_voltage_based_curtailment( - feedin, - voltage_gens_diff, - curtailment_timeseries, - voltage_threshold, - timeindex, - solver, - ) - else: - curtailment = pd.DataFrame() - - # set curtailment for other time steps to zero - curtailment = pd.concat( - [ - curtailment, - pd.DataFrame( - 0, - columns=feedin.columns, - index=curtailment_timeseries[curtailment_timeseries <= 0].index, - ), - ] - ) - - # check if curtailment target was met - _check_curtailment_target(curtailment, curtailment_timeseries, curtailment_key) - - # assign curtailment to individual generators - _assign_curtailment(curtailment, edisgo, generators, curtailment_key) - - -def _optimize_voltage_based_curtailment( - feedin, voltage_pu, total_curtailment, voltage_threshold, timeindex, solver -): - """ - Formulates and solves linear problem to find linear relation between - curtailment and node voltage. - - ToDo: adapt to refactored code! - - Parameters - ------------ - feedin : :pandas:`pandas.DataFrame` - See `feedin` parameter in - :func:`edisgo.flex_opt.curtailment.voltage_based` for more information. - voltage_pu : :pandas:`pandas.DataFrame - Dataframe containing voltages in p.u. at the generator nodes. Index - of the dataframe is a :pandas:`pandas.DatetimeIndex`, - columns are the generator representatives. - total_curtailment : :pandas:`pandas.Series` - Series containing the specific curtailment in kW to be allocated to the - generators. The index is a - :pandas:`pandas.DatetimeIndex`. - voltage_threshold : :pandas:`pandas.Series` - Series containing the voltage thresholds in p.u. below which no - generator curtailment will occur. The index is a - :pandas:`pandas.DatetimeIndex`. - solver : :obj:`str` - The solver used to optimize the linear problem. Default: 'cbc'. - - Returns - ------- - :pandas:`pandas.DataFrame` - Dataframe containing the curtailment in kW per generator and time step - feed-in was provided for in `feedin` parameter. Index is a - :pandas:`pandas.DatetimeIndex`, columns are the - generator representatives. 
- - """ - - raise NotImplementedError - - logging.debug("Start curtailment optimization.") - - v_max = voltage_pu.max(axis=1) - generators = feedin.columns - - # additional curtailment factors - cf_add = pd.DataFrame(index=timeindex) - for gen in generators: - cf_add[gen] = abs( - (voltage_pu.loc[timeindex, gen] - v_max[timeindex]) - / (voltage_threshold[timeindex] - v_max[timeindex]) - ) - - # curtailment factors - cf = pd.DataFrame(index=timeindex) - for gen in generators: - cf[gen] = abs( - (voltage_pu.loc[timeindex, gen] - voltage_threshold[timeindex]) - / (v_max[timeindex] - voltage_threshold[timeindex]) - ) - - # initialize model - model = ConcreteModel() - - # add sets - model.T = Set(initialize=timeindex) - model.G = Set(initialize=generators) - - # add parameters - def feedin_init(model, t, g): - return feedin.loc[t, g] - - model.feedin = Param(model.T, model.G, initialize=feedin_init) - - def voltage_pu_init(model, t, g): - return voltage_pu.loc[t, g] - - model.voltage_pu = Param(model.T, model.G, initialize=voltage_pu_init) - - def cf_add_init(model, t, g): - return cf_add.loc[t, g] - - model.cf_add = Param(model.T, model.G, initialize=cf_add_init) - - def cf_init(model, t, g): - return cf.loc[t, g] - - model.cf = Param(model.T, model.G, initialize=cf_init) - - def total_curtailment_init(model, t): - return total_curtailment.loc[t] - - model.total_curtailment = Param(model.T, initialize=total_curtailment_init) - - # add variables - model.offset = Var(model.T, bounds=(0, 1)) - model.cf_max = Var(model.T, bounds=(0, 1)) - - def curtailment_init(model, t, g): - return (0, feedin.loc[t, g]) - - model.c = Var(model.T, model.G, bounds=curtailment_init) - - # add objective - def obj_rule(model): - expr = sum(model.offset[t] * 100 for t in model.T) - return expr - - model.obj = Objective(rule=obj_rule, sense=minimize) - - # add constraints - # curtailment per generator constraints - def curtail(model, t, g): - return ( - model.cf[t, g] * model.cf_max[t] * model.feedin[t, g] - + model.cf_add[t, g] * model.offset[t] * model.feedin[t, g] - - model.c[t, g] - == 0 - ) - - model.curtailment = Constraint(model.T, model.G, rule=curtail) - - # total curtailment constraint - def total_curtailment(model, t): - return sum(model.c[t, g] for g in model.G) == model.total_curtailment[t] - - model.sum_curtailment = Constraint(model.T, rule=total_curtailment) - - # solve - solver = SolverFactory(solver) - results = solver.solve(model, tee=False) - - # load results back into model - model.solutions.load_from(results) - - return pd.DataFrame( - {g: [model.c[t, g].value for t in model.T] for g in model.G}, - index=model.T, - ) - - -def feedin_proportional( - feedin, generators, curtailment_timeseries, edisgo, curtailment_key, **kwargs -): - """ - Implements curtailment methodology 'feedin-proportional'. - - ToDo: adapt to refactored code! - - The curtailment that has to be met in each time step is allocated - equally to all generators depending on their share of total - feed-in in that time step. - - Parameters - ---------- - feedin : :pandas:`pandas.DataFrame` - Dataframe holding the feed-in of each generator in kW for the - technology (and weather cell) specified in `curtailment_key` parameter. - Index of the dataframe is a - :pandas:`pandas.DatetimeIndex`. Columns are the - representatives of the fluctuating generators. - generators : :pandas:`pandas.DataFrame` - Dataframe with all generators of the type (and in weather cell) - specified in `curtailment_key` parameter. 
See return value of - :func:`edisgo.network.tools.get_gen_info` for more information. - curtailment_timeseries : :pandas:`pandas.Series` - The curtailment in kW to be distributed amongst the generators in - `generators` parameter. Index of the series is a - :pandas:`pandas.DatetimeIndex`. - edisgo : :class:`~.edisgo.EDisGo` - curtailment_key::obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment is specified for. - - """ - raise NotImplementedError - - # calculate curtailment in each time step of each generator - curtailment = feedin.divide(feedin.sum(axis=1), axis=0).multiply( - curtailment_timeseries, axis=0 - ) - - # substitute NaNs from division with 0 by 0 - curtailment.fillna(0, inplace=True) - - # check if curtailment target was met - _check_curtailment_target(curtailment, curtailment_timeseries, curtailment_key) - - # assign curtailment to individual generators - _assign_curtailment(curtailment, edisgo, generators, curtailment_key) - - -def _check_curtailment_target(curtailment, curtailment_target, curtailment_key): - """ - Raises an error if curtailment target was not met in any time step. - - ToDo: adapt to refactored code! - - Parameters - ----------- - curtailment : :pandas:`pandas.DataFrame` - Dataframe containing the curtailment in kW per generator and time step. - Index is a :pandas:`pandas.DatetimeIndex`, columns are - the generator representatives. - curtailment_target : :pandas:`pandas.Series` - The curtailment in kW that was to be distributed amongst the - generators. Index of the series is a - :pandas:`pandas.DatetimeIndex`. - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment was specified for. - - """ - raise NotImplementedError - - if not (abs(curtailment.sum(axis=1) - curtailment_target) < 1e-1).all(): - message = "Curtailment target not met for {}.".format(curtailment_key) - logging.error(message) - raise TypeError(message) - - -def _assign_curtailment(curtailment, edisgo, generators, curtailment_key): - """ - Helper function to write curtailment time series to generator objects. - - ToDo: adapt to refactored code! - - This function also writes a list of the curtailed generators to curtailment - in :class:`edisgo.network.network.TimeSeries` and - :class:`edisgo.network.network.Results`. - - Parameters - ---------- - curtailment : :pandas:`pandas.DataFrame` - Dataframe containing the curtailment in kW per generator and time step - for all generators of the type (and in weather cell) specified in - `curtailment_key` parameter. Index is a - :pandas:`pandas.DatetimeIndex`, columns are the - generator representatives. - edisgo : :class:`~.edisgo.EDisGo` - generators : :pandas:`pandas.DataFrame` - Dataframe with all generators of the type (and in weather cell) - specified in `curtailment_key` parameter. See return value of - :func:`edisgo.network.tools.get_gen_info` for more information. - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment is specified for. 
- - """ - raise NotImplementedError - - gen_object_list = [] - for gen in curtailment.columns: - # get generator object from representative - gen_object = generators.loc[generators.gen_repr == gen].index[0] - # assign curtailment to individual generators - gen_object.curtailment = curtailment.loc[:, gen] - gen_object_list.append(gen_object) - - # set timeseries.curtailment - if edisgo.network.timeseries._curtailment: - edisgo.network.timeseries._curtailment.extend(gen_object_list) - edisgo.network.results._curtailment[curtailment_key] = gen_object_list - else: - edisgo.network.timeseries._curtailment = gen_object_list - # list needs to be copied, otherwise it will be extended every time - # a new key is added to results._curtailment - edisgo.network.results._curtailment = {curtailment_key: gen_object_list.copy()} - - -class CurtailmentControl: - """ - Allocates given curtailment targets to solar and wind generators. - - ToDo: adapt to refactored code! - - Parameters - ---------- - edisgo: :class:`edisgo.EDisGo` - The parent EDisGo object that this instance is a part of. - methodology : :obj:`str` - Defines the curtailment strategy. Possible options are: - - * 'feedin-proportional' - The curtailment that has to be met in each time step is allocated - equally to all generators depending on their share of total - feed-in in that time step. For more information see - :func:`edisgo.flex_opt.curtailment.feedin_proportional`. - * 'voltage-based' - The curtailment that has to be met in each time step is allocated - based on the voltages at the generator connection points and a - defined voltage threshold. Generators at higher voltages - are curtailed more. The default voltage threshold is 1.0 but - can be changed by providing the argument 'voltage_threshold'. This - method formulates the allocation of curtailment as a linear - optimization problem using :py:mod:`Pyomo` and requires a linear - programming solver like coin-or cbc (cbc) or gnu linear programming - kit (glpk). The solver can be specified through the parameter - 'solver'. For more information see - :func:`edisgo.flex_opt.curtailment.voltage_based`. - - curtailment_timeseries : :pandas:`pandas.Series` or \ - :pandas:`pandas.DataFrame`, optional - Series or DataFrame containing the curtailment time series in kW. Index - needs to be a :pandas:`pandas.DatetimeIndex`. - Provide a Series if the curtailment time series applies to wind and - solar generators. Provide a DataFrame if the curtailment time series - applies to a specific technology and optionally weather cell. In the - first case columns of the DataFrame are e.g. 'solar' and 'wind'; in the - second case columns need to be a - :pandas:`pandas.MultiIndex` with the first level containing - the type and the second level the weather cell ID. Default: None. - solver: :obj:`str` - The solver used to optimize the curtailment assigned to the generators - when 'voltage-based' curtailment methodology is chosen. - Possible options are: - - * 'cbc' - * 'glpk' - * any other available solver compatible with 'pyomo' such as 'gurobi' - or 'cplex' - - Default: 'cbc'. - voltage_threshold : :obj:`float` - Voltage below which no curtailment is assigned to the respective - generator if not necessary when 'voltage-based' curtailment methodology - is chosen. See :func:`edisgo.flex_opt.curtailment.voltage_based` for - more information. Default: 1.0. - mode : :obj:`str` - The `mode` is only relevant for curtailment method 'voltage-based'. - Possible options are None and 'mv'. 
Per default `mode` is None in which - case a power flow is conducted for both the MV and LV. In case `mode` - is set to 'mv' components in underlying LV grids are considered - aggregative. Default: None. - - """ - - # ToDo move some properties from topology here (e.g. peak_load, generators,...) - def __init__( - self, edisgo, methodology, curtailment_timeseries, mode=None, **kwargs - ): - raise NotImplementedError - - logging.info("Start curtailment methodology {}.".format(methodology)) - - self._check_timeindex(curtailment_timeseries, edisgo.topology) - - if methodology == "feedin-proportional": - curtailment_method = feedin_proportional - elif methodology == "voltage-based": - curtailment_method = voltage_based - else: - raise ValueError( - "{} is not a valid curtailment methodology.".format(methodology) - ) - - # check if provided mode is valid - if mode and mode != "mv": - raise ValueError("Provided mode {} is not a valid mode.") - - # get all fluctuating generators and their attributes (weather ID, - # type, etc.) - # TODO: Function get_gen_info does not exist - generators = get_gen_info( # noqa: F821 - edisgo.topology, "mvlv", fluctuating=True - ) - - # do analyze to get all voltages at generators and feed-in dataframe - edisgo.analyze(mode=mode) - - # get feed-in time series of all generators - if not mode: - feedin = edisgo.topology.pypsa.generators_t.p * 1000 - # drop dispatchable generators and slack generator - drop_labels = [ - _ for _ in feedin.columns if "GeneratorFluctuating" not in _ - ] + ["Generator_slack"] - else: - feedin = edisgo.topology.mv_grid.generators_timeseries() - for grid in edisgo.topology.mv_grid.lv_grids: - feedin = pd.concat([feedin, grid.generators_timeseries()], axis=1) - feedin.rename(columns=lambda _: repr(_), inplace=True) - # drop dispatchable generators - drop_labels = [_ for _ in feedin.columns if "GeneratorFluctuating" not in _] - feedin.drop(labels=drop_labels, axis=1, inplace=True) - - if isinstance(curtailment_timeseries, pd.Series): - # check if curtailment exceeds feed-in - self._precheck(curtailment_timeseries, feedin, "all_fluctuating_generators") - - # do curtailment - curtailment_method( - feedin, - generators, - curtailment_timeseries, - edisgo, - "all_fluctuating_generators", - **kwargs - ) - - elif isinstance(curtailment_timeseries, pd.DataFrame): - for col in curtailment_timeseries.columns: - logging.debug("Calculating curtailment for {}".format(col)) - - # filter generators - if isinstance(curtailment_timeseries.columns, pd.MultiIndex): - selected_generators = generators.loc[ - (generators.type == col[0]) - & (generators.weather_cell_id == col[1]) - ] - else: - selected_generators = generators.loc[(generators.type == col)] - - # check if curtailment exceeds feed-in - feedin_selected_generators = feedin.loc[ - :, selected_generators.gen_repr.values - ] - self._precheck( - curtailment_timeseries.loc[:, col], - feedin_selected_generators, - col, - ) - - # do curtailment - if not feedin_selected_generators.empty: - curtailment_method( - feedin_selected_generators, - selected_generators, - curtailment_timeseries.loc[:, col], - edisgo, - col, - **kwargs - ) - - # check if curtailment exceeds feed-in - self._postcheck(edisgo.topology, feedin) - - # update generator time series in pypsa topology - if edisgo.topology.pypsa is not None: - pypsa_io.update_pypsa_generator_timeseries(edisgo.topology) - - # add measure to Results object - edisgo.results.measures = "curtailment" - - def _check_timeindex(self, curtailment_timeseries, network): - """ 
- Raises an error if time index of curtailment time series does not - comply with the time index of load and feed-in time series. - - Parameters - ----------- - curtailment_timeseries : :pandas:`pandas.Series` or \ - :pandas:`pandas.DataFrame` - See parameter `curtailment_timeseries` in class definition for more - information. - - """ - raise NotImplementedError - - if curtailment_timeseries is None: - message = "No curtailment given." - logging.error(message) - raise KeyError(message) - try: - curtailment_timeseries.loc[network.timeseries.timeindex] - except Exception: - message = ( - "Time index of curtailment time series does not match " - "with load and feed-in time series." - ) - logging.error(message) - raise KeyError(message) - - def _precheck(self, curtailment_timeseries, feedin_df, curtailment_key): - """ - Raises an error if the curtailment at any time step exceeds the - total feed-in of all generators curtailment can be distributed among - at that time. - - Parameters - ----------- - curtailment_timeseries : :pandas:`pandas.Series` - Curtailment time series in kW for the technology (and weather - cell) specified in `curtailment_key`. - feedin_df : :pandas:`pandas.Series` - Feed-in time series in kW for all generators of type (and in - weather cell) specified in `curtailment_key`. - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - Technology (and weather cell) curtailment is given for. - - """ - raise NotImplementedError - - if not feedin_df.empty: - feedin_selected_sum = feedin_df.sum(axis=1) - diff = feedin_selected_sum - curtailment_timeseries - # add tolerance (set small negative values to zero) - diff[diff.between(-1, 0)] = 0 - if not (diff >= 0).all(): - bad_time_steps = [_ for _ in diff.index if diff[_] < 0] - message = ( - "Curtailment demand exceeds total feed-in in time " - "steps {}.".format(bad_time_steps) - ) - logging.error(message) - raise ValueError(message) - else: - bad_time_steps = [ - _ for _ in curtailment_timeseries.index if curtailment_timeseries[_] > 0 - ] - if bad_time_steps: - message = ( - "Curtailment given for time steps {} but there " - "are no generators to meet the curtailment target " - "for {}.".format(bad_time_steps, curtailment_key) - ) - logging.error(message) - raise ValueError(message) - - def _postcheck(self, network, feedin): - """ - Raises an error if the curtailment of a generator exceeds the - feed-in of that generator at any time step. - - Parameters - ----------- - network : :class:`~.network.topology.Topology` - feedin : :pandas:`pandas.DataFrame` - DataFrame with feed-in time series in kW. Columns of the dataframe - are :class:`~.network.components.GeneratorFluctuating`, index is - time index. - - """ - raise NotImplementedError - - curtailment = network.timeseries.curtailment - gen_repr = [repr(_) for _ in curtailment.columns] - feedin_repr = feedin.loc[:, gen_repr] - curtailment_repr = curtailment - curtailment_repr.columns = gen_repr - if not ((feedin_repr - curtailment_repr) > -1e-1).all().all(): - message = "Curtailment exceeds feed-in." 
- logging.error(message) - raise TypeError(message) From e18809ae8c0ced704eb19d6c4c12be1cd7fe230b Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 16 Jul 2024 16:14:50 +0200 Subject: [PATCH 2/5] delete unused storage_positioning.py --- edisgo/flex_opt/storage_positioning.py | 707 ------------------------- 1 file changed, 707 deletions(-) delete mode 100644 edisgo/flex_opt/storage_positioning.py diff --git a/edisgo/flex_opt/storage_positioning.py b/edisgo/flex_opt/storage_positioning.py deleted file mode 100644 index b0a7015d..00000000 --- a/edisgo/flex_opt/storage_positioning.py +++ /dev/null @@ -1,707 +0,0 @@ -import logging - -from math import ceil, sqrt - -import networkx as nx -import numpy as np -import pandas as pd - -from networkx.algorithms.shortest_paths.weighted import ( - _dijkstra as dijkstra_shortest_path_length, -) - -from edisgo.flex_opt import check_tech_constraints, costs -from edisgo.tools import plots, tools - -logger = logging.getLogger(__name__) - - -def one_storage_per_feeder( - edisgo, storage_timeseries, storage_nominal_power=None, **kwargs -): - """ - Allocates the given storage capacity to multiple smaller storages. - - ToDo: adapt to refactored code! - - For each feeder with load or voltage issues it is checked if integrating a - storage will reduce peaks in the feeder, starting with the feeder with - the highest theoretical network expansion costs. A heuristic approach is used - to estimate storage sizing and siting while storage operation is carried - over from the given storage operation. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - storage_timeseries : :pandas:`pandas.DataFrame` - Total active and reactive power time series that will be allocated to - the smaller storages in feeders with load or voltage issues. Columns of - the dataframe are 'p' containing active power time series in kW and 'q' - containing the reactive power time series in kvar. Index is a - :pandas:`pandas.DatetimeIndex`. - storage_nominal_power : :obj:`float` or None - Nominal power in kW that will be allocated to the smaller storages in - feeders with load or voltage issues. If no nominal power is provided - the maximum active power given in `storage_timeseries` is used. - Default: None. - debug : :obj:`Boolean`, optional - If dedug is True a dataframe with storage size and path to storage of - all installed and possibly discarded storages is saved to a csv file - and a plot with all storage positions is created and saved, both to the - current working directory with filename `storage_results_{MVgrid_id}`. - Default: False. - check_costs_reduction : :obj:`Boolean` or :obj:`str`, optional - This parameter specifies when and whether it should be checked if a - storage reduced network expansion costs or not. It can be used as a safety - check but can be quite time consuming. Possible options are: - - * 'each_feeder' - Costs reduction is checked for each feeder. If the storage did not - reduce network expansion costs it is discarded. - * 'once' - Costs reduction is checked after the total storage capacity is - allocated to the feeders. If the storages did not reduce network - expansion costs they are all discarded. - * False - Costs reduction is never checked. - - Default: False. - - """ - - def _feeder_ranking(grid_expansion_costs): - """ - Get feeder ranking from network expansion costs DataFrame. - - MV feeders are ranked descending by network expansion costs that are - attributed to that feeder. 
- - Parameters - ---------- - grid_expansion_costs : :pandas:`pandas.DataFrame` - grid_expansion_costs DataFrame from :class:`~.network.network.Results` - of the copied edisgo object. - - Returns - ------- - :pandas:`pandas.Series` - Series with ranked MV feeders (in the copied graph) of type - :class:`~.network.components.Line`. Feeders are ranked by total network - expansion costs of all measures conducted in the feeder. The - feeder with the highest costs is in the first row and the feeder - with the lowest costs in the last row. - - """ - return ( - grid_expansion_costs.groupby(["mv_feeder"], sort=False) - .sum() - .reset_index() - .sort_values(by=["total_costs"], ascending=False)["mv_feeder"] - ) - - def _shortest_path(node): - # TODO: LVStation class is not used anymore - # resolve this when storage positioning is refactored - if isinstance(node, LVStation): # noqa: F821 - return len(nx.shortest_path(node.mv_grid.graph, node.mv_grid.station, node)) - else: - return len(nx.shortest_path(node.grid.graph, node.grid.station, node)) - - def _find_battery_node(edisgo, critical_lines_feeder, critical_nodes_feeder): - """ - Evaluates where to install the storage. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. - critical_lines_feeder : :pandas:`pandas.DataFrame` - Dataframe containing over-loaded lines in MV feeder, their maximum - relative over-loading and the corresponding time step. See - :func:`edisgo.flex_opt.check_tech_constraints.mv_line_overload` for - more information. - critical_nodes_feeder : :obj:`list` - List with all nodes in MV feeder with voltage issues. - - Returns - ------- - :obj:`float` - Node where storage is installed. - - """ - - # if there are overloaded lines in the MV feeder the battery storage - # will be installed at the node farthest away from the MV station - if not critical_lines_feeder.empty: - logger.debug("Storage positioning due to overload.") - # dictionary with nodes and their corresponding path length to - # MV station - path_length_dict = {} - for line in critical_lines_feeder.index: - nodes = line.grid.graph.nodes_from_line(line) - for node in nodes: - path_length_dict[node] = _shortest_path(node) - # return node farthest away - return [ - _ - for _ in path_length_dict - if path_length_dict[_] == max(path_length_dict.values()) - ][0] - - # if there are voltage issues in the MV network the battery storage will - # be installed at the first node in path that exceeds 2/3 of the line - # length from station to critical node with highest voltage deviation - if critical_nodes_feeder: - logger.debug("Storage positioning due to voltage issues.") - node = critical_nodes_feeder[0] - - # get path length from station to critical node - get_weight = lambda u, v, data: data["line"].length # noqa: E731 - path_length = dijkstra_shortest_path_length( - edisgo.network.mv_grid.graph, - edisgo.network.mv_grid.station, - get_weight, - target=node, - ) - - # find first node in path that exceeds 2/3 of the line length - # from station to critical node farthest away from the station - path = nx.shortest_path( - edisgo.network.mv_grid.graph, - edisgo.network.mv_grid.station, - node, - ) - return next(j for j in path if path_length[j] >= path_length[node] * 2 / 3) - - return None - - def _calc_storage_size(edisgo, feeder, max_storage_size): - """ - Calculates storage size that reduces residual load. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. 
- feeder : :class:`~.network.components.Line` - MV feeder the storage will be connected to. The line object is an - object from the copied graph. - - Returns - ------- - :obj:`float` - Storage size that reduced the residual load in the feeder. - - """ - step_size = 200 - sizes = [0] + list( - np.arange(p_storage_min, max_storage_size + 0.5 * step_size, step_size) - ) - p_feeder = edisgo.network.results.pfa_p.loc[:, repr(feeder)] - q_feeder = edisgo.network.results.pfa_q.loc[:, repr(feeder)] - p_slack = edisgo.network.pypsa.generators_t.p.loc[:, "Generator_slack"] * 1e3 - - # get sign of p and q - lines = edisgo.network.pypsa.lines.loc[repr(feeder), :] - mv_station_bus = ( - "bus0" - if lines.loc["bus0"] == f"Bus_{repr(edisgo.network.mv_grid.station)}" - else "bus1" - ) - if mv_station_bus == "bus0": - diff = ( - edisgo.network.pypsa.lines_t.p1.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.p0.loc[:, repr(feeder)] - ) - diff_q = ( - edisgo.network.pypsa.lines_t.q1.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.q0.loc[:, repr(feeder)] - ) - else: - diff = ( - edisgo.network.pypsa.lines_t.p0.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.p1.loc[:, repr(feeder)] - ) - diff_q = ( - edisgo.network.pypsa.lines_t.q0.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.q1.loc[:, repr(feeder)] - ) - p_sign = pd.Series([-1 if _ < 0 else 1 for _ in diff], index=p_feeder.index) - q_sign = pd.Series([-1 if _ < 0 else 1 for _ in diff_q], index=p_feeder.index) - - # get allowed load factors per case - lf = { - "feed-in_case": edisgo.network.config["grid_expansion_load_factors"][ - "mv_feed-in_case_line" - ], - "load_case": network.config["grid_expansion_load_factors"][ - "mv_load_case_line" - ], - } - - # calculate maximum apparent power for each storage size to find - # storage size that minimizes apparent power in the feeder - p_feeder = p_feeder.multiply(p_sign) - q_feeder = q_feeder.multiply(q_sign) - s_max = [] - for size in sizes: - share = size / storage_nominal_power - p_storage = storage_timeseries.p * share - q_storage = storage_timeseries.q * share - p_total = p_feeder + p_storage - q_total = q_feeder + q_storage - p_hv_mv_station = p_slack - p_storage - lf_ts = p_hv_mv_station.apply( - lambda _: lf["feed-in_case"] if _ < 0 else lf["load_case"] - ) - s_max_ts = (p_total**2 + q_total**2).apply(sqrt).divide(lf_ts) - s_max.append(max(s_max_ts)) - - return sizes[pd.Series(s_max).idxmin()] - - def _critical_nodes_feeder(edisgo, feeder): - """ - Returns all nodes in MV feeder with voltage issues. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. - feeder : :class:`~.network.components.Line` - MV feeder the storage will be connected to. The line object is an - object from the copied graph. - - Returns - ------- - :obj:`list` - List with all nodes in MV feeder with voltage issues. - - """ - # get all nodes with voltage issues in MV network - critical_nodes = check_tech_constraints.voltage_issues( - edisgo.network, voltage_levels="mv" - ) - if critical_nodes: - critical_nodes = critical_nodes[edisgo.network.mv_grid] - else: - return [] - - return [n for n in critical_nodes.index if repr(n.mv_feeder) == repr(feeder)] - - def _critical_lines_feeder(edisgo, feeder): - """ - Returns all lines in MV feeder with overload issues. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. - feeder : :class:`~.network.components.Line` - MV feeder the storage will be connected to. 
The line object is an - object from the copied graph. - - Returns - ------- - :pandas:`pandas.DataFrame` - Dataframe containing over-loaded lines in MV feeder, their maximum - relative over-loading and the corresponding time step. See - :func:`edisgo.flex_opt.check_tech_constraints.mv_line_overload` for - more information. - - """ - # return grid_expansion_costs_feeder_ranking[ - # (grid_expansion_costs_feeder_ranking.mv_feeder == feeder) & - # (grid_expansion_costs_feeder_ranking.voltage_level == 'mv')] - # get all overloaded MV lines - critical_lines = check_tech_constraints.mv_line_overload(edisgo.network) - # filter overloaded lines in feeder - critical_lines_feeder = [ - line - for line in critical_lines.index - if repr(tools.get_mv_feeder_from_line(line)) == repr(feeder) - ] - - return critical_lines.loc[critical_lines_feeder, :] - - def _estimate_new_number_of_lines(critical_lines_feeder): - return sum( - ( - ceil( - critical_lines_feeder.loc[crit_line, "max_rel_overload"] - * crit_line.quantity - ) - - crit_line.quantity - ) - for crit_line in critical_lines_feeder.index - ) - - raise NotImplementedError - - debug = kwargs.get("debug", False) - check_costs_reduction = kwargs.get("check_costs_reduction", False) - - # global variables - # minimum and maximum storage power to be connected to the MV network - p_storage_min = 300 - p_storage_max = 4500 - - # remaining storage nominal power - if storage_nominal_power is None: - storage_nominal_power = max(abs(storage_timeseries.p)) - p_storage_remaining = storage_nominal_power - - if debug: - feeder_repr = [] - storage_path = [] - storage_repr = [] - storage_size = [] - - # rank MV feeders by network expansion costs - - # conduct network reinforcement on copied edisgo object on worst-case time - # steps - grid_expansion_results_init = edisgo.reinforce( - copy_graph=True, timesteps_pfa="snapshot_analysis", mode="mv" - ) - - # only analyse storage integration if there were any network expansion needs - if grid_expansion_results_init.equipment_changes.empty: - logger.debug( - "No storage integration necessary since there are no " - "network expansion needs." - ) - return - else: - equipment_changes_reinforcement_init = ( - grid_expansion_results_init.equipment_changes.loc[ - grid_expansion_results_init.equipment_changes.iteration_step > 0 - ] - ) - total_grid_expansion_costs = ( - grid_expansion_results_init.grid_expansion_costs.total_costs.sum() - ) - if equipment_changes_reinforcement_init.empty: - logger.debug( - "No storage integration necessary since there are no " - "network expansion needs." 
- ) - return - else: - network = equipment_changes_reinforcement_init.index[0].grid.network - - # calculate network expansion costs without costs for new generators - # to be used in feeder ranking - grid_expansion_costs_feeder_ranking = costs.grid_expansion_costs( - network, without_generator_import=True, mode="mv" - ) - - ranked_feeders = _feeder_ranking(grid_expansion_costs_feeder_ranking) - - count = 1 - storage_obj_list = [] - total_grid_expansion_costs_new = "not calculated" - for feeder in ranked_feeders.values: - logger.debug("Feeder: {}".format(count)) - count += 1 - - # first step: find node where storage will be installed - - critical_nodes_feeder = _critical_nodes_feeder(edisgo, feeder) - critical_lines_feeder = _critical_lines_feeder(edisgo, feeder) - - # get node the storage will be connected to (in original graph) - battery_node = _find_battery_node( - edisgo, critical_lines_feeder, critical_nodes_feeder - ) - - if battery_node: - # add to output lists - if debug: - feeder_repr.append(repr(feeder)) - storage_path.append( - nx.shortest_path( - edisgo.network.mv_grid.graph, - edisgo.network.mv_grid.station, - battery_node, - ) - ) - - # second step: calculate storage size - - max_storage_size = min(p_storage_remaining, p_storage_max) - p_storage = _calc_storage_size(edisgo, feeder, max_storage_size) - - # if p_storage is greater than or equal to the minimum storage - # power required, do storage integration - if p_storage >= p_storage_min: - # third step: integrate storage - - share = p_storage / storage_nominal_power - edisgo.integrate_storage( - timeseries=storage_timeseries.p * share, - position=battery_node, - voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q * share, - ) - tools.assign_mv_feeder_to_nodes(edisgo.network.mv_grid) - - # get new storage object - storage_obj = [ - _ - for _ in edisgo.network.mv_grid.graph.nodes_by_attribute("storage") - if _ in list(edisgo.network.mv_grid.graph.neighbors(battery_node)) - ][0] - storage_obj_list.append(storage_obj) - - logger.debug( - "Storage with nominal power of {} kW connected to " - "node {} (path to HV/MV station {}).".format( - p_storage, - battery_node, - nx.shortest_path( - battery_node.grid.graph, - battery_node.grid.station, - battery_node, - ), - ) - ) - - # fourth step: check if storage integration reduced network - # reinforcement costs or number of issues - - if check_costs_reduction == "each_feeder": - # calculate new network expansion costs - - grid_expansion_results_new = edisgo.reinforce( - copy_graph=True, timesteps_pfa="snapshot_analysis" - ) - - # fmt: off - total_grid_expansion_costs_new = ( - grid_expansion_results_new.grid_expansion_costs.total_costs.sum( - ) - ) - # fmt: on - - costs_diff = ( - total_grid_expansion_costs - total_grid_expansion_costs_new - ) - - if costs_diff > 0: - logger.debug( - "Storage integration in feeder {} reduced network " - "expansion costs by {} kEuro.".format(feeder, costs_diff) - ) - - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - - total_grid_expansion_costs = total_grid_expansion_costs_new - - else: - logger.debug( - "Storage integration in feeder {} did not reduce " - "network expansion costs (costs increased by {} " - "kEuro).".format(feeder, -costs_diff) - ) - - tools.disconnect_storage(edisgo.network, storage_obj) - p_storage = 0 - - if debug: - storage_repr.append(None) - storage_size.append(0) - - edisgo.integrate_storage( - timeseries=storage_timeseries.p * 0, - position=battery_node, - 
voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q * 0, - ) - tools.assign_mv_feeder_to_nodes(edisgo.network.mv_grid) - - else: - number_parallel_lines_before = _estimate_new_number_of_lines( - critical_lines_feeder - ) - edisgo.analyze() - critical_lines_feeder_new = _critical_lines_feeder(edisgo, feeder) - critical_nodes_feeder_new = _critical_nodes_feeder(edisgo, feeder) - number_parallel_lines = _estimate_new_number_of_lines( - critical_lines_feeder_new - ) - - # if there are critical lines check if number of parallel - # lines was reduced - if not critical_lines_feeder.empty: - diff_lines = ( - number_parallel_lines_before - number_parallel_lines - ) - # if it was not reduced check if there are critical - # nodes and if the number was reduced - if diff_lines <= 0: - # if there are no critical nodes remove storage - if not critical_nodes_feeder: - logger.debug( - "Storage integration in feeder {} did not " - "reduce number of critical lines (number " - "increased by {}), storage " - "is therefore removed.".format(feeder, -diff_lines) - ) - - tools.disconnect_storage(edisgo.network, storage_obj) - p_storage = 0 - - if debug: - storage_repr.append(None) - storage_size.append(0) - - edisgo.integrate_storage( - timeseries=storage_timeseries.p * 0, - position=battery_node, - voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q - * 0, - ) - tools.assign_mv_feeder_to_nodes( - edisgo.network.mv_grid - ) - else: - logger.debug( - "Critical nodes in feeder {} " - "before and after storage integration: " - "{} vs. {}".format( - feeder, - critical_nodes_feeder, - critical_nodes_feeder_new, - ) - ) - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - else: - logger.debug( - "Storage integration in feeder {} reduced " - "number of critical lines.".format(feeder) - ) - - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - - # if there are no critical lines - else: - logger.debug( - "Critical nodes in feeder {} " - "before and after storage integration: " - "{} vs. 
{}".format( - feeder, - critical_nodes_feeder, - critical_nodes_feeder_new, - ) - ) - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - - # fifth step: if there is storage capacity left, rerun - # the past steps for the next feeder in the ranking - # list - p_storage_remaining = p_storage_remaining - p_storage - if not p_storage_remaining > p_storage_min: - break - - else: - logger.debug("No storage integration in feeder {}.".format(feeder)) - - if debug: - storage_repr.append(None) - storage_size.append(0) - - edisgo.integrate_storage( - timeseries=storage_timeseries.p * 0, - position=battery_node, - voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q * 0, - ) - tools.assign_mv_feeder_to_nodes(edisgo.network.mv_grid) - else: - logger.debug( - "No storage integration in feeder {} because there " - "are neither overloading nor voltage issues.".format(feeder) - ) - - if debug: - storage_repr.append(None) - storage_size.append(0) - feeder_repr.append(repr(feeder)) - storage_path.append([]) - - if check_costs_reduction == "once": - # check costs reduction and discard all storages if costs were not - # reduced - grid_expansion_results_new = edisgo.reinforce( - copy_graph=True, timesteps_pfa="snapshot_analysis" - ) - - total_grid_expansion_costs_new = ( - grid_expansion_results_new.grid_expansion_costs.total_costs.sum() - ) - - costs_diff = total_grid_expansion_costs - total_grid_expansion_costs_new - - if costs_diff > 0: - logger.info( - "Storage integration in network {} reduced network " - "expansion costs by {} kEuro.".format(edisgo.network.id, costs_diff) - ) - else: - logger.info( - "Storage integration in network {} did not reduce " - "network expansion costs (costs increased by {} " - "kEuro).".format(edisgo.network.id, -costs_diff) - ) - - for storage in storage_obj_list: - tools.disconnect_storage(edisgo.network, storage) - elif check_costs_reduction == "each_feeder": - # if costs redcution was checked after each storage only give out - # total costs reduction - if total_grid_expansion_costs_new == "not calculated": - costs_diff = 0 - else: - total_grid_expansion_costs = ( - grid_expansion_results_init.grid_expansion_costs.total_costs.sum() - ) - costs_diff = total_grid_expansion_costs - total_grid_expansion_costs_new - - logger.info( - "Storage integration in network {} reduced network " - "expansion costs by {} kEuro.".format(edisgo.network.id, costs_diff) - ) - - if debug: - plots.storage_size( - edisgo.network.mv_grid, - edisgo.network.pypsa, - filename="storage_results_{}.pdf".format(edisgo.network.id), - lopf=False, - ) - storages_df = pd.DataFrame( - { - "path": storage_path, - "repr": storage_repr, - "p_nom": storage_size, - }, - index=feeder_repr, - ) - storages_df.to_csv("storage_results_{}.csv".format(edisgo.network.id)) - - edisgo.network.results.storages_costs_reduction = pd.DataFrame( - { - "grid_expansion_costs_initial": total_grid_expansion_costs, - "grid_expansion_costs_with_storages": total_grid_expansion_costs_new, - }, - index=[edisgo.network.id], - ) From 9bcd6d0c33ad5e2d47b3bd171ddbb53113f0fc91 Mon Sep 17 00:00:00 2001 From: joda9 Date: Tue, 16 Jul 2024 16:16:17 +0200 Subject: [PATCH 3/5] Update pypsa to 0.26.2 and delete pyomo dependencies --- eDisGo_env.yml | 2 +- eDisGo_env_dev.yml | 2 +- rtd_requirements.txt | 3 +-- setup.py | 3 +-- 4 files changed, 4 insertions(+), 6 deletions(-) diff --git a/eDisGo_env.yml b/eDisGo_env.yml index 06e2b03e..7e9a6ef7 100644 --- a/eDisGo_env.yml +++ 
b/eDisGo_env.yml
@@ -16,6 +16,6 @@ dependencies:
   - conda-forge::pygeos
   - conda-forge::contextily
   - conda-forge::descartes
-  - conda-forge::pypsa >= 0.17.0, <= 0.20.1
+  - conda-forge::pypsa == 0.26.2
   - pip:
     - eDisGo
diff --git a/eDisGo_env_dev.yml b/eDisGo_env_dev.yml
index eb9cb30d..3cbea7d3 100644
--- a/eDisGo_env_dev.yml
+++ b/eDisGo_env_dev.yml
@@ -16,6 +16,6 @@ dependencies:
   - conda-forge::pygeos
   - conda-forge::contextily
   - conda-forge::descartes
-  - conda-forge::pypsa >= 0.17.0, <= 0.20.1
+  - conda-forge::pypsa == 0.26.2
   - pip:
     - -e .[dev]
diff --git a/rtd_requirements.txt b/rtd_requirements.txt
index dd3c393d..84a37127 100644
--- a/rtd_requirements.txt
+++ b/rtd_requirements.txt
@@ -8,10 +8,9 @@ multiprocess
 networkx >= 2.5.0
 pandas >= 1.4.0
 plotly
-pyomo >= 6.0
 pypower
 pyproj >= 3.0.0
-pypsa >=0.17.0, <=0.20.1
+pypsa == 0.26.2
 pyyaml
 saio
 scikit-learn
diff --git a/setup.py b/setup.py
index 0ccb320b..5b67f566 100644
--- a/setup.py
+++ b/setup.py
@@ -50,10 +50,9 @@ def read(fname):
     "plotly",
     "pydot",
     "pygeos",
-    "pyomo <= 6.4.2",  # Problem with PyPSA 20.1 fixed in newest PyPSA release
     "pypower",
     "pyproj >= 3.0.0",
-    "pypsa >= 0.17.0, <= 0.20.1",
+    "pypsa == 0.26.2",
     "pyyaml",
     "saio",
     "scikit-learn <= 1.1.1",

From 743b4cb6e22b4eb216c931a097c6dc8717d28237 Mon Sep 17 00:00:00 2001
From: joda9
Date: Tue, 16 Jul 2024 16:16:29 +0200
Subject: [PATCH 4/5] delete unused files

---
 doc/conf.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/doc/conf.py b/doc/conf.py
index 2b2867de..30cafa8a 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -71,8 +71,6 @@
 ]
 # Files to ignore when building api documentation
 autoapi_ignore = [
-    "*/flex_opt/curtailment.py",
-    "*/flex_opt/storage_positioning.py",
     "*/opf/timeseries_reduction.py",
     "*/opf/opf_solutions/*",
 ]

From d1a712c06eb39af25b74b33119b87138ea0e05c7 Mon Sep 17 00:00:00 2001
From: joda9
Date: Tue, 16 Jul 2024 16:19:30 +0200
Subject: [PATCH 5/5] update demandlib version

---
 rtd_requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/rtd_requirements.txt b/rtd_requirements.txt
index 84a37127..c9e102de 100644
--- a/rtd_requirements.txt
+++ b/rtd_requirements.txt
@@ -1,5 +1,5 @@
 dash < 2.9.0
-demandlib
+demandlib < 0.2.0
 egoio >= 0.4.7
 geopy >= 2.0.0
 jupyter_dash