Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

get costs_year.csv instead of costs.csv #1120

Open
wants to merge 7 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 3 additions & 15 deletions Snakefile
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,9 @@ COSTDIR = config["costs_dir"]
load_data_paths = get_load_paths_gegis("data", config)

if config["enable"].get("retrieve_cost_data", True):
COSTS = "resources/" + RDIR + "costs.csv"
COSTS = (
"resources/" + RDIR + "costs_{year}.csv".format(year=config["costs"]["year"])
)
else:
COSTS = "data/costs.csv"
ATLITE_NPROCESSES = config["atlite"].get("nprocesses", 4)
Expand Down Expand Up @@ -406,20 +408,6 @@ if config["enable"].get("retrieve_cost_data", True):
run:
move(input[0], output[0])

rule retrieve_cost_data_flexible:
input:
HTTP.remote(
f"raw.githubusercontent.com/PyPSA/technology-data/{config['costs']['version']}/outputs/costs"
+ "_{planning_horizons}.csv",
keep_local=True,
),
output:
costs=COSTDIR + "costs_{planning_horizons}.csv",
resources:
mem_mb=5000,
run:
move(input[0], output[0])


rule build_demand_profiles:
params:
Expand Down
5 changes: 4 additions & 1 deletion scripts/add_electricity.py
Original file line number Diff line number Diff line change
Expand Up @@ -488,7 +488,10 @@ def attach_hydro(n, costs, ppl):
ror = ppl.query('technology == "Run-Of-River"')
phs = ppl.query('technology == "Pumped Storage"')
hydro = ppl.query('technology == "Reservoir"')
bus_id = ppl["bus"]
if snakemake.params.alternative_clustering:
bus_id = ppl["region_id"]
else:
bus_id = ppl["bus"]

inflow_idx = ror.index.union(hydro.index)
if not inflow_idx.empty:
Expand Down
53 changes: 45 additions & 8 deletions scripts/build_renewable_profiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -356,6 +356,9 @@ def rescale_hydro(plants, runoff, normalize_using_yearly, normalization_year):
logger.info("No bus has installed hydro plants, ignoring normalization.")
return runoff

if snakemake.params.alternative_clustering:
plants = plants.set_index("shape_id")

years_statistics = normalize_using_yearly.index
if isinstance(years_statistics, pd.DatetimeIndex):
years_statistics = years_statistics.year
Expand Down Expand Up @@ -530,6 +533,24 @@ def create_scaling_factor(
# the region should be restricted for non-hydro technologies, as the hydro potential is calculated across hydrobasins which may span beyond the region of the country
cutout = filter_cutout_region(cutout, regions)

if snakemake.params.alternative_clustering:
regions = gpd.GeoDataFrame(
regions.reset_index()
.groupby("shape_id")
.agg(
{
"x": "mean",
"y": "mean",
"country": "first",
"geometry": "first",
"bus": "first",
}
)
.reset_index()
.set_index("bus"),
crs=regions.crs,
)

buses = regions.index

func = getattr(cutout, resource.pop("method"))
Expand All @@ -556,10 +577,17 @@ def create_scaling_factor(
# select busbar whose location (p) belongs to at least one hydrobasin geometry
# if extendable option is true, all buses are included
# otherwise only where hydro powerplants are available are considered
filter_bus_to_consider = regions.index.map(
lambda bus_id: config.get("extendable", False)
| (bus_id in hydro_ppls.bus.values)
)
if snakemake.params.alternative_clustering:
filter_bus_to_consider = regions.index.map(
lambda bus_id: config.get("extendable", False)
| (bus_id in hydro_ppls.region_id.values)
)
### TODO: quickfix. above case and the below case should be unified
if snakemake.params.alternative_clustering == False:
filter_bus_to_consider = regions.index.map(
lambda bus_id: config.get("extendable", False)
| (bus_id in hydro_ppls.bus.values)
)
bus_to_consider = regions.index[filter_bus_to_consider]

# identify subset of buses within the hydrobasins
Expand All @@ -577,10 +605,17 @@ def create_scaling_factor(
columns={"x": "lon", "y": "lat", "country": "countries"}
).loc[bus_in_hydrobasins, ["lon", "lat", "countries", "shape_id"]]

resource["plants"]["installed_hydro"] = [
True if (bus_id in hydro_ppls.bus.values) else False
for bus_id in resource["plants"].index
]
# TODO: these cases shall be fixed by restructuring the alternative clustering procedure
if snakemake.params.alternative_clustering == False:
resource["plants"]["installed_hydro"] = [
True if (bus_id in hydro_ppls.bus.values) else False
for bus_id in resource["plants"].index
]
else:
resource["plants"]["installed_hydro"] = [
True if (bus_id in hydro_ppls.region_id.values) else False
for bus_id in resource["plants"].shape_id.values
]

# get normalization before executing runoff
normalization = None
Expand All @@ -596,6 +631,8 @@ def create_scaling_factor(
else:
# otherwise perform the calculations
inflow = correction_factor * func(capacity_factor=True, **resource)
if snakemake.params.alternative_clustering:
inflow["plant"] = regions.shape_id.loc[inflow["plant"]].values

if "clip_min_inflow" in config:
inflow = inflow.where(inflow >= config["clip_min_inflow"], 0)
Expand Down
Loading