Skip to content

Commit

Permalink
Merge pull request #56 from Margherita-Capitani/finalize_emre
Browse files Browse the repository at this point in the history
Finalize_Emre_PR
  • Loading branch information
davide-f authored Dec 11, 2024
2 parents d800196 + f3df960 commit 09c0ab5
Show file tree
Hide file tree
Showing 5 changed files with 119 additions and 35 deletions.
3 changes: 2 additions & 1 deletion config.distribution.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@ enable:
retrieve_databundle: true
retrieve_cost_data: true
download_osm_data: true
download_osm_buildings: false
download_osm_buildings: true
download_osm_method: overpass # or earth_osm
# If "build_cutout" : true # requires cds API key
# https://cds.climate.copernicus.eu/api-how-to
# More information
Expand Down
3 changes: 3 additions & 0 deletions doc/release_notes.rst
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,9 @@ Release Notes
Upcoming Release
================

* The energy demand time series can now be determined using a new methodology based on social inputs that integrates with the RAMP tool. `PR #55 <https://github.com/pypsa-meets-earth/pypsa-distribution/pull/55>`__

* Automated downloading of buildings within the microgrid is now supported through the new download_osm_data rule. `PR #52 <https://github.com/pypsa-meets-earth/pypsa-distribution/pull/52>`__ and `PR #56 <https://github.com/pypsa-meets-earth/pypsa-distribution/pull/56>`__



Expand Down
8 changes: 3 additions & 5 deletions scripts/cluster_buildings.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,14 +19,14 @@
_logger.setLevel(logging.INFO)


def buildings_classification(input_file, crs, house_area_limit):
def buildings_classification(input_file, crs):
"""
Filters the data contained in all_raw_building, selecting only Polygon elements,
after which the plan area is calculated for each building with the specified coordinate system
and adds the information to the geodataframe.
"""
microgrid_buildings = gpd.read_file(input_file)
microgrid_buildings.rename(columns={"tags.building": "tags_building"}, inplace=True)
microgrid_buildings.rename(columns={"building": "tags_building"}, inplace=True)
microgrid_buildings = microgrid_buildings.loc[
microgrid_buildings.geometry.type != "Point"
]
Expand Down Expand Up @@ -54,9 +54,7 @@ def get_central_points_geojson_with_buildings(
a dataframe with all of the buildings divided into clusters,
a csv file where for each cluster the building types are counted
"""
microgrid_buildings = buildings_classification(
input_filepath, crs, house_area_limit
)
microgrid_buildings = buildings_classification(input_filepath, crs)
centroids_building = [
(row.geometry.centroid.x, row.geometry.centroid.y)
for row in microgrid_buildings.itertuples()
Expand Down
131 changes: 105 additions & 26 deletions scripts/download_osm_data.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
# -*- coding: utf-8 -*-
import json
import logging
import os
import shutil
from pathlib import Path

import requests
import yaml
from _helpers_dist import configure_logging, create_logger, read_osm_config
from earth_osm import eo
Expand Down Expand Up @@ -65,6 +67,75 @@ def convert_iso_to_geofk(
return iso_code


def retrieve_osm_data_geojson(coordinates, features, url, path):
    """
    Download building footprints for each microgrid region via the Overpass API
    and write them to a GeoJSON FeatureCollection on disk.

    The region bounding boxes are defined in the config.yaml file.

    Parameters
    ----------
    coordinates : dict
        Mapping of microgrid name -> dict with "lat_min", "lon_min",
        "lat_max" and "lon_max" bounding-box entries.
    features : str
        The OSM tag key queried (e.g. "building").
    url : str
        Overpass API endpoint used for the query.
    path : str
        Directory where the GeoJSON file will be saved.
    """

    for item in coordinates.keys():

        overpass_query = f"""
        [out:json];
        way["{features}"]({coordinates[item]["lat_min"]}, {coordinates[item]["lon_min"]}, {coordinates[item]["lat_max"]}, {coordinates[item]["lon_max"]});
        (._;>;);
        out body;
        """

        try:
            # Send request to the Overpass API
            response = requests.get(url, params={"data": overpass_query})
            response.raise_for_status()
            data = response.json()
            # Map node ids to [lon, lat] so way geometries can be resolved below.
            node_coordinates = {
                node["id"]: [node["lon"], node["lat"]]
                for node in data["elements"]
                if node["type"] == "node"
            }
            # Choose the output path to save the file.
            outpath = Path(path) / "all_raw_building.geojson"
            # outpath = Path(path) / f"all_raw_building_{item}.geojson" #ATTENTION: Currently the other parts of the code ( clean earth osm data,cluster building, and others) have not been updated to run on multiple microgrids simultaneously. For now we do not exploit this to run the code. As soon as we update the other parts of the code as well, we will exploit it.
            outpath.parent.mkdir(parents=True, exist_ok=True)
            # Write the geojson file.
            # NOTE: local names deliberately do NOT reuse the `features` /
            # `coordinates` parameter names — rebinding them here would corrupt
            # the query and the bbox lookup on the next loop iteration.
            with open(outpath, "w") as f:
                f.write('{"type":"FeatureCollection","features":[\n')
                geojson_features = []
                for element in data["elements"]:
                    if element["type"] == "way" and "nodes" in element:
                        way_coords = [
                            node_coordinates[node_id]
                            for node_id in element["nodes"]
                            if node_id in node_coordinates
                        ]
                        properties = {"id": element["id"]}
                        if "tags" in element:
                            properties.update(element["tags"])
                        feature = {
                            "type": "Feature",
                            "properties": properties,
                            "geometry": {
                                "type": "Polygon",
                                "coordinates": [way_coords],
                            },
                        }
                        geojson_features.append(
                            json.dumps(feature, separators=(",", ":"))
                        )
                f.write(",\n".join(geojson_features))
                f.write("\n]}\n")
        except (json.JSONDecodeError, requests.exceptions.RequestException) as e:
            # Report which region failed and why; the original message dropped `e`.
            logger.error(f"Error downloading osm data for region '{item}': {e}")


if __name__ == "__main__":
if "snakemake" not in globals():
from _helpers_dist import mock_snakemake, sets_path_to_root
Expand All @@ -81,29 +152,37 @@ def convert_iso_to_geofk(
countries = snakemake.config["countries"]
country_list = country_list_to_geofk(countries)

eo.save_osm_data(
region_list=country_list,
primary_name="building",
feature_list=["ALL"],
update=False,
mp=False,
data_dir=store_path_data,
out_dir=store_path_resources,
out_format=["csv", "geojson"],
out_aggregate=True,
)

out_path = Path.joinpath(store_path_resources, "out")
out_formats = ["csv", "geojson"]
new_files = os.listdir(out_path)

for f in out_formats:
new_file_name = Path.joinpath(store_path_resources, f"all_raw_building.{f}")
old_file = list(Path(out_path).glob(f"*building.{f}"))

if not old_file:
with open(new_file_name, "w") as f:
pass
else:
logger.info(f"Move {old_file[0]} to {new_file_name}")
shutil.move(old_file[0], new_file_name)
if snakemake.config["enable"]["download_osm_method"] == "earth_osm":
eo.save_osm_data(
region_list=country_list,
primary_name="building",
feature_list=["ALL"],
update=False,
mp=False,
data_dir=store_path_data,
out_dir=store_path_resources,
out_format=["csv", "geojson"],
out_aggregate=True,
)

out_path = Path.joinpath(store_path_resources, "out")
out_formats = ["csv", "geojson"]
new_files = os.listdir(out_path)

for f in out_formats:
new_file_name = Path.joinpath(store_path_resources, f"all_raw_building.{f}")
old_file = list(Path(out_path).glob(f"*building.{f}"))

if not old_file:
with open(new_file_name, "w") as f:
pass
else:
logger.info(f"Move {old_file[0]} to {new_file_name}")
shutil.move(old_file[0], new_file_name)

elif snakemake.config["enable"]["download_osm_method"] == "overpass":
microgrids_list = snakemake.config["microgrids_list"]
features = "building"
overpass_url = "https://overpass-api.de/api/interpreter"
output_file = Path.cwd() / "resources" / RDIR / "osm" / "raw"
retrieve_osm_data_geojson(microgrids_list, features, overpass_url, output_file)
9 changes: 6 additions & 3 deletions test/config.distribution.test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,12 @@ enable:
retrieve_databundle: true
retrieve_cost_data: true
download_osm_data: true
download_osm_buildings: false
# If "build_cutout" : true # requires cds API key https://cds.climate.copernicus.eu/api-how-to
# More information https://atlite.readthedocs.io/en/latest/introduction.html#datasets
download_osm_buildings: true
download_osm_method: overpass # or earth_osm
# If "build_cutout" : true # requires cds API key
# https://cds.climate.copernicus.eu/api-how-to
# More information
# https://atlite.readthedocs.io/en/latest/introduction.html#datasets
build_cutout: false
build_natura_raster: false # If True, then build_natura_raster can be run

Expand Down

0 comments on commit 09c0ab5

Please sign in to comment.