diff --git a/config.distribution.yaml b/config.distribution.yaml
index 0f63e98..d76472a 100644
--- a/config.distribution.yaml
+++ b/config.distribution.yaml
@@ -17,7 +17,8 @@ enable:
   retrieve_databundle: true
   retrieve_cost_data: true
   download_osm_data: true
-  download_osm_buildings: false
+  download_osm_buildings: true
+  download_osm_method: overpass # or earth_osm
   # If "build_cutout" : true # requires cds API key
   # https://cds.climate.copernicus.eu/api-how-to
   # More information
diff --git a/doc/release_notes.rst b/doc/release_notes.rst
index de24c5f..eff1c3c 100644
--- a/doc/release_notes.rst
+++ b/doc/release_notes.rst
@@ -11,6 +11,9 @@ Release Notes
 Upcoming Release
 ================
 
+* The energy demand time series can now be determined using a new methodology based on social inputs that integrates with the RAMP tool. `PR #55 `__
+
+* Automated downloading of buildings within the microgrid is now supported through the new download_osm_data rule. `PR #52 `__ and `PR #56 `__
diff --git a/scripts/cluster_buildings.py b/scripts/cluster_buildings.py
index 30ef1a7..f94d80f 100644
--- a/scripts/cluster_buildings.py
+++ b/scripts/cluster_buildings.py
@@ -19,14 +19,14 @@
 _logger.setLevel(logging.INFO)
 
 
-def buildings_classification(input_file, crs, house_area_limit):
+def buildings_classification(input_file, crs):
     """
     Filters the data contained in all_raw_building, selecting only Polygon
     elements, after which the plan area is calculated for each building with
     the specified coordinate system and adds the information to the geodataframe.
     """
     microgrid_buildings = gpd.read_file(input_file)
-    microgrid_buildings.rename(columns={"tags.building": "tags_building"}, inplace=True)
+    microgrid_buildings.rename(columns={"building": "tags_building"}, inplace=True)
     microgrid_buildings = microgrid_buildings.loc[
         microgrid_buildings.geometry.type != "Point"
     ]
@@ -54,9 +54,7 @@
     a dataframe with all of the buildings divided into clusters,
     a csv file where for each cluster the building types are counted
     """
-    microgrid_buildings = buildings_classification(
-        input_filepath, crs, house_area_limit
-    )
+    microgrid_buildings = buildings_classification(input_filepath, crs)
     centroids_building = [
         (row.geometry.centroid.x, row.geometry.centroid.y)
         for row in microgrid_buildings.itertuples()
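
The column rename above follows from the change of data source: the Overpass writer added in this PR stores raw OSM tag keys (such as building) as GeoJSON properties, while the earth_osm output carried the flattened tags.building column. A minimal sketch of the classification step on toy data (geometries and tag values below are illustrative, not from the PR):

# Toy illustration of buildings_classification's filtering; data is made up.
import geopandas as gpd
from shapely.geometry import Point, Polygon

toy = gpd.GeoDataFrame(
    {"building": ["house", "school"]},
    geometry=[Polygon([(0, 0), (0, 1), (1, 1)]), Point(2, 2)],
    crs="EPSG:4326",
)
toy = toy.rename(columns={"building": "tags_building"})
toy = toy[toy.geometry.geom_type != "Point"]  # same filter as in the rule
print(toy.to_crs(epsg=3857).geometry.area)  # plan area in a metric CRS
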
+ """ + + for item in coordinates.keys(): + + overpass_query = f""" + [out:json]; + way["{features}"]({coordinates[item]["lat_min"]}, {coordinates[item]["lon_min"]}, {coordinates[item]["lat_max"]}, {coordinates[item]["lon_max"]}); + (._;>;); + out body; + """ + + try: + # Send request to API Overpass + response = requests.get(url, params={"data": overpass_query}) + response.raise_for_status() + data = response.json() + # Create a dictionary to map nodes with their coordinates + node_coordinates = { + node["id"]: [node["lon"], node["lat"]] + for node in data["elements"] + if node["type"] == "node" + } + # Choose the output path to save the file. + outpath = Path(path) / f"all_raw_building.geojson" + # outpath = Path(path) / f"all_raw_building_{item}.geojson" #ATTENTION: Currently the other parts of the code ( clean earth osm data,cluster building, and others) have not been updated to run on multiple microgrids simultaneously. For now we do not exploit this to run the code. As soon as we update the other parts of the code as well, we will exploit it. + outpath.parent.mkdir(parents=True, exist_ok=True) + # Write the geojson file + with open(outpath, "w") as f: + f.write('{"type":"FeatureCollection","features":[\n') + features = [] + for element in data["elements"]: + if element["type"] == "way" and "nodes" in element: + coordinates = [ + node_coordinates[node_id] + for node_id in element["nodes"] + if node_id in node_coordinates + ] + properties = {"id": element["id"]} + if "tags" in element: + properties.update(element["tags"]) + feature = { + "type": "Feature", + "properties": properties, + "geometry": { + "type": "Polygon", + "coordinates": [coordinates], + }, + } + features.append(json.dumps(feature, separators=(",", ":"))) + f.write(",\n".join(features)) + f.write("\n]}\n") + except (json.JSONDecodeError, requests.exceptions.RequestException) as e: + logger.error(f"Error downloading osm data for the specified coordinates") + + if __name__ == "__main__": if "snakemake" not in globals(): from _helpers_dist import mock_snakemake, sets_path_to_root @@ -81,29 +152,37 @@ def convert_iso_to_geofk( countries = snakemake.config["countries"] country_list = country_list_to_geofk(countries) - eo.save_osm_data( - region_list=country_list, - primary_name="building", - feature_list=["ALL"], - update=False, - mp=False, - data_dir=store_path_data, - out_dir=store_path_resources, - out_format=["csv", "geojson"], - out_aggregate=True, - ) - - out_path = Path.joinpath(store_path_resources, "out") - out_formats = ["csv", "geojson"] - new_files = os.listdir(out_path) - - for f in out_formats: - new_file_name = Path.joinpath(store_path_resources, f"all_raw_building.{f}") - old_file = list(Path(out_path).glob(f"*building.{f}")) - - if not old_file: - with open(new_file_name, "w") as f: - pass - else: - logger.info(f"Move {old_file[0]} to {new_file_name}") - shutil.move(old_file[0], new_file_name) + if snakemake.config["enable"]["download_osm_method"] == "earth_osm": + eo.save_osm_data( + region_list=country_list, + primary_name="building", + feature_list=["ALL"], + update=False, + mp=False, + data_dir=store_path_data, + out_dir=store_path_resources, + out_format=["csv", "geojson"], + out_aggregate=True, + ) + + out_path = Path.joinpath(store_path_resources, "out") + out_formats = ["csv", "geojson"] + new_files = os.listdir(out_path) + + for f in out_formats: + new_file_name = Path.joinpath(store_path_resources, f"all_raw_building.{f}") + old_file = list(Path(out_path).glob(f"*building.{f}")) + + if not 
diff --git a/test/config.distribution.test.yaml b/test/config.distribution.test.yaml
index 04cd4a0..c3c0116 100644
--- a/test/config.distribution.test.yaml
+++ b/test/config.distribution.test.yaml
@@ -25,9 +25,12 @@ enable:
   retrieve_databundle: true
   retrieve_cost_data: true
   download_osm_data: true
-  download_osm_buildings: false
-  # If "build_cutout" : true # requires cds API key https://cds.climate.copernicus.eu/api-how-to
-  # More information https://atlite.readthedocs.io/en/latest/introduction.html#datasets
+  download_osm_buildings: true
+  download_osm_method: overpass # or earth_osm
+  # If "build_cutout" : true # requires cds API key
+  # https://cds.climate.copernicus.eu/api-how-to
+  # More information
+  # https://atlite.readthedocs.io/en/latest/introduction.html#datasets
   build_cutout: false
   build_natura_raster: false # If True, then build_natura_raster can be run
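
Finally, a parsed view of the enable section above as the dispatch in download_osm_data.py sees it; this is a sketch of the branching logic, not code from the PR, and the printed actions just summarize the two paths:

# Parsed view of the YAML "enable" section; the dispatch keys off
# download_osm_method.
config = {
    "enable": {
        "download_osm_data": True,
        "download_osm_buildings": True,
        "download_osm_method": "overpass",  # or "earth_osm"
    }
}

method = config["enable"]["download_osm_method"]
if method == "overpass":
    print("download buildings per microgrid bounding box via the Overpass API")
elif method == "earth_osm":
    print("download buildings per country via earth_osm")
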