From 47f97cf787f1816fe92a4255574b5cad8720579c Mon Sep 17 00:00:00 2001 From: Joonas Kolstela <43985139+Joonasko@users.noreply.github.com> Date: Fri, 26 Apr 2024 00:01:51 +0300 Subject: [PATCH] Delete wildfires_wise_jupyter_demo.ipynb --- wildfires_wise_jupyter_demo.ipynb | 892 ------------------------------ 1 file changed, 892 deletions(-) delete mode 100644 wildfires_wise_jupyter_demo.ipynb diff --git a/wildfires_wise_jupyter_demo.ipynb b/wildfires_wise_jupyter_demo.ipynb deleted file mode 100644 index e09e75c..0000000 --- a/wildfires_wise_jupyter_demo.ipynb +++ /dev/null @@ -1,892 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "id": "dd697ec6-36d6-4378-8eff-e7ed9b70de85", - "metadata": { - "editable": true, - "slideshow": { - "slide_type": "" - }, - "tags": [] - }, - "source": [ - "# Use case wildfires: Application WISE\n", - "\n", - "## The Canadian Wildfire Intelligence and Simulation Engine (WISE)\n", - "\n", - "Wildfire simulation engine based on the Canadian Prometheus fire spread model.\n", - "Based on the Canadian Forest Fire Danger Rating System (CFFDRS) Fire Weather Index (FWI) and Fire Behaviour Prediction (FBP) systems.\n", - "Development is still ongoing, but the model is allready in operational use by e.g. the Government of the Northwest Territories. A version is also adapted at the Finnish Meteorological Institute.\n", - "\n", - "### Fuel information + topography + meteorological conditions = fire behaviour information\n", - "- FWI: Estimates the moisture of different fuel types.\n", - "- FBP: Estimates fire spread rate and type of fire in different fuel classes.\n", - "\n", - "Input variables consist of:\n", - "- Digital Elevation Model (DEM) (16 x 16 m)\n", - "- Fuel classification information (e.g. 
class 2 = spruce dominated boreal forest, 3 = pine dominated, 101 = non-burning) (16 x 16 m)\n", - "- Meteorological data at a hourly temporal resolution (nearest grid point to ignition working as a virtual weather station)\n", - "\n", - "Output variables consist of:\n", - "- Fire spread at an hourly temporal resolution\n", - "- Maximum flame length in each cell\n", - "- Maximum fire intensity in each cell\n", - "- Percent canopy burned in each cell\n", - "\n", - "# Fire spread calculations in the WISE system\n", - "\n", - "![fbp system](fbp_system.jpg)\n", - "\n", - "\n", - "![WISE propagation](wise_propagation.jpg)\n", - "\n", - "The Huygens' principle to simulate fire growth:\n", - "a - Model selects propagation points along the fire perimeter\n", - "\n", - "b - Fire propagation calculations are done using fuel, topography and weather data\n", - "\n", - "c - New fire perimeter is formed and the loop is repeated\n", - "\n", - "# Code\n", - "\n", - "![WISE workflow](wise_workflow.jpg)\n", - "\n", - "\n", - "# Running the model\n", - "\n", - "## Requested data from the GSV\n", - "\n", - "- Hourly temperature\n", - "- Dewpoint temperature\n", - "- Wind V & U components\n", - "- Precipitation\n", - "\n", - "## run_wildfires_wise.py\n", - "\n", - "- Setting run start and end dates\n", - "- Running the wise.sif container\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "a019431a-4385-43ed-8531-76a6f683ef63", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "running run_wildfires.py\n", - "Dates formatted, running wise container\n", - "launching WISE runs\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "INFO: underlay of /etc/localtime required more than 50 (114) bind mounts\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "running .fgmj modifier\n", - "fgmj file modified\n", - "fgmj file modified\n", - "fgmj file modified\n", - 
"modify_fgmj.py done\n", - "running ncdf_edits_multiarea.py\n", - "ncdf_edits_multiarea.py done, starting modify_fgmj.py\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Warning 1: Self-intersection at or near point 24.154789463039751 64.005224576456456\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "running run_wise.py\n", - "1990,06,06,1990,06,06\n", - "staring ncdf_edits_multiarea.py\n", - "launching WISE runs\n" - ] - }, - { - "data": { - "text/plain": [ - "CompletedProcess(args=['singularity', 'run', '--env', 'ALL_DATES=1990,06,06,1990,06,06', '--bind', '/mnt/d/DESTINE_CATS/wildfire_wise_demo/wise_testset/wise_testset/wise_lumi_files:/testjobs', '--bind', '/mnt/d/DESTINE_CATS/wildfire_wise_demo/wise_testset/wise_testset/wise_outputs:/testjobs/testjobs/area1/Outputs', '--bind', '/mnt/d/DESTINE_CATS/wildfire_wise_demo/wise_testset/wise_testset/wise_outputs:/testjobs/testjobs/area2/Outputs', '--bind', '/mnt/d/DESTINE_CATS/wildfire_wise_demo/wise_testset/wise_testset/wise_outputs:/testjobs/testjobs/area3/Outputs', '--bind', '/mnt/d/DESTINE_CATS/wildfire_wise_demo/wise_testset/wise_testset/temp:/input_data', '/mnt/d/DESTINE_CATS/wildfire_wise_demo/wise_testset/wise_testset/wise_tester.sif'], returncode=0)" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# import modules\n", - "print('running run_wildfires.py')\n", - "import sys\n", - "import argparse\n", - "import os\n", - "import subprocess\n", - "import csv\n", - "\n", - "# parser used in autosubmit workflow\n", - "\n", - "# creating the parser\n", - "#parser = argparse.ArgumentParser(description='Runscript for data notifier job.')\n", - "\n", - "# adding year, month, day and experiment id arguments\n", - "#parser.add_argument('-year_start', required=True, help='Input year start', default=1)\n", - "#parser.add_argument('-month_start', required=True, help='Input month start', default=2)\n", - 
"#parser.add_argument('-day_start', required=True, help='Input day start', default=3)\n", - "\n", - "#parser.add_argument('-year_end', required=True, help='Input year end', default=4)\n", - "#parser.add_argument('-month_end', required=True, help='Input month end', default=5)\n", - "#parser.add_argument('-day_end', required=True, help='Input day end', default=6)\n", - "\n", - "#parser.add_argument('-expid', required=True, help='experiment id', default=7)\n", - "\n", - "# parsing the arguments\n", - "#args = parser.parse_args()\n", - "\n", - "# combining all dates\n", - "#all_dates = ','.join([args.year_start, args.month_start, args.day_start, args.year_end, args.month_end, args.day_end])\n", - "\n", - "# placeholder values for manual runs\n", - "year_start = \"1990\"\n", - "year_end = \"1990\"\n", - "month_start = \"06\"\n", - "month_end = \"06\"\n", - "day_start = \"06\"\n", - "day_end = \"06\"\n", - "\n", - "# create combined variable from start and end dates\n", - "all_dates = ','.join([year_start,month_start,day_start,year_end,month_end,day_end])\n", - "\n", - "# creating a environment variable of the dates\n", - "os.environ['ALL_DATES'] = all_dates\n", - "\n", - "\n", - "print(\"Dates formatted, running wise container\")\n", - "#print(ALL_DATES)\n", - "# build the command for running the singularity container wise.sif\n", - "cmd = [\n", - " 'singularity',\n", - " 'run',\n", - " '--env', f'ALL_DATES={all_dates}',\n", - " '--bind', '/mnt/d/DESTINE_CATS/wildfire_wise_demo/wise_testset/wise_testset/wise_lumi_files:/testjobs',\n", - " '--bind', '/mnt/d/DESTINE_CATS/wildfire_wise_demo/wise_testset/wise_testset/wise_outputs:/testjobs/testjobs/area1/Outputs',\n", - " '--bind', '/mnt/d/DESTINE_CATS/wildfire_wise_demo/wise_testset/wise_testset/wise_outputs:/testjobs/testjobs/area2/Outputs',\n", - " '--bind', '/mnt/d/DESTINE_CATS/wildfire_wise_demo/wise_testset/wise_testset/wise_outputs:/testjobs/testjobs/area3/Outputs',\n", - " '--bind', 
'/mnt/d/DESTINE_CATS/wildfire_wise_demo/wise_testset/wise_testset/temp:/input_data',\n", - " '/mnt/d/DESTINE_CATS/wildfire_wise_demo/wise_testset/wise_testset/wise_tester.sif'\n", - "]\n", - "\n", - "# run the container wise.sif\n", - "print('launching WISE runs')\n", - "subprocess.run(cmd)" - ] - }, - { - "cell_type": "markdown", - "id": "e8474ba1-3083-4c9a-a698-ae7d4c0f8951", - "metadata": {}, - "source": [ - "### run_wise.py\n", - "\n", - "- Combining netcdf files and passing them to the data preprocessing script ncdf_edits_multiarea.py\n", - "- Running the WISE model for the three test areas" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "023131fc-f746-4c8f-a3cf-b4bd66b5d749", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "running run_wise.py\n", - "1990,06,06,1990,06,06\n" - ] - }, - { - "ename": "FileNotFoundError", - "evalue": "[Errno 2] No such file or directory: '/input_data/1990_06_06_T00_to_1990_06_06_T23_2t_hourly_mean.nc'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/xarray/backends/file_manager.py:211\u001b[0m, in \u001b[0;36mCachingFileManager._acquire_with_cache_info\u001b[0;34m(self, needs_lock)\u001b[0m\n\u001b[1;32m 210\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 211\u001b[0m file \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_cache\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_key\u001b[49m\u001b[43m]\u001b[49m\n\u001b[1;32m 212\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mKeyError\u001b[39;00m:\n", - "File 
\u001b[0;32m~/.local/lib/python3.10/site-packages/xarray/backends/lru_cache.py:56\u001b[0m, in \u001b[0;36mLRUCache.__getitem__\u001b[0;34m(self, key)\u001b[0m\n\u001b[1;32m 55\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_lock:\n\u001b[0;32m---> 56\u001b[0m value \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_cache\u001b[49m\u001b[43m[\u001b[49m\u001b[43mkey\u001b[49m\u001b[43m]\u001b[49m\n\u001b[1;32m 57\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_cache\u001b[38;5;241m.\u001b[39mmove_to_end(key)\n", - "\u001b[0;31mKeyError\u001b[0m: [, ('/input_data/1990_06_06_T00_to_1990_06_06_T23_2t_hourly_mean.nc',), 'r', (('clobber', True), ('diskless', False), ('format', 'NETCDF4'), ('persist', False)), 'f82bd796-bcaa-4319-88a8-a57b572c6a44']", - "\nDuring handling of the above exception, another exception occurred:\n", - "\u001b[0;31mFileNotFoundError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[2], line 37\u001b[0m\n\u001b[1;32m 34\u001b[0m precip_name \u001b[38;5;241m=\u001b[39m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m{\u001b[39;00myear_start\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m_\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mmonth_start\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m_\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mday_start\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m_T00_to_\u001b[39m\u001b[38;5;132;01m{\u001b[39;00myear_end\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m_\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mmonth_end\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m_\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mday_end\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m_T23_tp_hourly_mean.nc\u001b[39m\u001b[38;5;124m'\u001b[39m \u001b[38;5;66;03m# precipitation\u001b[39;00m\n\u001b[1;32m 36\u001b[0m \u001b[38;5;66;03m# read the netcdf files and take 
variables\u001b[39;00m\n\u001b[0;32m---> 37\u001b[0m temp_nc \u001b[38;5;241m=\u001b[39m \u001b[43mxr\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mopen_dataset\u001b[49m\u001b[43m(\u001b[49m\u001b[43min_path\u001b[49m\u001b[38;5;241;43m+\u001b[39;49m\u001b[43mtemp_name\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 38\u001b[0m dewpoint_nc \u001b[38;5;241m=\u001b[39m xr\u001b[38;5;241m.\u001b[39mopen_dataset(in_path\u001b[38;5;241m+\u001b[39mdewpoint_name)\n\u001b[1;32m 39\u001b[0m windu_nc \u001b[38;5;241m=\u001b[39m xr\u001b[38;5;241m.\u001b[39mopen_dataset(in_path\u001b[38;5;241m+\u001b[39muwind_name)\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/xarray/backends/api.py:573\u001b[0m, in \u001b[0;36mopen_dataset\u001b[0;34m(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, inline_array, chunked_array_type, from_array_kwargs, backend_kwargs, **kwargs)\u001b[0m\n\u001b[1;32m 561\u001b[0m decoders \u001b[38;5;241m=\u001b[39m _resolve_decoders_kwargs(\n\u001b[1;32m 562\u001b[0m decode_cf,\n\u001b[1;32m 563\u001b[0m open_backend_dataset_parameters\u001b[38;5;241m=\u001b[39mbackend\u001b[38;5;241m.\u001b[39mopen_dataset_parameters,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 569\u001b[0m decode_coords\u001b[38;5;241m=\u001b[39mdecode_coords,\n\u001b[1;32m 570\u001b[0m )\n\u001b[1;32m 572\u001b[0m overwrite_encoded_chunks \u001b[38;5;241m=\u001b[39m kwargs\u001b[38;5;241m.\u001b[39mpop(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124moverwrite_encoded_chunks\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28;01mNone\u001b[39;00m)\n\u001b[0;32m--> 573\u001b[0m backend_ds \u001b[38;5;241m=\u001b[39m \u001b[43mbackend\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mopen_dataset\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 574\u001b[0m \u001b[43m \u001b[49m\u001b[43mfilename_or_obj\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 575\u001b[0m 
\u001b[43m \u001b[49m\u001b[43mdrop_variables\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdrop_variables\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 576\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mdecoders\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 577\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 578\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 579\u001b[0m ds \u001b[38;5;241m=\u001b[39m _dataset_from_backend_dataset(\n\u001b[1;32m 580\u001b[0m backend_ds,\n\u001b[1;32m 581\u001b[0m filename_or_obj,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 591\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs,\n\u001b[1;32m 592\u001b[0m )\n\u001b[1;32m 593\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m ds\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:646\u001b[0m, in \u001b[0;36mNetCDF4BackendEntrypoint.open_dataset\u001b[0;34m(self, filename_or_obj, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, use_cftime, decode_timedelta, group, mode, format, clobber, diskless, persist, lock, autoclose)\u001b[0m\n\u001b[1;32m 625\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mopen_dataset\u001b[39m( \u001b[38;5;66;03m# type: ignore[override] # allow LSP violation, not supporting **kwargs\u001b[39;00m\n\u001b[1;32m 626\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 627\u001b[0m filename_or_obj: \u001b[38;5;28mstr\u001b[39m \u001b[38;5;241m|\u001b[39m os\u001b[38;5;241m.\u001b[39mPathLike[Any] \u001b[38;5;241m|\u001b[39m BufferedIOBase \u001b[38;5;241m|\u001b[39m AbstractDataStore,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 643\u001b[0m autoclose\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m,\n\u001b[1;32m 644\u001b[0m ) 
\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Dataset:\n\u001b[1;32m 645\u001b[0m filename_or_obj \u001b[38;5;241m=\u001b[39m _normalize_path(filename_or_obj)\n\u001b[0;32m--> 646\u001b[0m store \u001b[38;5;241m=\u001b[39m \u001b[43mNetCDF4DataStore\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mopen\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 647\u001b[0m \u001b[43m \u001b[49m\u001b[43mfilename_or_obj\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 648\u001b[0m \u001b[43m \u001b[49m\u001b[43mmode\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 649\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mformat\u001b[39;49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mformat\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 650\u001b[0m \u001b[43m \u001b[49m\u001b[43mgroup\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 651\u001b[0m \u001b[43m \u001b[49m\u001b[43mclobber\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mclobber\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 652\u001b[0m \u001b[43m \u001b[49m\u001b[43mdiskless\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdiskless\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 653\u001b[0m \u001b[43m \u001b[49m\u001b[43mpersist\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpersist\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 654\u001b[0m \u001b[43m \u001b[49m\u001b[43mlock\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlock\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 655\u001b[0m \u001b[43m \u001b[49m\u001b[43mautoclose\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mautoclose\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 656\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 658\u001b[0m store_entrypoint \u001b[38;5;241m=\u001b[39m StoreBackendEntrypoint()\n\u001b[1;32m 659\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m close_on_error(store):\n", - "File 
\u001b[0;32m~/.local/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:409\u001b[0m, in \u001b[0;36mNetCDF4DataStore.open\u001b[0;34m(cls, filename, mode, format, group, clobber, diskless, persist, lock, lock_maker, autoclose)\u001b[0m\n\u001b[1;32m 403\u001b[0m kwargs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mdict\u001b[39m(\n\u001b[1;32m 404\u001b[0m clobber\u001b[38;5;241m=\u001b[39mclobber, diskless\u001b[38;5;241m=\u001b[39mdiskless, persist\u001b[38;5;241m=\u001b[39mpersist, \u001b[38;5;28mformat\u001b[39m\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mformat\u001b[39m\n\u001b[1;32m 405\u001b[0m )\n\u001b[1;32m 406\u001b[0m manager \u001b[38;5;241m=\u001b[39m CachingFileManager(\n\u001b[1;32m 407\u001b[0m netCDF4\u001b[38;5;241m.\u001b[39mDataset, filename, mode\u001b[38;5;241m=\u001b[39mmode, kwargs\u001b[38;5;241m=\u001b[39mkwargs\n\u001b[1;32m 408\u001b[0m )\n\u001b[0;32m--> 409\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mcls\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mmanager\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mgroup\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmode\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmode\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mlock\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlock\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mautoclose\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mautoclose\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:356\u001b[0m, in \u001b[0;36mNetCDF4DataStore.__init__\u001b[0;34m(self, manager, group, mode, lock, autoclose)\u001b[0m\n\u001b[1;32m 354\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_group \u001b[38;5;241m=\u001b[39m group\n\u001b[1;32m 355\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_mode 
\u001b[38;5;241m=\u001b[39m mode\n\u001b[0;32m--> 356\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mformat \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mds\u001b[49m\u001b[38;5;241m.\u001b[39mdata_model\n\u001b[1;32m 357\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_filename \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mds\u001b[38;5;241m.\u001b[39mfilepath()\n\u001b[1;32m 358\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mis_remote \u001b[38;5;241m=\u001b[39m is_remote_uri(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_filename)\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:418\u001b[0m, in \u001b[0;36mNetCDF4DataStore.ds\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 416\u001b[0m \u001b[38;5;129m@property\u001b[39m\n\u001b[1;32m 417\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mds\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m--> 418\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_acquire\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:412\u001b[0m, in \u001b[0;36mNetCDF4DataStore._acquire\u001b[0;34m(self, needs_lock)\u001b[0m\n\u001b[1;32m 411\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_acquire\u001b[39m(\u001b[38;5;28mself\u001b[39m, needs_lock\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m):\n\u001b[0;32m--> 412\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_manager\u001b[38;5;241m.\u001b[39macquire_context(needs_lock) \u001b[38;5;28;01mas\u001b[39;00m root:\n\u001b[1;32m 413\u001b[0m ds \u001b[38;5;241m=\u001b[39m _nc4_require_group(root, 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_group, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_mode)\n\u001b[1;32m 414\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m ds\n", - "File \u001b[0;32m/usr/lib/python3.10/contextlib.py:135\u001b[0m, in \u001b[0;36m_GeneratorContextManager.__enter__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 133\u001b[0m \u001b[38;5;28;01mdel\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39margs, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mkwds, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfunc\n\u001b[1;32m 134\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 135\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mnext\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgen\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 136\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mStopIteration\u001b[39;00m:\n\u001b[1;32m 137\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mgenerator didn\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mt yield\u001b[39m\u001b[38;5;124m\"\u001b[39m) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/xarray/backends/file_manager.py:199\u001b[0m, in \u001b[0;36mCachingFileManager.acquire_context\u001b[0;34m(self, needs_lock)\u001b[0m\n\u001b[1;32m 196\u001b[0m \u001b[38;5;129m@contextlib\u001b[39m\u001b[38;5;241m.\u001b[39mcontextmanager\n\u001b[1;32m 197\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21macquire_context\u001b[39m(\u001b[38;5;28mself\u001b[39m, needs_lock\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m):\n\u001b[1;32m 198\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Context manager for acquiring a 
file.\"\"\"\u001b[39;00m\n\u001b[0;32m--> 199\u001b[0m file, cached \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_acquire_with_cache_info\u001b[49m\u001b[43m(\u001b[49m\u001b[43mneeds_lock\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 200\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 201\u001b[0m \u001b[38;5;28;01myield\u001b[39;00m file\n", - "File \u001b[0;32m~/.local/lib/python3.10/site-packages/xarray/backends/file_manager.py:217\u001b[0m, in \u001b[0;36mCachingFileManager._acquire_with_cache_info\u001b[0;34m(self, needs_lock)\u001b[0m\n\u001b[1;32m 215\u001b[0m kwargs \u001b[38;5;241m=\u001b[39m kwargs\u001b[38;5;241m.\u001b[39mcopy()\n\u001b[1;32m 216\u001b[0m kwargs[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmode\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_mode\n\u001b[0;32m--> 217\u001b[0m file \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_opener\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_args\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 218\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_mode \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mw\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[1;32m 219\u001b[0m \u001b[38;5;66;03m# ensure file doesn't get overridden when opened again\u001b[39;00m\n\u001b[1;32m 220\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_mode \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124ma\u001b[39m\u001b[38;5;124m\"\u001b[39m\n", - "File 
\u001b[0;32msrc/netCDF4/_netCDF4.pyx:2464\u001b[0m, in \u001b[0;36mnetCDF4._netCDF4.Dataset.__init__\u001b[0;34m()\u001b[0m\n", - "File \u001b[0;32msrc/netCDF4/_netCDF4.pyx:2027\u001b[0m, in \u001b[0;36mnetCDF4._netCDF4._ensure_nc_success\u001b[0;34m()\u001b[0m\n", - "\u001b[0;31mFileNotFoundError\u001b[0m: [Errno 2] No such file or directory: '/input_data/1990_06_06_T00_to_1990_06_06_T23_2t_hourly_mean.nc'" - ] - } - ], - "source": [ - "#!/usr/bin/python3\n", - "# import modules\n", - "print('running run_wise.py')\n", - "import sys\n", - "import argparse\n", - "import os\n", - "import xarray as xr\n", - "import subprocess\n", - "import csv\n", - "\n", - "# defining file input / output paths\n", - "in_path = '/input_data/'\n", - "out_path = '/input_data/'\n", - "\n", - "# reading the run dates file\n", - "#with open('/testjobs/run_dates.txt', 'r') as file:\n", - "# lines = file.read().splitlines()\n", - "\n", - "# using the environment variable to get run dates\n", - "dates_str = os.getenv('ALL_DATES')\n", - "print(dates_str)\n", - "if dates_str:\n", - " year_start, month_start, day_start, year_end, month_end, day_end = dates_str.split(',')\n", - "else:\n", - " print(\"Environment variable 'ALL_DATES' not found or is invalid.\")\n", - " sys.exit(1)\n", - " \n", - "\n", - "# Provide the data file name for all variables (weekly)\n", - "temp_name = f'{year_start}_{month_start}_{day_start}_T00_to_{year_end}_{month_end}_{day_end}_T23_2t_hourly_mean.nc' # temperature\n", - "dewpoint_name = f'{year_start}_{month_start}_{day_start}_T00_to_{year_end}_{month_end}_{day_end}_T23_2d_hourly_mean.nc' # dewpoint temperature\n", - "uwind_name = f'{year_start}_{month_start}_{day_start}_T00_to_{year_end}_{month_end}_{day_end}_T23_10u_hourly_mean.nc' # u wind\n", - "vwind_name = f'{year_start}_{month_start}_{day_start}_T00_to_{year_end}_{month_end}_{day_end}_T23_10v_hourly_mean.nc' # v wind\n", - "precip_name = 
f'{year_start}_{month_start}_{day_start}_T00_to_{year_end}_{month_end}_{day_end}_T23_tp_hourly_mean.nc' # precipitation\n", - "\n", - "# read the netcdf files and take variables\n", - "temp_nc = xr.open_dataset(in_path+temp_name)\n", - "dewpoint_nc = xr.open_dataset(in_path+dewpoint_name)\n", - "windu_nc = xr.open_dataset(in_path+uwind_name)\n", - "windv_nc = xr.open_dataset(in_path+vwind_name)\n", - "precip_nc = xr.open_dataset(in_path+precip_name)\n", - "\n", - "windu_var = windu_nc['10u']\n", - "windv_var = windv_nc['10v']\n", - "temp_var = temp_nc['2t']\n", - "dewpoint_var = dewpoint_nc['2d']\n", - "precip_var = precip_nc['tp']\n", - "\n", - "# combine all variables into singular file\n", - "combined_nc = xr.Dataset({\n", - " '10u': windu_var,\n", - " '10v': windv_var,\n", - " '2t': temp_var,\n", - " '2d': dewpoint_var,\n", - " 'tp': precip_var,\n", - "})\n", - "\n", - "file_name = out_path+'combined_ncdf.nc'\n", - "\n", - "# write the new netcdf file\n", - "combined_nc.to_netcdf(file_name)\n", - "\n", - "# current working dir\n", - "current_directory = os.getcwd()\n", - "\n", - "# get the group id\n", - "directory_stat = os.stat(current_directory)\n", - "\n", - "# get group ownership\n", - "group_owner_gid = directory_stat.st_gid\n", - "\n", - "parent_directory = os.path.dirname(file_name)\n", - "parent_gid = os.stat(parent_directory).st_gid\n", - "\n", - "# change group ownership\n", - "os.chown(file_name, -1, parent_gid)\n", - "\n", - "\n", - "# run the ncdf_edits_multiarea.py script\n", - "cmd = ['python3','/python_scripts/ncdf_edits_multiarea.py']\n", - "print('staring ncdf_edits_multiarea.py')\n", - "subprocess.run(cmd + [out_path+'combined_ncdf.nc'])\n", - "\n", - "# run the WISE model for the three test areas in Finland\n", - "print('launching WISE runs')\n", - "cmd = ['wise','-r', '4', '-f', '0', '-t', '/testjobs/testjobs/area1/job.fgmj']\n", - "subprocess.run(cmd)\n", - "cmd = ['wise','-r', '4', '-f', '0', '-t', 
'/testjobs/testjobs/area2/job.fgmj']\n", - "subprocess.run(cmd)\n", - "cmd = ['wise','-r', '4', '-f', '0', '-t', '/testjobs/testjobs/area3/job.fgmj']\n", - "subprocess.run(cmd)\n" - ] - }, - { - "cell_type": "markdown", - "id": "819b3130-25a1-4836-9d2b-57ce7ace277b", - "metadata": {}, - "source": [ - "### ncdf_edits_multiarea.py\n", - "\n", - "- Weather data preprocessing (unit changes, relative humidity and wind speed and direction calculations) and weather.txt file creation for model runs\n", - "- running the modify_fgmj.py script" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "780b27ba-8392-4ddd-82eb-91602cb34f2d", - "metadata": { - "editable": true, - "slideshow": { - "slide_type": "" - }, - "tags": [] - }, - "outputs": [], - "source": [ - "#!/usr/bin/python3\n", - "# import modules\n", - "print('running ncdf_edits_multiarea.py')\n", - "import os\n", - "import argparse\n", - "import numpy as np\n", - "import xarray as xr\n", - "import pandas as pd\n", - "import subprocess\n", - "import sys\n", - "from datetime import datetime\n", - "\n", - "# load netcdf dataset\n", - "dataset = xr.open_dataset('/input_data/combined_ncdf.nc')\n", - "\n", - "# calculate wind speed and direction from 10u and 10v components\n", - "wind_speed = np.sqrt(dataset['10u']**2 + dataset['10v']**2)\n", - "dataset['wind_speed'] = wind_speed\n", - "\n", - "wind_direction_rad = np.arctan2(dataset['10v'],dataset['10u'])\n", - "wind_direction_deg = np.degrees(wind_direction_rad)\n", - "wind_direction_deg = (wind_direction_deg + 360) % 360\n", - "dataset['wind_direction'] = wind_direction_deg\n", - "\n", - "# calculate relative humidity and convert temperatures to Celsius\n", - "temperature_celsius = dataset['2t'] - 273.15 # Convert from Kelvin to Celsius\n", - "dewpoint_celsius = dataset['2d'] - 273.15 # Convert from Kelvin to Celsius\n", - "relative_humidity = 100 * (np.exp((17.625 * dewpoint_celsius) / (243.04 + dewpoint_celsius)) / np.exp((17.625 * 
temperature_celsius) / (243.04 + temperature_celsius)))\n", - "\n", - "dataset['relative_humidity'] = relative_humidity\n", - "dataset['temperature'] = temperature_celsius\n", - "\n", - "# set the ignition coordinates for the three test areas\n", - "area1_lat = 64.007044\n", - "area1_lon = 24.152986\n", - "\n", - "area2_lat = 63.050609\n", - "area2_lon = 29.889436\n", - "\n", - "area3_lat = 63.433700\n", - "area3_lon = 30.540338\n", - "\n", - "# select only closest cell from netcdf to each ignition location\n", - "nearest_cell1 = dataset.sel(lat=area1_lat,lon=area1_lon,method='nearest')\n", - "nearest_cell2 = dataset.sel(lat=area2_lat,lon=area2_lon,method='nearest')\n", - "nearest_cell3 = dataset.sel(lat=area3_lat,lon=area3_lon,method='nearest')\n", - "\n", - "df1 = nearest_cell1.to_dataframe()\n", - "df2 = nearest_cell2.to_dataframe()\n", - "df3 = nearest_cell3.to_dataframe()\n", - "\n", - "# make required dataframe edits\n", - "df1.reset_index(inplace=True)\n", - "df1.set_index('time',inplace=True)\n", - "df2.reset_index(inplace=True)\n", - "df2.set_index('time',inplace=True)\n", - "df3.reset_index(inplace=True)\n", - "df3.set_index('time',inplace=True)\n", - "\n", - "df1['date'] = df1.index.date\n", - "df1['hour'] = df1.index.time\n", - "df2['date'] = df2.index.date\n", - "df2['hour'] = df2.index.time\n", - "df3['date'] = df3.index.date\n", - "df3['hour'] = df3.index.time\n", - "\n", - "# remove unused variables\n", - "variables_to_drop = ['10v','10u','2t','2d']\n", - "df1 = df1.drop(variables_to_drop, axis = 1)\n", - "df2 = df2.drop(variables_to_drop, axis = 1)\n", - "df3 = df3.drop(variables_to_drop, axis = 1)\n", - "\n", - "# create datetime series for scenario start and end times (start at each day 10:00 and end same day 21:00)\n", - "combined_datetime_series = pd.to_datetime(df1.index.date) + pd.to_timedelta([time.hour for time in df1.index], unit='h')\n", - "combined_datetime_series = pd.Series(combined_datetime_series)\n", - "\n", - "# reset the index to 
default integer index\n", - "combined_datetime_series = combined_datetime_series.reset_index(drop=True)\n", - "#print(combined_datetime_series)\n", - "# select scenario start and end dates\n", - "scenario_start = str(combined_datetime_series.iloc[1])\n", - "scenario_end = str(combined_datetime_series.iloc[-2])\n", - "scenario_start = scenario_start.replace(' ','T')\n", - "scenario_end = scenario_end.replace(' ','T')\n", - "scenario_start = scenario_start+':00'\n", - "scenario_end = scenario_end+':00'\n", - "\n", - "dates_at_10 = combined_datetime_series[combined_datetime_series.apply(lambda x: x.time() == pd.to_datetime('10:00:00').time())]\n", - "dates_at_21 = combined_datetime_series[combined_datetime_series.apply(lambda x: x.time() == pd.to_datetime('21:00:00').time())]\n", - "\n", - "# select the last three dates for model run\n", - "dates_at_10 = str(dates_at_10.iloc[0])\n", - "dates_at_10 = dates_at_10.replace(' ','T')\n", - "dates_at_21 = str(dates_at_21.iloc[-1])\n", - "dates_at_21 = dates_at_21.replace(' ','T')\n", - "dates_at_10 = dates_at_10+':00'\n", - "dates_at_21 = dates_at_21+':00'\n", - "\n", - "df1.reset_index(inplace=True)\n", - "df2.reset_index(inplace=True)\n", - "df3.reset_index(inplace=True)\n", - "\n", - "# set column order\n", - "new_column_order = ['date', 'hour', 'temperature', 'relative_humidity', 'wind_direction', 'wind_speed', 'tp']\n", - "df1 = df1[new_column_order]\n", - "df2 = df2[new_column_order]\n", - "df3 = df3[new_column_order]\n", - "\n", - "# Rename the columns\n", - "df1.rename(columns={\n", - " 'date': 'HOURLY',\n", - " 'hour': 'HOUR',\n", - " 'temperature': 'TEMP',\n", - " 'relative_humidity': 'RH',\n", - " 'wind_direction': 'WD',\n", - " 'wind_speed': 'WS',\n", - " 'tp': 'PRECIP',\n", - "}, inplace=True)\n", - "\n", - "df2.rename(columns={\n", - " 'date': 'HOURLY',\n", - " 'hour': 'HOUR',\n", - " 'temperature': 'TEMP',\n", - " 'relative_humidity': 'RH',\n", - " 'wind_direction': 'WD',\n", - " 'wind_speed': 'WS',\n", - " 
'tp': 'PRECIP',\n", - "}, inplace=True)\n", - "\n", - "df3.rename(columns={\n", - " 'date': 'HOURLY',\n", - " 'hour': 'HOUR',\n", - " 'temperature': 'TEMP',\n", - " 'relative_humidity': 'RH',\n", - " 'wind_direction': 'WD',\n", - " 'wind_speed': 'WS',\n", - " 'tp': 'PRECIP',\n", - "}, inplace=True)\n", - "\n", - "# convert 'date' to datetime format\n", - "df1['HOURLY'] = pd.to_datetime(df1['HOURLY'], format='%d/%m/%Y')\n", - "df2['HOURLY'] = pd.to_datetime(df2['HOURLY'], format='%d/%m/%Y')\n", - "df3['HOURLY'] = pd.to_datetime(df3['HOURLY'], format='%d/%m/%Y')\n", - "\n", - "# convert 'hour' to integers\n", - "df1['HOUR'] = df1['HOUR'].apply(lambda x: x.hour).astype(int)\n", - "df2['HOUR'] = df2['HOUR'].apply(lambda x: x.hour).astype(int)\n", - "df3['HOUR'] = df3['HOUR'].apply(lambda x: x.hour).astype(int)\n", - "\n", - "# round all values to one decimal place\n", - "df1 = df1.round(1)\n", - "df2 = df2.round(1)\n", - "df3 = df3.round(1)\n", - "\n", - "# format the 'date' column as 'dd/mm/yyyy'\n", - "df1['HOURLY'] = df1['HOURLY'].dt.strftime('%d/%m/%Y')\n", - "df2['HOURLY'] = df2['HOURLY'].dt.strftime('%d/%m/%Y')\n", - "df3['HOURLY'] = df3['HOURLY'].dt.strftime('%d/%m/%Y')\n", - "\n", - "# save the new .txt format weather files to their designated job folders for WISE runs\n", - "file_path = '/testjobs/testjobs/'\n", - "file_name1 = f'{file_path}area1/Inputs/weather.txt'\n", - "file_name2 = f'{file_path}area2/Inputs/weather.txt'\n", - "file_name3 = f'{file_path}area3/Inputs/weather.txt'\n", - "df1.to_csv((file_name1), sep =',', index =False)\n", - "df2.to_csv((file_name2), sep =',', index =False)\n", - "df3.to_csv((file_name3), sep =',', index =False)\n", - "\n", - "# current working dir\n", - "current_directory = os.getcwd()\n", - "\n", - "# get the group id\n", - "directory_stat = os.stat(current_directory)\n", - "\n", - "# get group ownership\n", - "group_owner_gid = directory_stat.st_gid\n", - "\n", - "parent_directory = os.path.dirname(file_name1)\n", - 
parent_gid = os.stat(parent_directory).st_gid

# Give the weather files the same group as their Inputs/ directory
# (gid -1 leaves the file owner unchanged).
for weather_file in (file_name1, file_name2, file_name3):
    os.chown(weather_file, -1, parent_gid)

# Hand the scenario window and ignition points over to the fgmj modifier.
cmd = ['python3', '/python_scripts/modify_fgmj.py']
arguments = [str(v) for v in (
    scenario_start, scenario_end, dates_at_10, dates_at_21,
    area1_lat, area1_lon, area2_lat, area2_lon, area3_lat, area3_lon,
)]
print('ncdf_edits_multiarea.py done, starting modify_fgmj.py')
subprocess.run(cmd + arguments)

# %% [markdown]
# ## modify_fgmj.py
#
# - Defining necessary settings for each model run (ignition locations and
#   times, file locations, used fuel types, requested output files)

# %% modify_fgmj.py  (originally a standalone script with a /usr/bin/python3 shebang)
print('running .fgmj modifier')
import os
import sys
import json
import copy
from datetime import datetime

# Scenario window and ignition lat/lon handed over on the command line
# by run_wildfires.py (see the subprocess.run call above).
scenario_start = sys.argv[1]
scenario_end = sys.argv[2]
ignition_start = sys.argv[3]
ignition_end = sys.argv[4]  # NOTE(review): parsed but never used below
ignition_y_1 = float(sys.argv[5])
ignition_x_1 = float(sys.argv[6])
ignition_y_2 = float(sys.argv[7])
ignition_x_2 = float(sys.argv[8])
ignition_y_3 = float(sys.argv[9])
ignition_x_3 = float(sys.argv[10])

# Scenario names, one per test area.
scen_name_1 = 'scen_kalajoki'
scen_name_2 = 'scen_koli'
scen_name_3 = 'scen_lieksa'

# Read the job template ONCE and give each area its own independent deep
# copy (the original re-opened and re-parsed the file three times).
fgmj_path = '/testjobs/testjobs/job.fgmj'
with open(fgmj_path, 'r') as f:
    fgmj_template = json.load(f)
fgmj_data1 = copy.deepcopy(fgmj_template)
fgmj_data2 = copy.deepcopy(fgmj_template)
fgmj_data3 = copy.deepcopy(fgmj_template)

# Times used when filling the templates.  As in the original, the scenario
# start is keyed off the ignition start argument (argv[3]), overriding
# argv[1]; end/output times come from scenario_end.
scenario_start = ignition_start
local_start_time = ignition_start
start_time = ignition_start
end_time = scenario_end
output_time = scenario_end


def replace_in_dict(data, find, replace):
    """Recursively substitute `find` -> `replace` in every string value of a
    nested dict/list structure, in place."""
    if isinstance(data, dict):
        items = list(data.items())
    elif isinstance(data, list):
        items = list(enumerate(data))
    else:
        return
    for key, value in items:
        if isinstance(value, (dict, list)):
            replace_in_dict(value, find, replace)
        elif isinstance(value, str):
            data[key] = value.replace(find, replace)


def create_job(data_in, job_name, scen_name, ign_lon, ign_lat):
    """Fill one WISE job template with this run's times and ignition point
    and write the result to `job_name`.

    Uses the module-level time variables set above; `ign_lon`/`ign_lat` are
    the ignition point coordinates in degrees.
    """
    scenario = data_in['project']['scenarios']['scenarioData'][0]
    scenario['startTime']['time'] = scenario_start
    scenario['endTime']['time'] = scenario_end

    daily = scenario['temporalConditions']['daily'][0]
    daily['localStartTime']['time'] = local_start_time
    daily['startTime']['time'] = start_time
    daily['endTime']['time'] = end_time

    ignition = data_in['project']['ignitions']['ignitionData'][0]
    ignition['startTime']['time'] = ignition_start
    point = ignition['ignitions']['ignitions'][0]['polygon']['polygon']['points'][0]
    point['x']['value'] = ign_lon
    point['y']['value'] = ign_lat

    # All seven requested output grids export at the end of the scenario.
    # (Indexing by range keeps the original's IndexError if the template
    # has fewer than seven grids.)
    for idx in range(7):
        data_in['project']['outputs']['grids'][idx]['exportTime']['time'] = output_time

    perimeter = data_in['project']['outputs']['vectors'][0]['perimeterTime']
    perimeter['startTime']['time'] = ignition_start
    perimeter['endTime']['time'] = output_time

    data_in['project']['stations']['wxStationData'][0]['streams'][0]['condition']['startTime']['time'] = scenario_start

    # Rename the template's 'scen0' placeholders to this run's scenario name.
    replace_in_dict(data_in, 'scen0', scen_name + '_' + ignition_start[0:10])

    with open(job_name, 'w') as f:
        json.dump(data_in, f, indent=2)
    print('fgmj file modified')


# Timestamp the scenario names so repeated runs do not collide.
current_datetime = datetime.now()
formatted_datetime = current_datetime.strftime("%Y-%m-%d_%H:%M")

scen_name_1 = scen_name_1 + "_" + str(formatted_datetime)
scen_name_2 = scen_name_2 + "_" + str(formatted_datetime)
scen_name_3 = scen_name_3 + "_" + str(formatted_datetime)


# Edit the job.fgmj files and save them in their respective directories.
file_name1 = '/testjobs/testjobs/area1/job.fgmj'
'/testjobs/testjobs/area1/job.fgmj'
# Destination job files, one per test area.
file_name1 = '/testjobs/testjobs/area1/job.fgmj'
file_name2 = '/testjobs/testjobs/area2/job.fgmj'
file_name3 = '/testjobs/testjobs/area3/job.fgmj'

create_job(fgmj_data1, file_name1, scen_name_1, ignition_x_1, ignition_y_1)
create_job(fgmj_data2, file_name2, scen_name_2, ignition_x_2, ignition_y_2)
create_job(fgmj_data3, file_name3, scen_name_3, ignition_x_3, ignition_y_3)

# Align each job file's group with its area directory (gid -1 keeps the
# current owner unchanged).
parent_directory = os.path.dirname(file_name1)
parent_gid = os.stat(parent_directory).st_gid
for job_file in (file_name1, file_name2, file_name3):
    os.chown(job_file, -1, parent_gid)

print('modify_fgmj.py done')

# %% [markdown]
# # Results
#
# - Hourly fire propagation vector files (.kml)
# - Maximum flame length (m), maximum fire intensity (kW), maximum crown
#   fraction burned (%) in each cell (16 x 16 m resolution) raster files (.tif)

# %% result plotting
import geopandas as gpd
import rasterio
from rasterio.plot import show
import matplotlib.pyplot as plt
from matplotlib.colors import LinearSegmentedColormap
import numpy as np

file_path = "/mnt/d/DESTINE_CATS/wildfire_wise_demo/wise_testset/wise_testset/wise_outputs/scen_kalajoki_2024-04-25_1539_1990-06-06/"


def plot_vector(filepath):
    """Read a KML fire-perimeter file and plot it with lat/lon axes."""
    gdf = gpd.read_file(filepath, driver='KML')
    gdf.plot(color='blue', edgecolor='black')
    plt.title('Fire Perimeter from KML')
    plt.xlabel('Longitude')
    plt.ylabel('Latitude')
    plt.show()


def plot_raster(filepath, plot_title, value_label='Fuel Consumption (units)'):
    """Plot a WISE output raster cropped to its valid-data extent.

    Parameters
    ----------
    filepath : str
        Path to a GeoTIFF produced by a WISE run.
    plot_title : str
        Title shown above the plot.
    value_label : str, optional
        Colorbar label.  Defaults to the original hard-coded label so
        existing calls render unchanged; pass a variable-specific label
        (e.g. 'Maximum flame length (m)') for correct annotation.
    """
    with rasterio.open(filepath) as src:
        data = src.read(1)  # first band as a 2-D array

        # Build a validity mask from the file's declared nodata value,
        # falling back to finiteness if none is declared.
        nodata = src.nodata
        if nodata is not None:
            mask = data != nodata
        else:
            mask = np.isfinite(data)

        rows, cols = np.where(mask)
        if not rows.size or not cols.size:
            print("No valid data cells found.")
            return

        # Crop to the bounding box of the valid cells.
        row_min, row_max = rows.min(), rows.max()
        col_min, col_max = cols.min(), cols.max()
        # astype(float) so NaN can be assigned even for integer rasters
        # (assigning NaN into an int array is a bug in the original).
        data_cropped = data[row_min:row_max + 1, col_min:col_max + 1].astype(float)

        # Hide remaining nodata cells; prefer the file's declared value over
        # the old hard-coded -9999 (used only as a fallback).
        nodata_value = nodata if nodata is not None else -9999
        data_cropped[data_cropped == nodata_value] = np.nan

        # Shift the geotransform to the cropped window's origin.
        transform_cropped = src.transform * rasterio.Affine.translation(col_min, row_min)

        fig, ax = plt.subplots()
        cax = ax.imshow(data_cropped, cmap="viridis",
                        extent=rasterio.plot.plotting_extent(src, transform_cropped))
        ax.set_title(plot_title)
        ax.set_xlabel('Pixel X Coordinate')
        ax.set_ylabel('Pixel Y Coordinate')
        plt.colorbar(cax, label=value_label)
        plt.show()


# Plot the run's perimeter and the three output rasters
# (units per the Results section above).
plot_vector(file_path + 'perim.kml')
plot_raster(file_path + 'crown_fuel_consumed.tif', "Crown fuel consumed",
            value_label='Crown fraction burned (%)')
plot_raster(file_path + 'max_flame_length.tif', "Maximum flame length",
            value_label='Maximum flame length (m)')
plot_raster(file_path + 'max_intensity.tif', "Maximum fire intensity",
            value_label='Maximum fire intensity (kW)')

# %% [markdown]
# # Test result from Western Finland Koli

# %% [markdown]
# ![WISE koli example](wise.jpg)

# %% [markdown]
# # Test result from daily fire spread simulations over 1.6.2000 - 31.8.2000
# in the Kalajoki test area in Western Finland.

# %% [markdown]
# ![WISE workflow](kalajoki_demo_coarse.jpg)

# %% [markdown]
# # Phase 2 goals:
# - Users can bring their own fuel and topography information. Possibility to add fire breaks.
# - Better capabilities of modelling areal fire risk with e.g.
randomized ignition locations, different climate and/or land use scenarios" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "80275364-9137-474c-92a1-a9dcb97c16ca", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.12" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -}