diff --git a/.zenodo.json b/.zenodo.json index c6a731981f..89a81326cb 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -380,6 +380,11 @@ "affiliation": "DLR, Germany", "name": "Bonnet, Pauline", "orcid": "0000-0003-3780-0784" + }, + { + "affiliation": "MetOffice, UK", + "name": "Munday, Gregory", + "orcid": "0000-0003-4750-9923" } ], "description": "ESMValTool: A community diagnostic and performance metrics tool for routine evaluation of Earth system models in CMIP.", diff --git a/CITATION.cff b/CITATION.cff index cd621538b7..7ed624d1d7 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -391,6 +391,11 @@ authors: family-names: Bonnet given-names: Pauline orcid: "https://orcid.org/0000-0003-3780-0784" + - + affiliation: "MetOffice, UK" + family-names: Munday + given-names: Gregory + orcid: "https://orcid.org/0000-0003-4750-9923" cff-version: 1.2.0 date-released: 2023-12-20 diff --git a/doc/sphinx/source/recipes/figures/climate_patterns/patterns.png b/doc/sphinx/source/recipes/figures/climate_patterns/patterns.png new file mode 100644 index 0000000000..396fd78830 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/climate_patterns/patterns.png differ diff --git a/doc/sphinx/source/recipes/index.rst b/doc/sphinx/source/recipes/index.rst index 0f0ce7667d..e18ada0fd7 100644 --- a/doc/sphinx/source/recipes/index.rst +++ b/doc/sphinx/source/recipes/index.rst @@ -32,6 +32,7 @@ Atmosphere :maxdepth: 1 recipe_miles + recipe_climate_patterns recipe_clouds recipe_cmug_h2o recipe_crem diff --git a/doc/sphinx/source/recipes/recipe_climate_patterns.rst b/doc/sphinx/source/recipes/recipe_climate_patterns.rst new file mode 100644 index 0000000000..f7336c91c4 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_climate_patterns.rst @@ -0,0 +1,107 @@ +.. _recipes_climate_patterns: + +Generating Climate Patterns from CMIP6 Models +============================================= + +Overview +-------- + +The recipe recipe_climate_patterns generates climate patterns from CMIP6 model +datasets. + +.. note:: + The regrid setting in the recipe is set to a 2.5x3.75 grid. This is done to + match the current resolution in the IMOGEN-JULES model, but can be + adjusted with no issues for a finer/coarser patterns grid. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in esmvaltool/recipes/ + +* recipe_climate_patterns.yml + +Diagnostics are stored in esmvaltool/diag_scripts/climate_patterns/ + +* climate_patterns.py: generates climate patterns from input datasets +* sub_functions.py: set of sub functions to assist with driving scripts +* plotting.py: contains all plotting functions for driving scripts + + +User settings in recipe +----------------------- + +#. Script climate_patterns.py + + *Required settings for script* + + None + + *Optional settings for script* + + * jules_mode: output jules-specific var names + .nc files + * parallelise: parallelise over models or not + * area: calculate the patterns globally, or over land only + + *Required settings for variables* + + * short_name + * additional_datasets + + *Optional settings for variables* + + None + + *Required settings for preprocessor* + + * monthly_statistics: converts data to mean monthly data + + *Optional settings for preprocessor* + + * regrid: regrids data + + +Variables +--------- + +#. 
Script climate_patterns.py + +* tasmax (atmos, monthly, longitude latitude time) +* tasmin (atmos, monthly, longitude latitude time) +* tas (atmos, monthly, longitude latitude time) +* huss (atmos, monthly, longitude latitude time) +* pr (atmos, monthly, longitude latitude time) +* sfcWind (atmos, monthly, longitude latitude time) +* ps (atmos, monthly, longitude latitude time) +* rsds (atmos, monthly, longitude latitude time) +* rlds (atmos, monthly, longitude latitude time) + + +Observations and reformat scripts +--------------------------------- + +None + +References +---------- + +* Huntingford, C., Cox, P. An analogue model to derive additional climate + change scenarios from existing GCM simulations. + Climate Dynamics 16, 575–586 (2000). https://doi.org/10.1007/s003820000067 + +* Mathison, C. T. et al. A rapid application emissions-to-impacts tool + for scenario assessment: Probabilistic Regional Impacts from Model patterns + and Emissions (PRIME). + EGUsphere [preprint], (2024). https://doi.org/10.5194/egusphere-2023-2932 + +Example plots +------------- + +.. _fig_climate_patterns_2: +.. figure:: /recipes/figures/climate_patterns/patterns.png + :align: center + :width: 80% + + Patterns generated for CMIP6 models, gridded view. Patterns are shown per + variable, for the month of January. \ No newline at end of file diff --git a/esmvaltool/config-references.yml b/esmvaltool/config-references.yml index b5f43bc911..199dc671e0 100644 --- a/esmvaltool/config-references.yml +++ b/esmvaltool/config-references.yml @@ -404,6 +404,11 @@ authors: institute: BSC, Spain orcid: github: emchamarro + munday_gregory: + name: Munday, Gregory + institute: MetOffice, UK + orcid: https://orcid.org/0000-0003-4750-9923 + github: mo-gregmunday nikulin_grigory: name: Nikulin, Grigory institute: SMHI, Sweden diff --git a/esmvaltool/diag_scripts/climate_patterns/climate_patterns.py b/esmvaltool/diag_scripts/climate_patterns/climate_patterns.py new file mode 100644 index 0000000000..7fdb98a293 --- /dev/null +++ b/esmvaltool/diag_scripts/climate_patterns/climate_patterns.py @@ -0,0 +1,658 @@ +# (C) Crown Copyright 2022-2024, Met Office. +"""Diagnostic script to build climate patterns from CMIP6 models. + +Description +----------- +Builds patterns, anomaly and climatology cubes from CMIP6 models. +This diagnostic needs preprocessed mean monthly cubes, with no +gridding requirements. Default re-grid specification exists to +decrease CPU-load and run-time. 
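+Each pattern is the linear regression slope of a variable's local monthly +anomaly against the global (or land) mean near-surface temperature anomaly, +computed separately for each calendar month.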
+ +Author +------ +Gregory Munday (Met Office, UK) + + +Configuration options in recipe +------------------------------- +jules_mode: bool, optional (default: false) + options: true, false + def: outputs extra data (anomaly, climatology) per variable + to drive JULES-IMOGEN configuration +parallelise: bool, optional (default: false) + options: true, false + def: parallelises code to run N models at once +area: str, optional (default: global) + options: global, land + def: area over which to calculate climate patterns +""" + +import logging +from pathlib import Path +import os + +import iris +import iris.coord_categorisation +import iris.cube +import numpy as np +import sklearn.linear_model + +import sub_functions as sf +from plotting import ( + plot_timeseries, + plot_patterns +) +from esmvalcore.preprocessor import ( + area_statistics, + extract_time, + climate_statistics +) +from esmvaltool.diag_scripts.shared import run_diagnostic + +logger = logging.getLogger(Path(__file__).stem) + + +def calculate_climatology(cube, syr=1850, eyr=1889): + """Handle aggregation to make climatology. + + Parameters + ---------- + cube : cube + cube loaded from config dictionary + syr : int + set climatology start year + eyr : int + set climatology end year + + Returns + ------- + cube_aggregated : cube + 40 year climatology cube from syr-eyr (default 1850-1889) + """ + cube_40yr = extract_time( + cube, + start_year=syr, + start_month=1, + start_day=1, + end_year=eyr, + end_month=12, + end_day=31 + ) + cube_aggregated = climate_statistics(cube_40yr, 'mean', 'month') + + return cube_aggregated + + +def diurnal_temp_range(cubelist): + """Calculate diurnal range from monthly max and min temperatures. + + Parameters + ---------- + cubelist : cubelist + cubelist of tasmin and tasmax + + Returns + ------- + range_cube : cube + cube of calculated diurnal range + """ + range_cube = cubelist[0] - cubelist[1] + + # check in case cubes are wrong way around + if np.mean(range_cube.data) < 0: + range_cube = -range_cube + + range_cube.rename("Diurnal Range") + range_cube.var_name = "range_tl1" + + return range_cube + + +def calculate_diurnal_range(clim_list, ts_list): + """Facilitate diurnal range calculation and appending. + + Parameters + ---------- + clim_list : cubelist + cubelist of climatology cubes + ts_list : cubelist + cubelist of standard timeseries cubes + + Returns + ------- + clim_list_final : cubelist + cubelist of climatology cubes including diurnal range + ts_list_final : cubelist + cubelist of standard timeseries cubes including diurnal range + """ + temp_range_list_clim = iris.cube.CubeList([]) + temp_range_list_ts = iris.cube.CubeList([]) + comb_list = [clim_list, ts_list] + + for cube_list in comb_list: + for cube in cube_list: + if (cube.var_name in ("tasmax", "tasmin")) and cube in clim_list: + temp_range_list_clim.append(cube) + elif (cube.var_name in ("tasmax", "tasmin")) and cube in ts_list: + temp_range_list_ts.append(cube) + else: + pass + + derived_diurnal_clim = diurnal_temp_range(temp_range_list_clim) + derived_diurnal_ts = diurnal_temp_range(temp_range_list_ts) + + # append diurnal range to lists + clim_list_final, ts_list_final = append_diurnal_range( + derived_diurnal_clim, derived_diurnal_ts, clim_list, ts_list + ) + + return clim_list_final, ts_list_final + + +def append_diurnal_range(derived_diurnal_clim, + derived_diurnal_ts, + clim_list, + ts_list): + """Append diurnal range to cubelists. 
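+ The tasmax and tasmin cubes used to derive the range are dropped from + the returned cubelists.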
+ + Parameters + ---------- + derived_diurnal_clim : cube + derived diurnal climatology cube + derived_diurnal_ts : cube + derived diurnal timeseries cube + clim_list : cubelist + existing climatology cubelist, no range + ts_list : cubelist + existing timeseries cubelist, no range + + Returns + ------- + clim_list_final : cubelist + cubelist of climatology cubes including diurnal range + ts_list_final : cubelist + cubelist of standard timeseries cubes including diurnal range + """ + # creating cube list without tasmax or tasmin + # (since we just wanted the diurnal range) + clim_list_final = iris.cube.CubeList([]) + ts_list_final = iris.cube.CubeList([]) + + for cube in clim_list: + if cube.var_name not in ("tasmax", "tasmin"): + clim_list_final.append(cube) + + for cube in ts_list: + if cube.var_name not in ("tasmax", "tasmin"): + ts_list_final.append(cube) + + clim_list_final.append(derived_diurnal_clim) + ts_list_final.append(derived_diurnal_ts) + + return clim_list_final, ts_list_final + + +def calculate_anomaly(clim_list, ts_list): + """Calculate variables as anomalies, and add diurnal range as a variable. + + Parameters + ---------- + clim_list : cubelist + cubelist of climatology variables + ts_list : cubelist + cubelist of standard variable timeseries + + Returns + ------- + clim_list_final : cubelist + cubelist of clim. vars, inc. diurnal range + anom_list_final : cubelist + cubelist of anomaly vars, inc. diurnal range + """ + # calculate diurnal temperature range cube + clim_list_final, ts_list_final = calculate_diurnal_range( + clim_list, + ts_list + ) + + anom_list_final = ts_list_final.copy() + + # calculate the anomaly by subtracting the monthly climatology from + # the time series + for i, _ in enumerate(ts_list_final): + i_months = ( + anom_list_final[i].coord("month_number").points - 1 + ) # -1 because months are numbered 1..12 + anom_list_final[i].data -= clim_list_final[i][i_months].data + + return clim_list_final, anom_list_final + + +def regression(tas, cube_data, area, ocean_frac=None, land_frac=None): + """Calculate coeffs of regression between global surf temp and variable. + + Parameters + ---------- + tas : cube + near-surface air temperature + cube_data : arr + cube.data array of a variable + area: str + area over which to calculate patterns + ocean_frac: cube + gridded ocean fraction + land_frac: cube + gridded land fraction + + Returns + ------- + slope_array : arr + array of grid cells with same shape as initial cube, + containing the regression slope + """ + if area == "land": + # calculate average warming over land + tas_data = sf.area_avg_landsea( + tas, ocean_frac, land_frac, land=True, return_cube=False + ) + else: + # calculate global average warming + tas_data = area_statistics(tas, 'mean').data + + # Reshape cube for regression + cube_reshaped = cube_data.reshape(cube_data.shape[0], -1) + + # Perform linear regression on valid values + model = sklearn.linear_model.LinearRegression( + fit_intercept=False, copy_X=True + ) + model.fit(tas_data.reshape(-1, 1), cube_reshaped) + + # Extract regression coefficients + slopes = model.coef_ + + # Reshape the regression coefficients back to the shape of the grid cells + slope_array = slopes.reshape(cube_data.shape[1:]) + + return slope_array + + +def create_cube(tas_cube, ssp_cube, array, month_number, units=None): + """Create a new cube from existing metadata, and new array data.
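+ The month number is attached as an imogen_drive auxiliary coordinate, + allowing the twelve monthly pattern cubes to be merged into a single + cube later on.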
+ + Parameters + ---------- + tas_cube: cube + near-surface air temperature + ssp_cube: cube + cube of a given variable + array: array + output array from regression + month_number: int + month related to the regression array + units: str + units related to the regression variable + + Returns + ------- + cube: cube + cube filled with regression array and metadata + + """ + # assigning dim_coords + coord1 = tas_cube.coord(contains_dimension=1) + coord2 = tas_cube.coord(contains_dimension=2) + dim_coords_and_dims = [(coord1, 0), (coord2, 1)] + + # assigning aux_coord + coord_month = iris.coords.AuxCoord(month_number, var_name="imogen_drive") + aux_coords_and_dims = [(coord_month, ())] + + cube = sf.rename_variables(ssp_cube, has_orig_vars=False) + + # creating cube + cube = iris.cube.Cube( + array, + units=units, + dim_coords_and_dims=dim_coords_and_dims, + aux_coords_and_dims=aux_coords_and_dims, + var_name=cube.var_name, + standard_name=cube.standard_name, + ) + + return cube + + +def calculate_regressions( + anom_list, + area, + ocean_frac=None, + land_frac=None, + yrs=86 +): + """Facilitate the calculation of regression coeffs (climate patterns). + + Also creates a new cube of patterns per variable. + + Parameters + ---------- + anom_list : cubelist + cube list of variables as anomalies + area: str + area over which to calculate patterns + ocean_frac: cube + gridded ocean fraction + land_frac: cube + gridded land fraction + yrs : int + int to specify length of scenario + + Returns + ------- + regr_var_list : cubelist + cube list of newly created regression slope value cubes, for each var + """ + regr_var_list = iris.cube.CubeList([]) + + for cube in anom_list: + if cube.var_name == "tl1_anom": + # convert years to months when selecting + tas = cube[-yrs * 12:] + + for cube in anom_list: + cube = cube[-yrs * 12:] + month_list = iris.cube.CubeList([]) + + # extracting months, regressing, and merging + for i in range(1, 13): + month_cube = cube.extract(iris.Constraint(imogen_drive=i)) + month_tas = tas.extract(iris.Constraint(imogen_drive=i)) + + if area == 'land': + regr_array = regression( + month_tas, + month_cube.data, + area=area, + ocean_frac=ocean_frac, + land_frac=land_frac, + ) + else: + regr_array = regression( + month_tas, + month_cube.data, + area=area, + ) + + if cube.var_name in ("swdown_anom", "lwdown_anom"): + units = "W m-2 K-1" + else: + units = cube.units / tas.units + + # create and append cube of regression values + month_list.append( + create_cube(tas, cube.copy(), regr_array, i, units=units) + ) + + month_list = month_list.merge_cube() + regr_var_list.append(month_list) + + return regr_var_list + + +def cube_saver(list_of_cubelists, work_path, name_list, jules_mode): + """Save desired cubelists to work_dir, depending on switch settings.
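+ With jules_mode on, the climatology, anomaly and pattern cubelists are + all written; otherwise only the renamed pattern cubes are saved.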
+ + Parameters + ---------- + list_of_cubelists : list + list containing desired cubelists + work_path : path + path to work_dir, to save cubelists + name_list : list + list of filename strings for saving + jules_mode : str + switch option passed through by ESMValTool config dict + + Returns + ------- + None + """ + if jules_mode: + for i in range(0, 3): + iris.save( + list_of_cubelists[i], + os.path.join(work_path, name_list[i]) + ) + else: + for i, cube in enumerate(list_of_cubelists[2]): + list_of_cubelists[2][i] = sf.rename_variables( + cube, has_orig_vars=False + ) + iris.save( + list_of_cubelists[2], + os.path.join(work_path, name_list[2]) + ) + + +def save_outputs( + cfg, + list_of_cubelists, + model +): + """Save data and plots to relevant directories. + + Parameters + ---------- + cfg: dict + Dictionary passed in by ESMValTool preprocessors + list_of_cubelists: list + List of cubelists to save + model : str + model name + + Returns + ------- + None + """ + work_path, plot_path = sf.make_model_dirs( + cfg, model + ) + + name_list = [ + "climatology_variables.nc", + "anomaly_variables.nc", + "patterns.nc", + ] + + # saving data + plotting + if cfg["jules_mode"] is True: + plot_timeseries( + list_of_cubelists[0], + plot_path, + "40 Year Climatologies, 1850-1889", + "Climatologies" + ) + plot_timeseries( + list_of_cubelists[1], + plot_path, + "Anomaly Timeseries, 1850-2100", + "Anomalies" + ) + plot_patterns(list_of_cubelists[2], plot_path) + cube_saver( + list_of_cubelists, + work_path, + name_list, + jules_mode=cfg["jules_mode"] + ) + + else: + plot_patterns(list_of_cubelists[2], plot_path) + cube_saver( + list_of_cubelists, + work_path, + name_list, + jules_mode=cfg["jules_mode"] + ) + + +def get_provenance_record(): + """Create a provenance record describing the diagnostic data and plot. + + Parameters + ---------- + None + + Returns + ------- + record : dict + provenance record + """ + record = { + "caption": ["Generating Climate Patterns from CMIP6 Models"], + "statistics": ["mean", "other"], + "domains": ["global"], + "themes": ["carbon"], + "realms": ["atmos"], + "authors": ["munday_gregory"], + } + + return record + + +def extract_data_from_cfg(cfg, model): + """Extract model data from the cfg. + + Parameters + ---------- + cfg: dict + Dictionary passed in by ESMValTool preprocessors + model : str + model name + + Returns + ------- + clim_list: cubelist + cubelist of climatologies + ts_list: cubelist + cubelist of spatial timeseries + sftlf: cube + land fraction cube + """ + clim_list = iris.cube.CubeList([]) + ts_list = iris.cube.CubeList([]) + + for dataset in cfg["input_data"].values(): + if dataset["dataset"] == model: + input_file = dataset["filename"] + + # preparing single cube + cube = sf.load_cube(input_file) + + if dataset["exp"] != "historical-ssp585": + sftlf = cube + else: + # appending to timeseries list + ts_list.append(cube) + + # making climatology + clim_cube = calculate_climatology(cube) + clim_list.append(clim_cube) + + if cfg["area"] == 'land': + return clim_list, ts_list, sftlf + + return clim_list, ts_list, None + + +def patterns(model, cfg): + """Driving function for script, taking in model data and saving parameters. 
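+ Loads the preprocessed cubes for one model, derives anomalies relative + to the 1850-1889 climatology, regresses them against the warming + timeseries and saves the resulting patterns.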
+ + Parameters + ---------- + model : str + model name + cfg: dict + Dictionary passed in by ESMValTool preprocessors + + Returns + ------- + None + """ + clim_list, ts_list, sftlf = extract_data_from_cfg(cfg, model) + + if cfg["area"] == 'land': + # calculate land/ocean_fracs + ocean_frac, land_frac = sf.ocean_fraction_calc(sftlf) + + # calculate anomaly over historical + ssp timeseries + clim_list_final, anom_list_final = calculate_anomaly(clim_list, ts_list) + + for i, cube in enumerate(clim_list_final): + clim_list_final[i] = sf.rename_variables( + cube, has_orig_vars=True, new_extension="_clim" + ) + anom_list_final[i] = sf.rename_variables( + anom_list_final[i], has_orig_vars=True, new_extension="_anom" + ) + + if cfg["area"] == 'land': + regressions = calculate_regressions( + anom_list_final, + cfg["area"], + ocean_frac=ocean_frac, + land_frac=land_frac + ) + else: + regressions = calculate_regressions( + anom_list_final, cfg["area"] + ) + + list_of_cubelists = [clim_list_final, anom_list_final, regressions] + + save_outputs(cfg, list_of_cubelists, model) + + # Provenance Logging, removed due to sporadic errors. Fix later. + + # model_work_dir, _ = sf.make_model_dirs( + # cfg, + # model + # ) + + # provenance_record = get_provenance_record() + # path = os.path.join(model_work_dir, "patterns.nc") + # with ProvenanceLogger(cfg) as provenance_logger: + # provenance_logger.log(path, provenance_record) + + +def main(cfg): + """Take in driving data with parallelisation options. + + Parameters + ---------- + cfg : dict + the global config dictionary, passed by ESMValTool. + + Returns + ------- + None + """ + input_data = cfg["input_data"].values() + parallelise = cfg["parallelise"] + + models = [] + for mod in input_data: + model = mod["dataset"] + if model not in models: + models.append(model) + + if parallelise is True: + sf.parallelise(patterns)(models, cfg) + else: + for model in models: + patterns(model, cfg) + + +if __name__ == "__main__": + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/climate_patterns/plotting.py b/esmvaltool/diag_scripts/climate_patterns/plotting.py new file mode 100644 index 0000000000..447055a392 --- /dev/null +++ b/esmvaltool/diag_scripts/climate_patterns/plotting.py @@ -0,0 +1,128 @@ +# (C) Crown Copyright 2022-2024, Met Office. +"""Script containing plotting functions for driving scripts. + +Author +------ +Gregory Munday (Met Office, UK) +""" +import os + +import iris.quickplot as qplt +import matplotlib.pyplot as plt +import numpy as np + +from esmvalcore.preprocessor import area_statistics + + +def subplot_positions(j): + """Determine sub-plot positions in a 3x3 figure. + + Parameters + ---------- + j : int + index of cube position in cubelist + + Returns + ------- + x_pos : int + x subplot position + y_pos : int + y subplot position + """ + if j <= 2: + y_pos = j + x_pos = 0 + elif 2 < j <= 5: + y_pos = j - 3 + x_pos = 1 + else: + y_pos = j - 6 + x_pos = 2 + + return x_pos, y_pos + + +def plot_patterns(cube_list, plot_path): + """Plot climate patterns for jules_mode: off. 
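+ For each variable, plots a map of the January pattern and a monthly + timeseries of the pattern at a single grid cell.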
+ + Parameters + ---------- + cube_list : cubelist + input cubelist for plotting patterns per variable + plot_path : path + path to plot_dir + + Returns + ------- + None + """ + fig, axis = plt.subplots(3, 3, figsize=(14, 12), sharex=True) + fig.suptitle("Patterns from a random grid-cell", fontsize=18, y=0.98) + + plt.figure(figsize=(14, 12)) + plt.subplots_adjust(hspace=0.5) + plt.suptitle("Global Patterns, January", fontsize=18, y=0.95) + + for j, cube in enumerate(cube_list): + # determining plot positions + x_pos, y_pos = subplot_positions(j) + months = np.arange(1, 13) + # plots patterns for an arbitrary grid cell + axis[x_pos, y_pos].plot(months, cube[:, 50, 50].data) + axis[x_pos, + y_pos].set_ylabel(str(cube.var_name) + " / " + str(cube.units)) + if j > 5: + axis[x_pos, y_pos].set_xlabel("Time") + + # January patterns + plt.subplot(3, 3, j + 1) + qplt.pcolormesh(cube[0]) + + plt.tight_layout() + plt.savefig(os.path.join(plot_path, "Patterns"), dpi=300) + plt.close() + + fig.tight_layout() + fig.savefig(os.path.join(plot_path, "Patterns Timeseries"), dpi=300) + + +def plot_timeseries(cubelist, plot_path, title, save_name): + """Plot timeseries and maps of climatologies, anomalies and patterns. + + Parameters + ---------- + cubelist : cubelist + input cubelist for plotting per variable + plot_path : path + path to plot_dir + title: str + title for the figure + save_name: str + name for the saved figure + + Returns + ------- + None + """ + fig, axs = plt.subplots(3, 3, figsize=(14, 12), sharex=True) + fig.suptitle(f"{title}", fontsize=18, y=0.98) + + for j, cube in enumerate(cubelist): + # determining plot positions + x_pos, y_pos = subplot_positions(j) + yrs = (1850 + np.arange(cube.shape[0])).astype("float") + months = np.arange(1, 13) + + # anomaly timeseries + avg_cube = area_statistics(cube, 'mean').data + if save_name == "Climatologies": + axs[x_pos, y_pos].plot(months, avg_cube) + else: + axs[x_pos, y_pos].plot(yrs, avg_cube) + axs[x_pos, + y_pos].set_ylabel(cube.long_name + " / " + str(cube.units)) + if j > 5: + axs[x_pos, y_pos].set_xlabel("Time") + + fig.tight_layout() + fig.savefig(os.path.join(plot_path, f"{save_name}"), dpi=300) diff --git a/esmvaltool/diag_scripts/climate_patterns/sub_functions.py b/esmvaltool/diag_scripts/climate_patterns/sub_functions.py new file mode 100644 index 0000000000..4b3fe00141 --- /dev/null +++ b/esmvaltool/diag_scripts/climate_patterns/sub_functions.py @@ -0,0 +1,267 @@ +# (C) Crown Copyright 2022-2024, Met Office. +"""Script containing relevant sub-functions for driving scripts. + +Author +------ +Gregory Munday (Met Office, UK) +""" + +import logging +import multiprocessing as mp +import os +from functools import partial +from pathlib import Path + +import iris +import iris.analysis.cartography +import iris.coord_categorisation +import dask as da + +logger = logging.getLogger(Path(__file__).stem) + + +def load_cube(filename): + """Load cube, remove any dimensions of length: 1. + + Parameters + ---------- + filename : path + path to load cube file + + Returns + ------- + cube : cube + a cube + """ + logger.debug("Loading %s", filename) + cube = iris.load_cube(filename) + cube = iris.util.squeeze(cube) + + return cube + + +def ocean_fraction_calc(sftlf): + """Calculate gridded land and ocean fractions. 
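+ The ocean fraction is computed as (100 - sftlf) / 100 and the land + fraction as sftlf / 100, with a spherical coordinate system attached + so that area weights can be calculated.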
+ + Parameters + ---------- + sftlf: cube + land-fraction cube from piControl experiment + + Returns + ------- + ocean_frac: cube + ocean_fraction cube for area-weights + land_frac: cube + land_fraction cube for area-weights + """ + sftlf.coord("latitude").coord_system = iris.coord_systems.GeogCS( + 6371229.0 + ) + sftlf.coord("longitude").coord_system = iris.coord_systems.GeogCS( + 6371229.0 + ) + sftof = 100 - sftlf + + ocean_frac = sftof / 100 + land_frac = sftlf / 100 + + return ocean_frac, land_frac + + +def area_avg_landsea(cube, + ocean_frac, + land_frac, + land=True, + return_cube=False): + """Calculate the global mean of a variable in a cube. + + Parameters + ---------- + cube : cube + input cube + ocean_frac : cube + ocean fraction cube, found from sftlf + land_frac : cube + land fraction cube, sftlf + land : bool + option to weight be land or ocean + return_cube : bool + option to return a cube or array + + Returns + ------- + cube2 : cube + cube with collapsed lat-lons, global mean over time + cube2.data : arr + array with collapsed lat-lons, global mean over time + """ + if not cube.coord("latitude").has_bounds(): + cube.coord("latitude").guess_bounds() + if not cube.coord("longitude").has_bounds(): + cube.coord("longitude").guess_bounds() + + global_weights = iris.analysis.cartography.area_weights( + cube, + normalize=False + ) + + if land is False: + ocean_frac.data = da.array.ma.masked_less(ocean_frac.core_data(), 0.01) + weights = iris.analysis.cartography.area_weights( + ocean_frac, + normalize=False + ) + ocean_area = ( + ocean_frac.collapsed( + ["latitude", "longitude"], iris.analysis.SUM, weights=weights + ) + / 1e12 + ) + cube2 = cube * global_weights * ocean_frac + + cube2 = ( + cube2.collapsed(["latitude", "longitude"], iris.analysis.SUM) + / 1e12 + / ocean_area + ) + + if land: + land_frac.data = da.array.ma.masked_less(land_frac.core_data(), 0.01) + weights = iris.analysis.cartography.area_weights( + land_frac, + normalize=False + ) + land_area = ( + land_frac.collapsed( + ["latitude", "longitude"], iris.analysis.SUM, weights=weights + ) + / 1e12 + ) + + # Iris is too strict so we need to use core_data in this calculation + cube2 = cube * global_weights * land_frac.core_data() + cube2 = ( + cube2.collapsed(["latitude", "longitude"], iris.analysis.SUM) + / 1e12 + / land_area + ) + + if return_cube: + return cube2 + + return cube2.data + + +def make_model_dirs(cfg, model): + """Create directories for each input model for saving. + + Parameters + ---------- + cfg: dict + Dictionary passed in by ESMValTool preprocessors + model : str + model name + + Returns + ------- + model_work_dir : path + path to specific model directory in work_dir + model_plot_dir : path + path to specific plot directory in plot_dir + """ + work_path = cfg["work_dir"] + plot_path = cfg["plot_dir"] + model_work_dir = os.path.join(work_path, model) + model_plot_dir = os.path.join(plot_path, model) + + if not os.path.exists(model_work_dir): + os.mkdir(model_work_dir) + if not os.path.exists(model_plot_dir): + os.mkdir(model_plot_dir) + + return model_work_dir, model_plot_dir + + +def rename_variables(cube, has_orig_vars=True, new_extension=""): + """Rename variables and a coord to fit in JULES framework. 
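+ CMIP short names (tas, huss, pr, ...) are mapped to their JULES driving + names (tl1, ql1, precip, ...) and the month_number coordinate is renamed + to imogen_drive; with has_orig_vars=False, the pattern cubes are given + back their CMIP names for saving.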
+ + Parameters + ---------- + cube : cube + input cube + has_orig_vars : bool + if True, rename to new var names with correct extension + new_extension : str + extension to add to variable names + + Returns + ------- + cube : cube + cube with renamed variables + """ + original_var_names = ["tas", "range_tl1", "huss", "pr", + "sfcWind", "ps", "rsds", "rlds"] + new_var_names = ["tl1", "range_tl1", "ql1", "precip", + "wind", "pstar", "swdown", "lwdown"] + long_var_names = [ + "Air Temperature", + "Diurnal Range", + "Specific Humidity", + "Precipitation", + "Wind Speed", + "Surface Pressure", + "Surface Downwelling Shortwave Radiation", + "Surface Downwelling Longwave Radiation" + ] + for orig_var, new_var, long_var in zip( + original_var_names, new_var_names, long_var_names + ): + if has_orig_vars: + if cube.var_name == orig_var: + cube.var_name = f"{new_var}{new_extension}" + cube.coord("month_number").rename("imogen_drive") + return cube + else: + if cube.var_name == f"{new_var}_anom": + cube.rename(long_var) + cube.var_name = f"{new_var}_patt" + return cube + if cube.var_name == f"{new_var}_patt": + cube.rename(long_var) + cube.var_name = orig_var + cube.coord("imogen_drive").rename("month_number") + return cube + + return None + + +def parallelise(function, processes=None): + """Parallelise any function, by George Ford, Met Office. + + Parameters + ---------- + function : function + function to be parallelised + processes : int + number of threads to be used in parallelisation + + Returns + ------- + result : any + results of parallelised elements + """ + if processes is None: + processes = max(1, mp.cpu_count() - 1) + if processes <= 0: + processes = 1 + + def easy_parallise(func, sequence, cfg): + with mp.Pool(processes=processes) as pool: + config_wrapper = partial(func, cfg=cfg) + result = pool.map_async(config_wrapper, sequence).get() + pool.close() + pool.join() + return result + + return partial(easy_parallise, function) diff --git a/esmvaltool/recipes/recipe_climate_patterns.yml b/esmvaltool/recipes/recipe_climate_patterns.yml new file mode 100644 index 0000000000..08e0c51779 --- /dev/null +++ b/esmvaltool/recipes/recipe_climate_patterns.yml @@ -0,0 +1,249 @@ +# ESMValTool +# recipe_climate_patterns.yml +--- +documentation: + description: Generating climate patterns from CMIP6 models. 
+ title: Generating Climate Patterns + + authors: + - munday_gregory + + maintainer: + - munday_gregory + + references: + - mathison2024gmd + - huntingford2000climdyn + +preprocessors: + global_mean_monthly: + monthly_statistics: + operator: mean + + regrid: + target_grid: {start_longitude: -180, end_longitude: 176.25, step_longitude: 3.75, + start_latitude: -55, end_latitude: 82.5, step_latitude: 2.5} + scheme: linear + + downscale_sftlf: + regrid: + target_grid: {start_longitude: -180, end_longitude: 176.25, step_longitude: 3.75, + start_latitude: -55, end_latitude: 82.5, step_latitude: 2.5} + scheme: linear + +monthly_global_settings: &monthly_global_settings + mip: Amon + project: CMIP6 + preprocessor: global_mean_monthly + +monthly_global_settings_day: &monthly_global_settings_day + mip: day + project: CMIP6 + preprocessor: global_mean_monthly + + +CMIP6_landfrac: &cmip6_landfrac + - {dataset: ACCESS-CM2, exp: piControl, ensemble: r1i1p1f1, grid: gn, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: AWI-CM-1-1-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, exp: hist-resIPO,ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: CanESM5-CanOE, exp: piControl, ensemble: r1i1p2f1, grid: gn} + - {dataset: CanESM5-1, exp: piControl, ensemble: r1i1p1f1, grid: gn, institute: CCCma} + # - {dataset: CAS-ESM2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn} # Global only + - {dataset: CMCC-ESM2, exp: piControl, ensemble: r1i1p1f1, grid: gn} + # - {dataset: CMCC-CM2-SR5, exp: piControl, ensemble: r1i1p1f1, grid: gn} # No tasmin/tasmax + - {dataset: CNRM-CM6-1, exp: piControl, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-CM6-1-HR, exp: piControl, ensemble: r1i1p1f2, grid: gr} + - {dataset: CNRM-ESM2-1, exp: piControl, ensemble: r1i1p1f2, grid: gr} + # - {dataset: E3SM-1-0, exp: piControl, ensemble: r1i1p1f1, grid: gr} # Tasmax == tasmin + - {dataset: EC-Earth3, exp: piControl, ensemble: r1i1p1f1, grid: gr} + # - {dataset: EC-Earth3-CC, exp: piControl, ensemble: r1i1p1f1, grid: gr} # Global only + - {dataset: EC-Earth3-Veg, exp: piControl, ensemble: r1i1p1f1, grid: gr} + # - {dataset: FGOALS-f3-L, exp: historical, ensemble: r1i1p1f1, grid: gr} # No tasmin/tasmax + - {dataset: FGOALS-g3, exp: piControl, ensemble: r1i1p1f1, grid: gn} + # - {dataset: FIO-ESM-2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn} # Global only + - {dataset: GFDL-CM4, exp: piControl, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GFDL-ESM4, exp: ssp370, ensemble: r1i1p1f1, grid: gr1} + - {dataset: GISS-E2-1-H, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-G, exp: piControl, ensemble: r1i1p5f1, grid: gn} + - {dataset: GISS-E2-2-G, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-LL, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: HadGEM3-GC31-MM, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: INM-CM4-8, exp: piControl, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM5-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, exp: piControl, ensemble: r1i1p1f1, grid: gr} + # - {dataset: KACE-1-0-G, exp: piControl, ensemble: r1i1p1f1, grid: gr} # Global only, weird tasmin/tasmax + # - {dataset: KIOST-ESM, exp: piControl, ensemble: r1i1p1f1, grid: gr} # Global only + - {dataset: MIROC6, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: MIROC-ES2L, exp: piControl, 
ensemble: r1i1p1f2, grid: gn} + - {dataset: MIROC-ES2H, exp: piControl, ensemble: r1i1p4f2, grid: gn} + - {dataset: MPI-ESM1-2-HR, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: MPI-ESM1-2-LR, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: MRI-ESM2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn} + # - {dataset: NorESM2-LM, exp: piControl, ensemble: r1i1p1f1, grid: gn} # Global only, tasmax == tasmin + - {dataset: NorESM2-MM, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, exp: piControl, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, exp: piControl, ensemble: r1i1p1f2, grid: gn} + +CMIP6_no_tasmax: &cmip6_no_tasmax + # - {dataset: E3SM-1-0, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2099} # bad tasmin/tasmax + # - {dataset: NorESM2-LM, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} # bad tasmin/tasmax + - {dataset: NorESM2-MM, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: TaiESM1, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + +CMIP6_DAY: &cmip6_day + # - {dataset: E3SM-1-0, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2099} # bad tasmin/tasmax + # - {dataset: NorESM2-LM, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} # bad tasmin/tasmax + - {dataset: NorESM2-MM, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: TaiESM1, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + +CMIP6_FULL: &cmip6_full + - {dataset: ACCESS-CM2, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100, institute: CSIRO-ARCCSS} + - {dataset: ACCESS-ESM1-5, exp: [historical, ssp585], ensemble: r3i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: AWI-CM-1-1-MR, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: BCC-CSM2-MR, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: CanESM5, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: CanESM5-1, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100, institute: CCCma} # optional extra + - {dataset: CanESM5-CanOE, exp: [historical, ssp585], ensemble: r1i1p2f1, grid: gn, start_year: 1850, end_year: 2100} + # - {dataset: CAS-ESM2-0, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} # Global only + - {dataset: CMCC-ESM2, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + # - {dataset: CMCC-CM2-SR5, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} # No tasmin/tasmax + - {dataset: CNRM-CM6-1, exp: [historical, ssp585], ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 2100} + - {dataset: CNRM-CM6-1-HR, exp: [historical, ssp585], ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 2100} + - {dataset: CNRM-ESM2-1, exp: [historical, ssp585], ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 2100} + - {dataset: EC-Earth3, exp: [historical, ssp585], ensemble: r11i1p1f1, grid: gr, start_year: 1850, end_year: 2100} + # - {dataset: EC-Earth3-CC, exp: [historical, ssp585], 
ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2100} # Global only + - {dataset: EC-Earth3-Veg, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2100} + # - {dataset: FGOALS-f3-L, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2100} # No tasmin/tasmax + - {dataset: FGOALS-g3, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + # - {dataset: FIO-ESM-2-0, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} # Global only + - {dataset: GFDL-CM4, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 2100} + - {dataset: GFDL-ESM4, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 2100} + - {dataset: GISS-E2-1-H, exp: [historical, ssp585], ensemble: r3i1p1f2, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: GISS-E2-1-G, exp: [historical, ssp585], ensemble: r1i1p5f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: GISS-E2-2-G, exp: [historical, ssp585], ensemble: r1i1p3f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: HadGEM3-GC31-LL, exp: [historical, ssp585], ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: HadGEM3-GC31-MM, exp: [historical, ssp585], ensemble: r1i1p1f3, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: INM-CM4-8, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 2100} + - {dataset: INM-CM5-0, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr1, start_year: 1850, end_year: 2100} + - {dataset: IPSL-CM6A-LR, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2100} + # - {dataset: KACE-1-0-G, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2100} # bad tasmin/tasmax + # - {dataset: KIOST-ESM, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 2100} # optional extra + - {dataset: MIROC6, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: MIROC-ES2L, exp: [historical, ssp585], ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: MIROC-ES2H, exp: [historical, ssp585], ensemble: r1i1p4f2, grid: gn, start_year: 1850, end_year: 2100} # optional extra + - {dataset: MPI-ESM1-2-HR, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: MPI-ESM1-2-LR, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: MRI-ESM2-0, exp: [historical, ssp585], ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 2100} + - {dataset: UKESM1-0-LL, exp: [historical, ssp585], ensemble: r1i1p1f2, grid: gn, start_year: 1850, end_year: 2100} + +diagnostics: + monthly_timeseries: + description: Mean monthly variables + + variables: + + # sftlf: + # short_name: sftlf + # mip: fx + # project: CMIP6 + # preprocessor: downscale_sftlf + # additional_datasets: *cmip6_landfrac + + tasmax_585: + short_name: tasmax + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + tasmin_585: + short_name: tasmin + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + tas_585: + short_name: tas + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + huss_585: + short_name: huss + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + pr_585: + short_name: pr + <<: 
*monthly_global_settings + additional_datasets: *cmip6_full + + sfcWind_585: + short_name: sfcWind + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + ps_585: + short_name: ps + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + rsds_585: + short_name: rsds + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + rlds_585: + short_name: rlds + <<: *monthly_global_settings + additional_datasets: *cmip6_full + + tasmax_585_day: + short_name: tasmax + <<: *monthly_global_settings_day + additional_datasets: *cmip6_day + + tasmin_585_day: + short_name: tasmin + <<: *monthly_global_settings_day + additional_datasets: *cmip6_day + + tas_585_no_tasmax: + short_name: tas + <<: *monthly_global_settings + additional_datasets: *cmip6_no_tasmax + + huss_585_no_tasmax: + short_name: huss + <<: *monthly_global_settings + additional_datasets: *cmip6_no_tasmax + + pr_585_no_tasmax: + short_name: pr + <<: *monthly_global_settings + additional_datasets: *cmip6_no_tasmax + + sfcWind_585_no_tasmax: + short_name: sfcWind + <<: *monthly_global_settings + additional_datasets: *cmip6_no_tasmax + + ps_585_no_tasmax: + short_name: ps + <<: *monthly_global_settings + additional_datasets: *cmip6_no_tasmax + + rsds_585_no_tasmax: + short_name: rsds + <<: *monthly_global_settings + additional_datasets: *cmip6_no_tasmax + + rlds_585_no_tasmax: + short_name: rlds + <<: *monthly_global_settings + additional_datasets: *cmip6_no_tasmax + + scripts: + climate_patterns_script: + script: climate_patterns/climate_patterns.py + jules_mode: false # options: true, false + parallelise: false # options: true, false + area: global # options global, land. If land, uncomment landfrac recipe settings diff --git a/esmvaltool/references/huntingford2000climdyn.bibtex b/esmvaltool/references/huntingford2000climdyn.bibtex new file mode 100644 index 0000000000..69bc072d49 --- /dev/null +++ b/esmvaltool/references/huntingford2000climdyn.bibtex @@ -0,0 +1,14 @@ +@article{huntingford2000, + title = {An analogue model to derive additional climate change scenarios from existing {GCM} simulations}, + volume = {16}, + issn = {1432-0894}, + url = {https://doi.org/10.1007/s003820000067}, + doi = {10.1007/s003820000067}, + abstract = {Changes in land surface driving variables, predicted by GCM transient climate change experiments, are confirmed to exhibit linearity in the global mean land temperature anomaly, ΔTl. The associated constants of proportionality retain spatial and seasonal characteristics of the GCM output, whilst ΔTlis related to radiative forcing anomalies. The resultant analogue model is shown to be robust between GCM runs and as such provides a computationally efficient technique of extending existing GCM experiments to a large range of climate change scenarios. As an example impacts study, the analogue model is used to drive a terrestrial ecosystem model, and predicted changes in terrestrial carbon are found to be similar to those when using GCM anomalies directly.}, + number = {8}, + journal = {Climate Dynamics}, + author = {Huntingford, C. and Cox, P. M.}, + month = aug, + year = {2000}, + pages = {575--586}, +} diff --git a/esmvaltool/references/mathison2024gmd.bibtex b/esmvaltool/references/mathison2024gmd.bibtex new file mode 100644 index 0000000000..a6090db6c7 --- /dev/null +++ b/esmvaltool/references/mathison2024gmd.bibtex @@ -0,0 +1,10 @@ +@Article{mathison2024, + AUTHOR = {Mathison, C. T. and Burke, E. and Kovacs, E. and Munday, G. and Huntingford, C. and Jones, C. and Smith, C. 
and Steinert, N. and Wiltshire, A. and Gohar, L. and Varney, R.}, + TITLE = {A rapid application emissions-to-impacts tool for scenario assessment: Probabilistic Regional Impacts from Model patterns and Emissions (PRIME)}, + JOURNAL = {EGUsphere}, + VOLUME = {2024}, + YEAR = {2024}, + PAGES = {1--28}, + URL = {https://egusphere.copernicus.org/preprints/2024/egusphere-2023-2932/}, + DOI = {10.5194/egusphere-2023-2932} +}