From b1d37edb7da3c0128a07282757d3a528b85deed8 Mon Sep 17 00:00:00 2001
From: Jess <20195932+wrongkindofdoctor@users.noreply.github.com>
Date: Tue, 23 Jan 2024 06:22:13 -0500
Subject: [PATCH 1/7] bump intake-esm version in base yaml (#502)
---
src/conda/env_base.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/conda/env_base.yml b/src/conda/env_base.yml
index f753be183..a37101bcf 100644
--- a/src/conda/env_base.yml
+++ b/src/conda/env_base.yml
@@ -24,7 +24,7 @@ dependencies:
- ecgtools=2023.7.13
- cfunits=3.3.6
- intake=0.7.0
-- intake-esm=2023.07.07
+- intake-esm=2023.11.10
- subprocess32=3.5.4
- pyyaml=6.0.1
- click=8.1.7
From e04154eaad124c005dcc904470a473129c960a06 Mon Sep 17 00:00:00 2001
From: delsbury <95831224+delsbury@users.noreply.github.com>
Date: Wed, 31 Jan 2024 04:28:14 -0700
Subject: [PATCH 2/7] Stc qbo enso (#495)
* updated this script, which I debugged in pursuit of modifying POD to accept daily and monthly input
* updated this to accept sea surface temperature inputs
* some updates
* all files
* adding rst file
* updated changes 2
* updated config
* same files
* copy of good files
* new driver
* copies
* moving config file to right destination
* removed debugging code from this script
* removed debugging code from this script
* removed debugging code from this script
* removed debugging code from this script
* copied in the correct header information for the output webpage
* updated driver
* codeQL fixes: removed unneeded imports and implemented some restructuring of the enso_indexing function to eliminate the possibility that the hemisphere if statements I was using previously are not met
* same as previous commit. Just adding other files to complete push
* The driver script and helper plotting scripts were revised by removing incomplete if statements. For instance, saying if hemisphere equal to NH was replaced with code that more directly says something like when hemisphere is equal to northern hemisphere, do X. This fix was made to alleviate many of the if statements flagged by CodeQL.
* updated date in version, contact, and license metadata section
---
config_file.jsonc | 125 ++
data/fieldlist_CMIP.jsonc | 16 +
diagnostics/stc_qbo_enso/config_file.jsonc | 125 ++
diagnostics/stc_qbo_enso/doc/stc_qbo_enso.rst | 209 +++
.../stc_qbo_enso/make_era5_rean_pod_data.py | 190 +++
diagnostics/stc_qbo_enso/settings.jsonc | 73 +
diagnostics/stc_qbo_enso/stc_qbo_enso.html | 181 +++
diagnostics/stc_qbo_enso/stc_qbo_enso.py | 1217 +++++++++++++++++
.../stc_qbo_enso_plottingcodeenso.py | 767 +++++++++++
.../stc_qbo_enso_plottingcodeqbo.py | 774 +++++++++++
10 files changed, 3677 insertions(+)
create mode 100644 config_file.jsonc
create mode 100644 diagnostics/stc_qbo_enso/config_file.jsonc
create mode 100644 diagnostics/stc_qbo_enso/doc/stc_qbo_enso.rst
create mode 100644 diagnostics/stc_qbo_enso/make_era5_rean_pod_data.py
create mode 100644 diagnostics/stc_qbo_enso/settings.jsonc
create mode 100644 diagnostics/stc_qbo_enso/stc_qbo_enso.html
create mode 100644 diagnostics/stc_qbo_enso/stc_qbo_enso.py
create mode 100644 diagnostics/stc_qbo_enso/stc_qbo_enso_plottingcodeenso.py
create mode 100644 diagnostics/stc_qbo_enso/stc_qbo_enso_plottingcodeqbo.py
diff --git a/config_file.jsonc b/config_file.jsonc
new file mode 100644
index 000000000..4f8f0aa7d
--- /dev/null
+++ b/config_file.jsonc
@@ -0,0 +1,125 @@
+// Configuration for MDTF-diagnostics driver script self-test.
+//
+// Copy this file and customize the settings as needed to run the framework on
+// your own model output without repeating command-line options. Pass it to the
+// framework at the end of the command line (positionally) or with the
+// -f/--input-file flag. Any other explicit command line options will override
+// what's listed here.
+//
+// All text to the right of an unquoted "//" is a comment and ignored, as well
+// as blank lines (JSONC quasi-standard.)
+{
+ "case_list" : [
+ // The cases below correspond to the different sample model data sets. Note
+ // that the MDTF package does not currently support analyzing multiple
+ // models in a single invocation. Comment out or delete the first entry and
+ // uncomment the second to run NOAA-GFDL-AM4 only for the MJO_prop_amp POD,
+ // and likewise for the SM_ET_coupling POD.
+ {
+ "CASENAME" : "CESM2-WACCM_CMIP_historical_r1i1p1f1",
+ "model" : "CESM2-WACCM_CMIP_historical_r1i1p1f1",
+ "convention" : "CMIP",
+ "FIRSTYR" : 1979,
+ "LASTYR" : 2014,
+ "QBOisobar" : 30,
+ "pod_list": ["stc_qbo_enso"]
+ }
+ // {
+ // "CASENAME" : "GFDL.CM4.c96L32.am4g10r8",
+ // "model" : "AM4",
+ // "convention" : "GFDL",
+ // "FIRSTYR" : 1,
+ // "LASTYR" : 10,
+ // "pod_list" : ["MJO_prop_amp"]
+ // }
+ // {
+ // "CASENAME" : "Lmon_GISS-E2-H_historical_r1i1p1",
+ // "model" : "CMIP",
+ // "convention" : "CMIP",
+ // "FIRSTYR" : 1951,
+ // "LASTYR" : 2005,
+ // "pod_list" : ["SM_ET_coupling"]
+ // }
+ // {
+ // "CASENAME" : "NCAR-CAM5.timeslice",
+ // "model" : "CESM",
+ // "convention" : "CMIP",
+ // "FIRSTYR" : 2000,
+ // "LASTYR" : 2004,
+ // "pod_list": ["example"]
+ // }
+ ],
+ // PATHS ---------------------------------------------------------------------
+ // Location of supporting data downloaded when the framework was installed.
+
+ // If a relative path is given, it's resolved relative to the MDTF-diagnostics
+ // code directory. Environment variables (eg, $HOME) can be referenced with a
+ // "$" and will be expanded to their current values when the framework runs.
+
+ // Parent directory containing observational data used by individual PODs.
+ "OBS_DATA_ROOT": "../inputdata/obs_data",
+
+ // Parent directory containing results from different models.
+ //"MODEL_DATA_ROOT": "/Volumes/Personal-Folders/CCP-Amy/CMIP6_fromZac/data-for-amy/",
+ //"MODEL_DATA_ROOT": "/Users/delsbury/Desktop/mdtf/scratch/",
+ "MODEL_DATA_ROOT": "/Users/delsbury/Desktop/mdtf/inputdata/model/",
+
+ // Working directory. Defaults to OUTPUT_DIR if blank.
+ "WORKING_DIR": "../wkdir",
+
+ // Directory to write output. The results of each run of the framework will be
+ // put in a subdirectory of this directory.
+ "OUTPUT_DIR": "../wkdir",
+
+ // Location of the Anaconda/miniconda installation to use for managing
+ // dependencies (path returned by running `conda info --base`.) If empty,
+ // framework will attempt to determine location of system's conda installation.
+ "conda_root": "/Users/delsbury/opt/anaconda3",
+
+ // Directory containing the framework-specific conda environments. This should
+ // be equal to the "--env_dir" flag passed to conda_env_setup.sh. If left
+ // blank, the framework will look for its environments in the system default
+ // location.
+ "conda_env_root": "/Users/delsbury/opt/anaconda3/envs",
+
+ // SETTINGS ------------------------------------------------------------------
+ // Any command-line option recognized by the mdtf script (type `mdtf --help`)
+ // can be set here, in the form "flag name": "desired setting".
+
+ // Method used to fetch model data.
+ "data_manager": "Local_File",
+
+ // Type of data that POD(s) will analyze
+ // "single_run" (default) or "multi_run"
+ "data_type": "single_run",
+
+ // Method used to manage dependencies.
+ "environment_manager": "Conda",
+
+ // Settings affecting what output is generated:
+
+ // Set to true to have PODs save postscript figures in addition to bitmaps.
+ "save_ps": false,
+
+ // Set to true to have PODs save netCDF files of processed data.
+ "save_nc": true,
+
+ // Set to true to save HTML and bitmap plots in a .tar file.
+ "make_variab_tar": false,
+
+ // Set to true to overwrite results in OUTPUT_DIR; otherwise results saved
+ // under a unique name.
+ "overwrite": true,
+
+ // Settings used in debugging:
+
+ // Log verbosity level.
+ "verbose": 1,
+
+ // Set to true for framework test. Data is fetched but PODs are not run.
+ "test_mode": false,
+
+ // Set to true for framework test. No external commands are run and no remote
+ // data is copied. Implies test_mode.
+ "dry_run": false
+}
diff --git a/data/fieldlist_CMIP.jsonc b/data/fieldlist_CMIP.jsonc
index 41324bb9f..25144e7a8 100644
--- a/data/fieldlist_CMIP.jsonc
+++ b/data/fieldlist_CMIP.jsonc
@@ -49,6 +49,12 @@
"scalar_coord_templates": {"plev": "ua{value}"},
"ndim": 4
},
+ "u10": {
+ "standard_name": "10 hPa zonal_wind",
+ "units": "m s-1",
+ "scalar_coord_templates": {"plev": "u10{value}"},
+ "ndim": 3
+ },
"va": {
"standard_name": "northward_wind",
"units": "m s-1",
@@ -147,12 +153,22 @@
"units": "Pa",
"ndim": 3
},
+ "tos": {
+ "standard_name": "sea_surface_temperature",
+ "units": "degC",
+ "ndim": 3
+ },
"sfcWind": {
"standard_name": "wind_speed",
"units": "m s-1",
"modifier": "atmos_height",
"ndim": 3
},
+ "tos": {
+ "standard_name": "sea_surface_temperature",
+ "units": "degC",
+ "ndim": 3
+ },
// radiative fluxes:
"rsus": {
"standard_name": "surface_upwelling_shortwave_flux_in_air",
diff --git a/diagnostics/stc_qbo_enso/config_file.jsonc b/diagnostics/stc_qbo_enso/config_file.jsonc
new file mode 100644
index 000000000..4f8f0aa7d
--- /dev/null
+++ b/diagnostics/stc_qbo_enso/config_file.jsonc
@@ -0,0 +1,125 @@
+// Configuration for MDTF-diagnostics driver script self-test.
+//
+// Copy this file and customize the settings as needed to run the framework on
+// your own model output without repeating command-line options. Pass it to the
+// framework at the end of the command line (positionally) or with the
+// -f/--input-file flag. Any other explicit command line options will override
+// what's listed here.
+//
+// All text to the right of an unquoted "//" is a comment and ignored, as well
+// as blank lines (JSONC quasi-standard.)
+{
+ "case_list" : [
+ // The cases below correspond to the different sample model data sets. Note
+ // that the MDTF package does not currently support analyzing multiple
+ // models in a single invocation. Comment out or delete the first entry and
+ // uncomment the second to run NOAA-GFDL-AM4 only for the MJO_prop_amp POD,
+ // and likewise for the SM_ET_coupling POD.
+ {
+ "CASENAME" : "CESM2-WACCM_CMIP_historical_r1i1p1f1",
+ "model" : "CESM2-WACCM_CMIP_historical_r1i1p1f1",
+ "convention" : "CMIP",
+ "FIRSTYR" : 1979,
+ "LASTYR" : 2014,
+ "QBOisobar" : 30,
+ "pod_list": ["stc_qbo_enso"]
+ }
+ // {
+ // "CASENAME" : "GFDL.CM4.c96L32.am4g10r8",
+ // "model" : "AM4",
+ // "convention" : "GFDL",
+ // "FIRSTYR" : 1,
+ // "LASTYR" : 10,
+ // "pod_list" : ["MJO_prop_amp"]
+ // }
+ // {
+ // "CASENAME" : "Lmon_GISS-E2-H_historical_r1i1p1",
+ // "model" : "CMIP",
+ // "convention" : "CMIP",
+ // "FIRSTYR" : 1951,
+ // "LASTYR" : 2005,
+ // "pod_list" : ["SM_ET_coupling"]
+ // }
+ // {
+ // "CASENAME" : "NCAR-CAM5.timeslice",
+ // "model" : "CESM",
+ // "convention" : "CMIP",
+ // "FIRSTYR" : 2000,
+ // "LASTYR" : 2004,
+ // "pod_list": ["example"]
+ // }
+ ],
+ // PATHS ---------------------------------------------------------------------
+ // Location of supporting data downloaded when the framework was installed.
+
+ // If a relative path is given, it's resolved relative to the MDTF-diagnostics
+ // code directory. Environment variables (eg, $HOME) can be referenced with a
+ // "$" and will be expanded to their current values when the framework runs.
+
+ // Parent directory containing observational data used by individual PODs.
+ "OBS_DATA_ROOT": "../inputdata/obs_data",
+
+ // Parent directory containing results from different models.
+ //"MODEL_DATA_ROOT": "/Volumes/Personal-Folders/CCP-Amy/CMIP6_fromZac/data-for-amy/",
+ //"MODEL_DATA_ROOT": "/Users/delsbury/Desktop/mdtf/scratch/",
+ "MODEL_DATA_ROOT": "/Users/delsbury/Desktop/mdtf/inputdata/model/",
+
+ // Working directory. Defaults to OUTPUT_DIR if blank.
+ "WORKING_DIR": "../wkdir",
+
+ // Directory to write output. The results of each run of the framework will be
+ // put in a subdirectory of this directory.
+ "OUTPUT_DIR": "../wkdir",
+
+ // Location of the Anaconda/miniconda installation to use for managing
+ // dependencies (path returned by running `conda info --base`.) If empty,
+ // framework will attempt to determine location of system's conda installation.
+ "conda_root": "/Users/delsbury/opt/anaconda3",
+
+ // Directory containing the framework-specific conda environments. This should
+ // be equal to the "--env_dir" flag passed to conda_env_setup.sh. If left
+ // blank, the framework will look for its environments in the system default
+ // location.
+ "conda_env_root": "/Users/delsbury/opt/anaconda3/envs",
+
+ // SETTINGS ------------------------------------------------------------------
+ // Any command-line option recognized by the mdtf script (type `mdtf --help`)
+ // can be set here, in the form "flag name": "desired setting".
+
+ // Method used to fetch model data.
+ "data_manager": "Local_File",
+
+ // Type of data that POD(s) will analyze
+ // "single_run" (default) or "multi_run"
+ "data_type": "single_run",
+
+ // Method used to manage dependencies.
+ "environment_manager": "Conda",
+
+ // Settings affecting what output is generated:
+
+ // Set to true to have PODs save postscript figures in addition to bitmaps.
+ "save_ps": false,
+
+ // Set to true to have PODs save netCDF files of processed data.
+ "save_nc": true,
+
+ // Set to true to save HTML and bitmap plots in a .tar file.
+ "make_variab_tar": false,
+
+ // Set to true to overwrite results in OUTPUT_DIR; otherwise results saved
+ // under a unique name.
+ "overwrite": true,
+
+ // Settings used in debugging:
+
+ // Log verbosity level.
+ "verbose": 1,
+
+ // Set to true for framework test. Data is fetched but PODs are not run.
+ "test_mode": false,
+
+ // Set to true for framework test. No external commands are run and no remote
+ // data is copied. Implies test_mode.
+ "dry_run": false
+}
diff --git a/diagnostics/stc_qbo_enso/doc/stc_qbo_enso.rst b/diagnostics/stc_qbo_enso/doc/stc_qbo_enso.rst
new file mode 100644
index 000000000..47fcc9e49
--- /dev/null
+++ b/diagnostics/stc_qbo_enso/doc/stc_qbo_enso.rst
@@ -0,0 +1,209 @@
+.. This is a comment in RestructuredText format (two periods and a space).
+
+.. Note that all "statements" and "paragraphs" need to be separated by a blank
+ line. This means the source code can be hard-wrapped to 80 columns for ease
+ of reading. Multi-line comments or commands like this need to be indented by
+ exactly three spaces.
+
+.. Underline with '='s to set top-level heading:
+ https://docutils.sourceforge.io/docs/user/rst/quickref.html#section-structure
+
+Stratosphere-Troposphere Coupling: QBO and ENSO stratospheric teleconnections
+==============================================================================
+
+Last update: 2023-10-03
+
+This script and its helper scripts (“stc_qbo_enso_plottingcodeqbo.py” and
+“stc_qbo_enso_plottingcodeenso.py”) do calculations to assess the representation
+of stratospheric teleconnections associated with the Quasi-Biennial Oscillation
+(QBO) and the El Nino Southern Oscillation (ENSO). This POD uses monthly 4D
+(time x plev x lat x lon) zonal wind, 4D meridional wind, 4D temperature, 3D
+(time x lat x lon) sea level pressure, and 3D sea surface temperature data.
+Coupling between the QBO and the boreal polar stratosphere takes place during
+boreal fall and winter whereas coupling between the QBO and the austral polar
+stratosphere takes place mainly during austral spring and summer. By default,
+the POD defines the QBO for NH (SH) analyses using the Oct-Nov (Jul-Aug) 5S-5N
+30 hPa zonal winds. The QBO is thought to influence the polar stratospheres,
+the so-called “polar route,” by modulating the lower stratospheric (~100-50 hPa)
+and middle stratospheric (~20-5 hPa) mid-latitude circulation. The aforementioned
+lower stratospheric teleconnection is also associated with a change in the strength
+and position of the tropospheric jet; the so-called “subtropical route.” In addition,
+evidence continues to show that the QBO directly influences the underlying tropical
+tropospheric circulation, referred to as the “tropical route.” These three
+teleconnections allow the QBO to elicit surface impacts globally. Said teleconnections
+are visualized herein by using a metric of planetary wave propagation (eddy heat flux),
+circulation response (zonal wind), and surface impact (sea level pressure).
+Additionally, metrics of model QBOs (e.g., amplitude, height, width) are produced.
+ENSO’s coupling with the polar stratospheres takes place as the amplitude of ENSO
+maximizes during boreal fall and winter. By default, the POD defines ENSO for NH
+(SH) analyses using the Nov-Mar (Sep-Jan) Nino3.4 SSTs. Though ENSO’s teleconnections
+are global, it interacts with the stratosphere by stimulating tropical-extratropical
+Rossby waves that constructively interfere with the climatological extratropical
+stationary wave mainly over the Pacific, promoting enhanced upward planetary wave
+propagation into the stratosphere. Similar to the QBO code, ENSO’s teleconnections
+are visualized using the eddy heat flux, the zonal wind, and the sea level pressure.
+
+This POD makes six kinds of figures and one text file from provided model data:
+
+- Zonal-mean zonal wind anomalies (deviations from seasonal cycle) composited
+ based on El Nino and La Nina years are shown in red/blue shading. Nina minus
+ Nino differences are shown in shading as well and climatological winds are
+ overlaid on all aforementioned plots in black contours
+- Zonal-mean eddy heat flux anomalies composited based on El Nino and La Nina
+ years are shown in red/blue shading. Nina minus Nino differences are shown
+ in shading as well and climatological heat flux is overlaid on all aforementioned
+ plots in black contours
+- Sea level pressure anomalies composited based on El Nino and La Nina
+ years are shown in red/blue shading. Nina minus Nino differences are shown
+ in shading as well and climatological sea level pressure is overlaid on all aforementioned
+ plots in black contours
+- A text file of QBO metrics (min/mean/max QBO periodicity, easterly/westerly/total
+ amplitude, lowest altitude tropical stratospheric isobar that the QBO reaches,
+ the height or vertical extent of the QBO, and its latitudinal width) is produced.
+- Should the above QBO metrics code detect a QBO in the model data, similar plots as
+ the aforementioned three ENSO plots, but composited based on easterly
+ and westerly QBO years, are made
+
+These plots are made for both hemispheres, with a focus on winter and spring, the seasons
+when upward propagating planetary waves couple the troposphere and stratosphere.
+The metrics are designed to reveal the extratropical circulation response to two forms
+of tropical internal variability, which are generally difficult to represent spontaneously
+in climate models (QBO+ENSO). Though ENSO's representation in climate models as well as the
+representation of its teleconnections has significantly improved over multiple generations
+of CMIP experiments (Bellenger et al. 2014; Planton et al. 2021), it is less clear how
+ENSO's coupling with the polar stratosphere is represented by models. The sea level
+pressure ENSO responses reveal the precursor tropospheric forcing patterns
+(e.g., Aleutian Low response) that should stimulate or reduce upward planetary wave
+propagation into the stratosphere. The zonal wind and eddy heat flux plots reveal if the
+reinforced or suppressed upward planetary wave propagation due to ENSO is actually "felt"
+in the stratosphere and the sea level pressure plots can again be referenced for evidence
+of a downward propagating winter/spring annular mode response to ENSO modulating the polar vortex.
+
+Similar plots to the ones made based on El Nino and La Nina years are made for easterly
+and westerly QBO years if and when a QBO is detected in the model data; e.g., models with
+too-coarse vertical resolution may not simulate a QBO (Zhao et al. 2018). It should be
+interesting to compare the QBO metrics with the representation of QBO teleconnections in
+models. Models struggle to represent several QBO attributes (Richter et al. 2020) and
+since the structure of the QBO (e.g., its amplitude or latitudinal width) is intimately
+tied to the representation of QBO teleconnections (Garfinkel and Hartmann 2011; Hansen
+et al. 2013), models generally have a difficult time representing the extratropical
+impacts of the QBO (Rao et al. 2020).
+
+
+Version & Contact info
+----------------------
+
+- Version/revision information: v1.0 (Oct 2023)
+- Project PIs: Amy H. Butler (NOAA CSL) and Zachary D. Lawrence (CIRES/NOAA PSL)
+- Developer/point of contact: Dillon Elsbury (dillon.elsbury@noaa.gov)
+
+Open source copyright agreement
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The MDTF framework is distributed under the LGPLv3 license (see LICENSE.txt).
+
+
+Functionality
+-------------
+
+This POD is driven by the file ``stc_qbo_enso.py``, with two helper scripts called
+``stc_qbo_enso_plottingcodeenso.py`` and ``stc_qbo_enso_plottingcodeqbo.py``.
+The driver script reads in the model fields, identifies El Nino/La Nina years and
+easterly/westerly QBO years, computes the eddy heat flux, retrieves the QBO metrics,
+and then uses the two helper scripts to make the associated zonal wind, eddy heat
+flux, and sea level pressure plots.
+
+The atmospheric observational data this POD uses is based on ERA5 reanalysis
+(Hersbach, et al., 2020), and includes pre-computed monthly zonal-mean zonal winds, zonal-
+mean eddy heat fluxes, and sea level pressure. The oceanic observational data that
+this POD uses is from HadISST (Rayner et al. 2003) and includes pre-computed monthly sea
+surface temperature.
+
+
+Required programming language and libraries
+-------------------------------------------
+
+This POD requires Python 3, with the following packages:
+
+- numpy
+- xarray
+- xesmf
+- os
+- matplotlib
+- cartopy
+- scipy
+
+Required model output variables
+-------------------------------
+
+The following monthly mean fields are required:
+
+- Zonal Winds, ``ua`` as ``(time,lev,lat,lon)`` (units: m/s)
+- Meridional Winds, ``va`` as ``(time,lev,lat,lon)`` (units: m/s)
+- Temperature, ``ta`` as ``(time,lev,lat,lon)`` (units: K)
+- Sea level pressure, ``psl`` as ``(time,lat,lon)`` (units: Pa)
+- Sea surface temperature, ``tos`` as ``(time,lat,lon)`` (units: degC)
+
+References
+----------
+
+.. _ref-Bellenger:
+
+ Bellenger, H., Guilyardi, E., Leloup, J., Lengaigne, M., & Vialard, J. (2014).
+ ENSO representation in climate models: From CMIP3 to CMIP5. Climate Dynamics, 42,
+ 1999-2018, https://doi.org/10.1007/s00382-013-1783-z
+
+.. _ref-Planton:
+
+ Planton, Y. Y., Guilyardi, E., Wittenberg, A. T., Lee, J., Gleckler, P. J., Bayr, T.,
+ ... & Voldoire, A. (2021). Evaluating climate models with the CLIVAR 2020 ENSO metrics
+ package. Bulletin of the American Meteorological Society, 102(2), E193-E217,
+ https://doi.org/10.1175/BAMS-D-19-0337.1
+
+.. _ref-Zhao:
+
+ Zhao, M., Golaz, J. C., Held, I. M., Guo, H., Balaji, V., Benson, R., ... & Xiang, B.
+ (2018). The GFDL global atmosphere and land model AM4. 0/LM4. 0: 1. Simulation
+ characteristics with prescribed SSTs. Journal of Advances in Modeling Earth Systems,
+ 10(3), 691-734, https://doi.org/10.1002/2017MS001209
+
+.. _ref-Hersbach:
+
+ Hersbach, H. and coauthors, 2020: The ERA5 global reanalysis. Q J R Meteorol Soc.,
+ 146, 1999-2049, https://doi.org/10.1002/qj.3803
+
+.. _ref-Richter:
+
+ Richter, J. H., Anstey, J. A., Butchart, N., Kawatani, Y., Meehl, G. A., Osprey, S.,
+ & Simpson, I. R. (2020). Progress in simulating the quasi‐biennial oscillation in
+ CMIP models. Journal of Geophysical Research: Atmospheres, 125(8), e2019JD032362,
+ https://doi.org/10.1029/2019JD032362
+
+.. _ref-Garfinkel:
+
+ Garfinkel, C. I., & Hartmann, D. L. (2011). The influence of the quasi-biennial
+ oscillation on the troposphere in winter in a hierarchy of models. Part I: Simplified
+ dry GCMs. Journal of the Atmospheric Sciences, 68(6), 1273-1289,
+ https://doi.org/10.1175/2011JAS3665.1
+
+.. _ref-Hansen:
+ Hansen, F., Matthes, K., & Gray, L. J. (2013). Sensitivity of stratospheric dynamics
+ and chemistry to QBO nudging width in the chemistry‒climate model WACCM. Journal of
+ Geophysical Research: Atmospheres, 118(18), 10-464,
+ https://doi.org/10.1002/jgrd.50812
+
+.. _ref-Rao:
+ Rao, J., Garfinkel, C. I., & White, I. P. (2020). Impact of the quasi-biennial
+ oscillation on the northern winter stratospheric polar vortex in CMIP5/6 models.
+ Journal of Climate, 33(11), 4787-4813, https://doi.org/10.1175/JCLI-D-19-0663.1
+
+More about this POD
+--------------------------
+
+**Statistical testing**
+
+A student's 2-tailed t-test is used to assess how likely it is that the Nina minus
+Nino anomalies and easterly QBO minus westerly QBO anomalies arise by chance for the zonal
+wind, eddy heat flux, and sea level pressure plots. A p-value <= 0.05 is used as the
+threshold for "statistical significance," which is denoted on the aforementioned figures
+in the third row using stippling.
\ No newline at end of file
diff --git a/diagnostics/stc_qbo_enso/make_era5_rean_pod_data.py b/diagnostics/stc_qbo_enso/make_era5_rean_pod_data.py
new file mode 100644
index 000000000..35e30eae2
--- /dev/null
+++ b/diagnostics/stc_qbo_enso/make_era5_rean_pod_data.py
@@ -0,0 +1,190 @@
+# ==============================================================================
+# MDTF Strat-Trop Coupling: Vertical Wave Propagation POD
+# ==============================================================================
+#
+# This file is part of the Strat-Trop Coupling: Vertical Wave Propagation POD
+# of the MDTF code package (see mdtf/MDTF-diagnostics/LICENSE.txt)
+#
+# This script shows a simple recipe for creating the "pre-digested" reanalysis
+# data used for making the reanalysis versions of the POD plots.
+#
+# Data for individual variables obtained from:
+# https://cds.climate.copernicus.eu/cdsapp#!/dataset/reanalysis-era5-pressure-levels-monthly-means?tab=form
+#
+# Need the following variables:
+# -----------------------------
+# v-component of wind
+# air temperature
+# geopotential (this is post-processed to geopot height by dividing by 9.80665)
+
+import numpy as np
+import xarray as xr
+import xesmf as xe
+
+### BEGIN: READ INPUT FIELDS ###
+# The following code/paths will have to be adapted for your own system.
+#
+# On my system, the monthly-mean variables are contained in individual
+# files that span all available years in the reanalysis from 1979 to
+# the ~present. I set these as environment variables, but explicitly
+# show the paths in comments as examples
+
+# Some needed functions #
+
+def field_regridding(ds,latname,lonname):
+
+ r""" Regrid input data so that there are 73 latitude points. This grid
+ includes 5S and 5N, which are needed for the QBO analysis. This grid
+ includes 60N, which is needed for the SSW analysis. """
+
+ # Check to see if the latitudes are organized north to south or south to north
+ lat_first = ds[latname].values[0]
+ lat_end = ds[latname].values[-1]
+
+ if lat_first >= lat_end:
+ lats = np.linspace(90,-90,num=73)
+ if lat_end > lat_first:
+ lats = np.linspace(-90,90,num=73)
+
+ # Check to see if the longitudes are organized -180/180 or 0 to 360
+ lon_first = ds[lonname].values[0]
+ print (lon_first, 'lon_first')
+
+ if lon_first < 0:
+ lons = np.linspace(-180,177.5,num=144)
+ if lon_first >= 0:
+ lons = np.linspace(0,357.5,num=144)
+
+ ds_out = xr.Dataset({'lat': (['lat'], lats),'lon': (['lon'], lons),})
+ regridder = xe.Regridder(ds, ds_out, 'bilinear')
+ regridded = regridder(ds)
+ print (regridded, 'regridded')
+
+ return regridded
+
+def compute_total_eddy_heat_flux(varray, tarray, vname, tname):
+
+ r""" Compute the total (all zonal wavenumbers) eddy heat flux
+ using monthly data. Output field has new variable, 'ehf.' """
+
+ # Take the zonal means of v and T
+ dummy = varray.mean('lon')
+
+ eddyv = (varray - varray.mean('lon'))[vname]
+ eddyt = (tarray - tarray.mean('lon'))[tname]
+
+ ehf = np.nanmean(eddyv.values * eddyt.values,axis=-1)
+ dummy[vname].values[:] = ehf
+ dummy = dummy.rename({vname:'ehf'})
+ print (dummy)
+
+ return dummy
+
+# Load the observational data #
+
+sfi = '/Volumes/Personal-Folders/CCP-Dillon/ERA5/stationary/POD/HadISST_sst.nc'
+pfi = '/Volumes/Personal-Folders/CCP-Dillon/ERA5/stationary/POD/cat-era5-prmsl-monmean-91-180.nc'
+ufi = '/Volumes/Personal-Folders/CCP-Dillon/ERA5/stationary/POD/cat-era5-uwnd-monmean-91-180.nc'
+vfi = '/Volumes/Personal-Folders/CCP-Dillon/ERA5/stationary/POD/cat-era5-vwnd-monmean-91-180.nc'
+tfi = '/Volumes/Personal-Folders/CCP-Dillon/ERA5/stationary/POD/cat-era5-air-monmean-91-180.nc'
+
+# Open datasets #
+
+sst_ds = xr.open_dataset(sfi)
+psl_ds = xr.open_dataset(pfi)
+uwnd_ds = xr.open_dataset(ufi)
+vwnd_ds = xr.open_dataset(vfi)
+air_ds = xr.open_dataset(tfi)
+
+# Regrid #
+
+sst_regridded = field_regridding(sst_ds,'latitude','longitude')
+psl_regridded = field_regridding(psl_ds,'lat','lon')
+uwnd_regridded = field_regridding(uwnd_ds,'lat','lon')
+vwnd_regridded = field_regridding(vwnd_ds,'lat','lon')
+air_regridded = field_regridding(air_ds,'lat','lon')
+
+# By the end of this block of code, the vwnd, air, and hgt variables
+# should each contain all available months of meridional wind,
+# air temperature and geopotential height, respectively. They can be
+# lazily loaded with xarray (e.g., after using open_mfdataset)
+### END: READ INPUT FIELDS ###
+
+r""" Compute the total (all zonal wavenumbers) eddy heat flux
+using monthly data. Output field has new variable, 'ehf.' """
+
+# Take the zonal means of v and T
+dummy = vwnd_regridded.mean('lon')
+
+eddyv = (vwnd_regridded - vwnd_regridded.mean('lon'))['vwnd']
+eddyt = (air_regridded - air_regridded.mean('lon'))['air']
+
+ehf = np.nanmean(eddyv.values * eddyt.values,axis=-1)
+dummy['vwnd'].values[:] = ehf
+ehf = dummy.rename({'vwnd':'ehf'})
+ehf.attrs['long_name'] = "Zonal Mean Eddy Heat Flux (v'T')"
+
+r""" Zonally average the zonal wind """
+uzm = uwnd_regridded.mean('lon')
+uzm = uzm.rename({'uwnd':'ua'})
+
+r""" Change name in psl file """
+psl_out = psl_regridded.rename({'prmsl':'psl'})
+
+print ('######### BREAK ##########')
+
+print (sst_regridded)
+print ('sst_regridded')
+print (' ')
+print (ehf)
+print ('ehf')
+print (' ')
+print (uzm)
+print ('uzm')
+print (' ')
+print (psl_ds)
+print ('psl_ds')
+print (' ')
+
+
+# Merge DataArrays into output dataset
+out_ds = xr.merge([ehf, uzm, psl_out])
+print (out_ds)
+out_ds = out_ds.rename({'level':'lev'})
+out_ds.attrs['reanalysis'] = 'ERA5'
+out_ds.attrs['notes'] = 'Fields derived from monthly-mean ERA5 data on pressure levels'
+
+out_ds.psl.attrs['units'] = 'Pa'
+out_ds.ua.lev.attrs['units'] = 'hPa'
+out_ds.ehf.lev.attrs['units'] = 'hPa'
+
+sst_out_ds = sst_regridded
+sst_out_ds.attrs['reanalysis'] = 'HadiSST'
+sst_out_ds.attrs['notes'] = 'Fields derived from monthly-mean HadiSST sea surface temperature'
+sst_out_ds = sst_out_ds.rename({'sst':'tos'})
+
+'''
+# To reduce size of output file without changing results much, will thin the
+# available latitudes from 0.25 to 0.5 degrees. This is optional.
+out_ds = out_ds.isel(lat=slice(0,None,2))
+
+# To reduce size of output file even more, we will only keep latitudes poleward
+# of 30 degrees (since we are primarily interested in the extratropics).
+# This step is also optional.
+out_ds = out_ds.where(np.abs(out_ds.lat) >= 30, drop=True)
+
+# To reduce size of output file even more more, we will use zlib compression
+# on each variable. This step is also optional.
+encoding = {'ta_zm_50': {'dtype':'float32', 'zlib':True, 'complevel':7},
+ 'zg_zm': {'dtype':'float32', 'zlib':True, 'complevel':7},
+ 'ehf_100': {'dtype':'float32', 'zlib':True, 'complevel':7}}
+
+# Save the output file
+filename = 'stc_eddy_heat_fluxes_obs-data.nc'
+out_ds.rename({'level':'lev'}).to_netcdf(filename, encoding=encoding)
+'''
+
+filename = 'stc-qbo-enso-obs-atm.nc'
+out_ds.to_netcdf(filename)
+filename = 'stc-qbo-enso-obs-ocn.nc'
+sst_out_ds.to_netcdf(filename)
diff --git a/diagnostics/stc_qbo_enso/settings.jsonc b/diagnostics/stc_qbo_enso/settings.jsonc
new file mode 100644
index 000000000..f5b1219e1
--- /dev/null
+++ b/diagnostics/stc_qbo_enso/settings.jsonc
@@ -0,0 +1,73 @@
+// Strat-Trop Coupling: QBO and ENSO stratospheric teleconnections
+//
+// This POD requires monthly-frequency zonal winds, meridional winds, air
+// temperatures, sea level pressure, and sea surface temperatures, with
+// pressure levels spanning the troposphere and stratosphere.
+//
+{
+ "settings" : {
+ "driver" : "stc_qbo_enso.py",
+ "long_name" : "Metrics of QBO and extratropical circulation impact of QBO and ENSO",
+ "realm" : ["atmos", "ocean"],
+ "description" : "Assess the representation of the QBO",
+ "pod_env_vars" : {
+ // Isobar (hPa) used to define the QBO in the tropical stratosphere
+ // Defaults to 30 hPa
+ "QBOisobar" : "30"
+ },
+ "runtime_requirements": {
+ "python3": ["matplotlib", "numpy", "scipy", "xarray", "xesmf"]
+ }
+ },
+ "data": {
+ "min_frequency": "6hr",
+ "max_frequency": "yr"},
+ "dimensions": {
+ "lat": {"standard_name": "latitude"},
+ "lon": {"standard_name": "longitude"},
+ "lev": {
+ "standard_name": "air_pressure",
+ "units": "Pa",
+ "positive": "down",
+ "axis": "Z"
+ },
+ "time": {"standard_name": "time"}
+ },
+ "varlist": {
+ "tos": {
+ "standard_name" : "sea_surface_temperature",
+ "units" : "degC",
+ "frequency": "mon",
+ "dimensions": ["time", "lat", "lon"],
+ "requirement": "required"
+ },
+ "ua": {
+ "standard_name" : "eastward_wind",
+ "units" : "m s-1",
+ "frequency": "mon",
+ "dimensions": ["time", "lev", "lat", "lon"],
+ "requirement": "required"
+ },
+ "va": {
+ "standard_name" : "northward_wind",
+ "units" : "m s-1",
+ "frequency": "mon",
+ "dimensions": ["time", "lev", "lat", "lon"],
+ "requirement": "required"
+ },
+ "ta": {
+ "standard_name" : "air_temperature",
+ "units" : "K",
+ "frequency": "mon",
+ "dimensions": ["time", "lev", "lat", "lon"],
+ "requirement": "required"
+ },
+ "psl": {
+ "standard_name" : "air_pressure_at_mean_sea_level",
+ "units" : "Pa",
+ "frequency": "mon",
+ "dimensions": ["time", "lat", "lon"],
+ "requirement": "required"
+ }
+ }
+}
\ No newline at end of file
diff --git a/diagnostics/stc_qbo_enso/stc_qbo_enso.html b/diagnostics/stc_qbo_enso/stc_qbo_enso.html
new file mode 100644
index 000000000..c852bab5d
--- /dev/null
+++ b/diagnostics/stc_qbo_enso/stc_qbo_enso.html
@@ -0,0 +1,181 @@
+
+
MDTF example diagnostic
+
+STC QBO and ENSO stratospheric teleconnections
+
+This script and its helper scripts (“stc_qbo_enso_plottingcodeqbo.py” and “stc_qbo_enso_plottingcodeenso.py”) do
+calculations to assess the representation of stratospheric teleconnections associated with the Quasi-Biennial
+Oscillation (QBO) and the El Nino Southern Oscillation (ENSO). This POD uses monthly 4D (time x plev x lat x lon)
+zonal wind, 4D meridional wind, 4D temperature, 3D (time x lat x lon) sea level pressure, and 3D sea surface
+temperature data. Coupling between the QBO and the boreal polar stratosphere takes place during boreal fall and
+winter whereas coupling between the QBO and the austral polar stratosphere takes place mainly during austral
+spring and summer. By default, the POD defines the QBO for NH (SH) analyses using the Oct-Nov (Jul-Aug) 5S-5N
+30 hPa zonal winds. The QBO is thought to influence the polar stratospheres, the so-called “polar route,” by
+modulating the lower stratospheric (~100-50 hPa) and middle stratospheric (~20-5 hPa) mid-latitude circulation. The
+aforementioned lower stratospheric teleconnection is also associated with a change in the strength and position of
+the tropospheric jet; the so-called “subtropical route.” In addition, evidence continues to show that the QBO directly
+influences the underlying tropical tropospheric circulation, referred to as the “tropical route.” These three
+teleconnections allow the QBO to elicit surface impacts globally. Said teleconnections are visualized herein by using
+a metric of planetary wave propagation (eddy heat flux), circulation response (zonal wind), and surface impact (sea
+level pressure). Additionally, metrics of model QBOs (e.g., amplitude, height, width) are produced. ENSO’s coupling
+with the polar stratospheres takes place as the amplitude of ENSO maximizes during boreal fall and winter. By
+default, the POD defines ENSO for NH (SH) analyses using the Nov-Mar (Sep-Jan) Nino3.4 SSTs. Though ENSO’s
+teleconnections are truly global, it interacts with the stratosphere by stimulating tropical-extratropical Rossby waves
+that constructively interfere with the climatological extratropical stationary wave, promoting enhanced upward
+planetary wave propagation into the stratosphere. Similar to the QBO code, ENSO’s teleconnections are visualized
+using the eddy heat flux, the zonal wind, and the sea level pressure.
+
+
+{{CASENAME}}
+ENSO teleconnections northern hemisphere
+
+
+
+
+ENSO teleconnections southern hemisphere
+
+
+
+
+QBO teleconnections northern hemisphere
+
+
+
+
+QBO teleconnections southern hemisphere
+
+
+
+
diff --git a/diagnostics/stc_qbo_enso/stc_qbo_enso.py b/diagnostics/stc_qbo_enso/stc_qbo_enso.py
new file mode 100644
index 000000000..0b9ef72ed
--- /dev/null
+++ b/diagnostics/stc_qbo_enso/stc_qbo_enso.py
@@ -0,0 +1,1217 @@
+# ==============================================================================
+# MDTF Strat-Trop Coupling: QBO and ENSO stratospheric teleconnections
+# ==============================================================================
+#
+# This file is part of the Strat-Trop Coupling: QBO and ENSO stratospheric teleconnections
+# POD of the MDTF code package (see mdtf/MDTF-diagnostics/LICENSE.txt)
+#
+# STC QBO and ENSO stratospheric teleconnections
+# Last update: 2023-10-03
+#
+# This script and its helper scripts (“stc_qbo_enso_plottingcodeqbo.py” and “stc_qbo_enso_plottingcodeenso.py”) do
+# calculations to assess the representation of stratospheric teleconnections associated with the Quasi-Biennial
+# Oscillation (QBO) and the El Nino Southern Oscillation (ENSO). This POD uses monthly 4D (time x plev x lat x lon)
+# zonal wind, 4D meridional wind, 4D temperature, 3D (time x lat x lon) sea level pressure, and 3D sea surface
+# temperature data. Coupling between the QBO and the boreal polar stratosphere takes place during boreal fall and
+# winter whereas coupling between the QBO and the austral polar stratosphere takes place mainly during austral
+# spring and summer. By default, the POD defines the QBO for NH (SH) analyses using the Oct-Nov (Jul-Aug) 5S-5N
+# 30 hPa zonal winds. The QBO is thought to influence the polar stratospheres, the so-called “polar route,” by
+# modulating the lower stratospheric (~100-50 hPa) and middle stratospheric (~20-5 hPa) mid-latitude circulation. The
+# aforementioned lower stratospheric teleconnection is also associated with a change in the strength and position of
+# the tropospheric jet; the so-called “subtropical route.” In addition, evidence continues to show that the QBO directly
+# influences the underlying tropical tropospheric circulation, referred to as the “tropical route.” These three
+# teleconnections allow the QBO to elicit surface impacts globally. Said teleconnections are visualized herein by using
+# a metric of planetary wave propagation (eddy heat flux), circulation response (zonal wind), and surface impact (sea
+# level pressure). Additionally, metrics of model QBOs (e.g., amplitude, height, width) are produced. ENSO’s coupling
+# with the polar stratospheres takes place as the amplitude of ENSO maximizes during boreal fall and winter. By
+# default, the POD defines ENSO for NH (SH) analyses using the Nov-Mar (Sep-Jan) Nino3.4 SSTs. Though ENSO’s
+# teleconnections are truly global, it interacts with the stratosphere by stimulating tropical-extratropical Rossby waves
+# that constructively interfere with the climatological extratropical stationary wave, promoting enhanced upward
+# planetary wave propagation into the stratosphere. Similar to the QBO code, ENSO’s teleconnections are visualized
+# using the eddy heat flux, the zonal wind, and the sea level pressure.
+#
+# Please see the references for the scientific foundations of this POD.
+#
+# ==============================================================================
+# Version, Contact Info, and License
+# ==============================================================================
+# - Version/revision information: v1.0 (2024-01-23)
+# - PIs: Amy H. Butler, NOAA CSL & Zachary D. Lawrence, CIRES + CU Boulder /NOAA PSL
+# - Developer/point of contact: Dillon Elsbury, dillon.elsbury@noaa.gov
+# - Other contributors: Zachary D. Lawrence, CIRES + CU Boulder / NOAA PSL,
+# zachary.lawrence@noaa.gov; Amy H. Butler, NOAA CSL
+#
+# The MDTF framework is distributed under the LGPLv3 license (see LICENSE.txt).
+#
+# ==============================================================================
+# Functionality
+# ==============================================================================
+# This POD contains three scripts. The primary script is stc_qbo_enso.py. There
+# are two helper scripts with functions used by the primary script that are
+# called stc_qbo_enso_plottingcodeqbo.py and stc_qbo_enso_plottingcodeenso.py.
+# The primary script stc_qbo_enso.py goes through these basic steps:
+# (1) Loads in the reanalysis data and restricts the time period to 1979-2014.
+# The 1979-2014 period is the default period, but this can be altered by
+# modifying the FIRSTYR and LASTYR environment variables, which can be
+# specified in config_file.jsonc and the settings.jsonc.
+# (2) Computes the reanalysis ENSO indices and composities the zonal-mean zonal wind,
+# zonal-mean eddy heat flux, and sea level pressure around the El Nino and
+# La Nina years.
+# (3) Does the same as (2), but for the QBO. The POD then produces reanalysis
+# QBO metrics. By default, the QBO indexing and QBO metrics are calculated by
+# defining the QBO at 30 hPa. This can be altered using the QBOisobar environment
+# variable defined in config_file.jsonc and settings.jsonc.
+# (4) Loads in the model data and restricts the time period to 1979-2014 by default.
+# The vertical ("plev") axis of the 4D fields (zonal wind, meridional wind, and
+# temperature) is then modified, if necessary, to have pressure levels
+# denoted in hPa rather than Pa, which is used in some data sets.
+# (5) Computes the model ENSO indices and composites the zonal-mean zonal wind, zonal-
+# mean eddy heat flux, and sea level pressure around the model El Nino/La Nina years.
+# (6) Runs the QBO metric code using the default definition of the QBO at 30 hPa.
+# If a QBO is detected (some models cannot simulate one), proceeds to compute
+# model QBO indices and composite the zonal-mean zonal wind, zonal-mean eddy heat
+#      flux, and sea level pressure around easterly and westerly QBO years.
+#
+# ==============================================================================
+# Required programming language and libraries
+# ==============================================================================
+# This POD is done fully in python, and primarily makes use of numpy and
+# xarray to read, subset, and transform the data. It also makes use of scipy to
+# calculate the fast fourier transform (FFT) of the tropical stratospheric zonal
+# winds to identify the QBO using its frequency spectrum (20-36 months in
+# observations). matplotlib and cartopy are used for general plotting and
+# creating map plots.
+#
+# ==============================================================================
+# Required model input variables
+# ==============================================================================
+# This POD requires monthly-mean fields of
+# - 4D (time x plev x lat x lon) zonal wind velocity (ua, units: m/s)
+# - 4D (time x plev x lat x lon) meridional wind velocity (va, units: m/s)
+# - 4D (time x plev x lat x lon) temperature (ta, units: Kelvin)
+# - 3D (time x lat x lon) sea level pressure (psl, units: Pa)
+# - 3D (time x lat x lon) sea surface temperature (tos, units: degC, as requested in settings.jsonc)
+#
+# ==============================================================================
+# References
+# ==============================================================================
+# QBO metrics related papers:
+#
+# Schenzinger, Verena, et al. "Defining metrics of the Quasi-Biennial Oscillation in
+# global climate models." Geoscientific Model Development 10.6 (2017): 2157-2168
+# doi.org/10.5194/gmd-10-2157-2017
+# Richter, Jadwiga H., et al. "Progress in simulating the quasi‐biennial oscillation
+# in CMIP models." Journal of Geophysical Research: Atmospheres 125.8 (2020):
+# e2019JD032362. doi.org/10.1029/2019JD032362
+# Pascoe, Charlotte L., et al. "The quasi‐biennial oscillation: Analysis using ERA‐40
+# data." Journal of Geophysical Research: Atmospheres 110.D8 (2005).
+# doi.org/10.1029/2004JD004941
+#
+# QBO teleconnections, surface impacts, and indexing methods
+#
+# Gray, Lesley J., et al. "Surface impacts of the quasi biennial oscillation."
+# Atmospheric Chemistry and Physics 18.11 (2018): 8227-8247.
+# doi.org/10.5194/acp-18-8227-2018
+# Rao, Jian, et al. "Southern hemisphere response to the quasi-biennial oscillation
+# in the CMIP5/6 models." Journal of Climate 36.8 (2023): 2603-2623.
+# doi.org/10.1175/JCLI-D-22-0675.1
+#
+# ENSO teleconnections, surface impacts, and indexing methods
+#
+# Domeisen, Daniela IV, Chaim I. Garfinkel, and Amy H. Butler. "The teleconnection
+# of El Niño Southern Oscillation to the stratosphere." Reviews of Geophysics 57.1
+# (2019): 5-47. doi.org/10.1029/2018RG000596
+# Iza, Maddalen, Natalia Calvo, and Elisa Manzini. "The stratospheric pathway of
+# La Niña." Journal of Climate 29.24 (2016): 8899-8914.
+# doi.org/10.1175/JCLI-D-16-0230.1
+
+import os
+import xarray as xr
+import numpy as np
+import xesmf as xe
+
+import matplotlib as mpl
+import matplotlib.pyplot as plt
+import cartopy.crs as ccrs
+
+from scipy.fft import fft,fftfreq
+from scipy import interpolate
+
+from stc_qbo_enso_plottingcodeqbo import qbo_uzm
+from stc_qbo_enso_plottingcodeqbo import qbo_vt
+from stc_qbo_enso_plottingcodeqbo import qbo_slp
+
+from stc_qbo_enso_plottingcodeenso import enso_uzm
+from stc_qbo_enso_plottingcodeenso import enso_vt
+from stc_qbo_enso_plottingcodeenso import enso_slp
+
+mpl.rcParams['font.family'] = 'sans-serif'
+mpl.rcParams['font.sans-serif'] = 'Roboto'
+mpl.rcParams['font.size'] = 12
+mpl.rcParams['hatch.color']='gray'
+
+def qbo_metrics(ds,QBOisobar):
+
+ r""" Calculates Quasi-Biennial Oscillation (QBO) metrics for the an input
+ zonal wind dataset (dimensions = time x level x latitude x longitude)
+
+ Parameters
+ ----------
+ ds : `xarray.DataArray` or `xarray.Dataset`
+ The input DataArray or Dataset for which to calculate QBO diagnostics for
+
+ Returns
+ -------
+ period_min: scalar
+ The minimum number of months comprising a full QBO cycle (a period of easterlies [EQBO]
+ and westerlies [WQBO])
+
+ period_mean: scalar
+ The average QBO cycle (EQBO + WQBO) duration in months
+
+ period_max: scalar
+ The maximum QBO cycle (EQBO + WQBO) duration in months
+
+ easterly_amp: scalar
+ The average easterly amplitude, arrived at by averaging together the minimum latitudinally
+ averaged 5S-5N 10 hPa monthly zonal wind from each QBO cycle
+
+    westerly_amp: scalar
+        The average westerly amplitude, arrived at by averaging together the maximum latitudinally
+        averaged 5S-5N 10 hPa monthly zonal wind from each QBO cycle
+
+ qbo_amp: scalar
+ The total QBO amplitude, which is estimated by adding half of the mean easterly amplitude
+ (easterly_amp) to half of the mean westerly amplitude (westerly_amp)
+
+ lowest_lev: scalar
+ The lowermost pressure level at which the the latitudinally averaged 5S-5N QBO Fourier
+ amplitude is equal to 10% of its maximum value
+
+ latitudinal_extent: scalar
+ The full width at half maximum of a Gaussian fit to the 10 hPa portion of the QBO
+ Fourier amplitude pressure-latitude cross section
+
+ Notes
+ -----
+    The input xarray variable ds is assumed to have dimensions named "time", "lev", "lat", and "lon".
+    If your data uses different dimension names, e.g., "latitude" or "level", use the rename method:
+ ds.rename({'latitude':'lat'})
+ ds.rename({'level':'lev'})
+
+ period_min is required to be >= 14 months. This requirement is used because period_min and
+ period_max are the time bounds that determine which frequencies are most "QBO-like." This
+ step is needed, because say e.g., period_min was allowed to be less than 14 months and ended
+ up being equal to 12 months. Then the annual cycle zonal wind variability would be deemed
+ "QBO-like" and annual cycle variability would be incorporated into the QBO Fourier amplitude
+ calculations, rendering the Fourier amplitude and all of the QBO spatial metrics useless.
+ """
+
+ print ("Running the QBO metrics function 'qbo_metrics'")
+
+ if ds.lev.values[-1] > ds.lev.values[0]:
+ ds = ds.reindex(lev=list(reversed(ds.lev)))
+
+ uwnd = ds.ua.values
+
+    # Subset at the QBO-defining isobar (QBOisobar; 30 hPa by default) #
+ subset = ds.sel(lev=QBOisobar)
+
+ # Select 5S-5N winds #
+ tropical_subset = subset.isel(lat = np.logical_and(subset.lat >= -5, subset.lat <= 5))
+
+    # Latitudinally weight and average between 5S-5N
+ qbo = tropical_subset.mean('lat')
+ weights = np.cos(np.deg2rad(tropical_subset.lat.values))
+ interim = np.multiply(tropical_subset.ua.values,weights[np.newaxis,:])
+ interim = np.nansum(interim,axis=1)
+ interim = np.true_divide(interim,np.sum(weights))
+ qbo.ua.values[:] = interim[:]
+
+ # Smooth with five month running mean #
+ qbo = qbo.rolling(time=5, center=True).mean()
+
+ # Identify the indices corresponding to QBO phase changes (e.g., + zonal wind (westerly) -> - zonal wind (easterly))
+ zero_crossings = np.where(np.diff(np.sign(qbo.ua.values)))[0]
+
+ # Using the phase change indices, identify QBO cycles: a set of easterlies and westerlies. After doing this, #
+ # the minimum/maximum/average QBO cycle duration will be retrieved. The first phase change index from #
+ # zero_crossings is excluded in the event that the first QBO cycle is close to making a phase change, which would bias #
+ # the QBO duration statistics low. #
+ store = []
+ cycles = []
+ periods = []
+
+ for i,v in enumerate(zero_crossings):
+
+ # Append pairs of easterly/westerly winds to "store" #
+ if i != 0:
+ tmp = qbo.ua.values[zero_crossings[i-1]+1:zero_crossings[i]+1]
+ store.append(tmp)
+
+ # Retrieve the most recent QBO cycle (easterlies + westerlies) from store. Loop looks for even number indices. #
+ # E.g., if i == 2, one set of QBO easterlies and westerlies has been appended to store already. #
+ if i != 0 and i % 2 == 0:
+ concat = np.concatenate((store[-2],store[-1]))
+
+ # Inserting requirement that each cycle must be at least 14 months. No observed QBO cycle has progressed #
+ # this quickly, but cycles do in the models. This requirement is essential because the minimum and maximum QBO cycle
+ # durations are used to calculate the QBO Fourier amplitude. A minimum QBO cycle duration of, say, 12 months #
+ # would lead to the QBO Fourier amplitude overlapping with the annual cycle Fourier amplitude.
+ if len(concat) >= 14:
+ cycles.append(concat)
+ periods.append(len(concat))
+
+ # Retrieve the minimum/maximum/average QBO cycle duration #
+ period_min = np.round(np.nanmin(periods),1)
+ period_max = np.round(np.nanmax(periods),1)
+ period_mean = np.round(np.nanmean(periods),1)
+
+ print (period_min, "minimum period (months)")
+ print (period_mean, "mean period (months)")
+ print (period_max, "maximum period (months)")
+
+ # Retrieve the minimum/maximum zonal wind from each QBO cycle. Averaging the minima (maxima) gives us the #
+    # easterly (westerly) amplitude #
+
+ easterly_amp = np.round(np.nanmean([np.nanmin(v) for v in cycles]),1)
+ westerly_amp = np.round(np.nanmean([np.nanmax(v) for v in cycles]),1)
+
+ print (easterly_amp, 'easterly amplitude')
+ print (westerly_amp, 'westerly amplitude')
+
+ # Define the 10 hPa amplitude as in Richter et al. (2020)
+ qbo_amp = np.abs(easterly_amp/2) + np.abs(westerly_amp/2)
+ qbo_amp = np.round(qbo_amp,1)
+ print (qbo_amp, '10 hPa amplitude')
+
+ #################################################################################################################
+ # Retrieve the QBO Fourier amplitude, defined as in Pascoe et al. (2005) as the ratio of the QBO power spectrum #
+ # to the power spectrum of the entire zonal wind dataset, multiplied by a metric of the zonal wind variability, #
+ # the standard deviation of the zonal wind dataset. #
+ #################################################################################################################
+
+ # Standard deviation across entire zonal wind dataset #
+ std = np.nanstd(uwnd,axis=0)
+
+ # Define Fourier frequencies comprising data and filter for frequencies between minimum/maximum QBO cycle duration #
+ freq = 1/fftfreq(len(uwnd))
+ arr = np.where((freq > period_min) & (freq < period_max))[0]
+
+ # FFT of entire zonal wind dataset. Square and sum Fourier coefficients. np.abs applies unneeded square root, hence #
+ # np.power to power 2 is used to undo this #
+ y = fft(uwnd,axis=0)
+ amplitudes = np.power(np.abs(y)[:len(y)//2],2)
+
+ # Calculate ratio of QBO power spectrum to full zonal wind power spectrum #
+ quotients = []
+ for i,v in enumerate(ds.lev.values):
+ qbodata = np.nansum(amplitudes[arr,i],axis=0)
+ alldata = np.nansum(amplitudes[1:,i],axis=0)
+ quot = np.true_divide(qbodata,alldata)
+ quotients.append(quot)
+ filtered = np.array(quotients)
+
+ # Ratio of the QBO power spectrum to the power #
+ # spectrum of the entire model dataset (units:%) #
+
+ vmin = 0
+ vmax = 100
+ vlevs = np.linspace(vmin,vmax,num=21)
+
+ from palettable.colorbrewer.diverging import RdBu_11
+ x2,y2 = np.meshgrid(ds.lat.values,ds.lev.values)
+
+ plt.title('Ratio of QBO power spectrum\n to zonal wind spectrum (%)')
+ plt.contourf(x2,y2,filtered*100,vmin=vmin,vmax=vmax,levels=vlevs,cmap='coolwarm')
+ plt.semilogy()
+ plt.gca().invert_yaxis()
+ if np.nanmax(ds.lev.values) > 2000:
+ plt.ylabel('Pressure (Pa)')
+ if np.nanmax(ds.lev.values) < 2000:
+ plt.ylabel('Pressure (hPa)')
+ plt.xlabel('Latitude')
+ plt.colorbar()
+
+ # Retrieve the Fourier amplitude by multiplying aforementioned ratio by standard deviation of zonal wind #
+ fa = np.multiply(filtered,std)
+
+ #################################################################################################################
+ # Do the Fourier amplitude calculations between 5S and 5N to retrieve spatial metrics (e.g., latitudinal width) #
+ # of the QBO #
+ #################################################################################################################
+
+    # hmax is the index of the QBO-defining isobar (QBOisobar; cf. Richter et al. 2020, JGR) #
+ hmax = np.where(ds.lev.values == qbo.lev.values)[0]
+
+    # Retrieve the indices of lats between 5S and 5N #
+ lat_hits = [i for i,v in enumerate(ds.lat.values) if v >= -5 and v <=5]
+
+ # Retrieve the Fourier amplitude profile averaged latitudinally (w/weighting) between 5S and 5N #
+ weights = np.cos(np.deg2rad(ds.lat.values[lat_hits]))
+ interim = np.multiply(fa[:,lat_hits],weights[np.newaxis,:])
+ interim2 = np.nansum(interim,axis=1)
+ height_profile = np.true_divide(interim2,np.sum(weights))
+
+ # Retrieve half the max Fourier amplitude and 10% of the max Fourier amplitude #
+ half_max = np.max(height_profile)/2
+ qbo_base = np.max(height_profile)*0.1
+
+ # Interpolate the equatorial Fourier amplitude profile to have 10000 vertical grid points, enough #
+ # points so that each isobar can be selected to a one tenth of a hPa accuracy #
+ f = interpolate.interp1d(ds.lev.values,height_profile)
+ xnew = np.linspace(ds.lev.values[0],ds.lev.values[-1],num=10000)
+ ynew = f(xnew)
+
+    # Of the 10,000 vertical grid points, find the one corresponding to hmax. #
+ hmax_idx = (np.abs(xnew - ds.lev.values[hmax])).argmin()
+
+ # The lower and upper portions of the fourier amplitude tropical wind height profile, #
+ # which has been interpolated to 10000 grid points. #
+ lower_portion = ynew[:hmax_idx]
+ upper_portion = ynew[hmax_idx:]
+
+ # Same as above, but these are lists of the isobars corresponding to the above fourier amplitudes. #
+ lower_portion_isobar = xnew[:hmax_idx]
+ upper_portion_isobar = xnew[hmax_idx:]
+
+ # Retrieve the indices in the upper/lower portions corresponding to half the fourier max. #
+ lower_vertical_extent = (np.abs(lower_portion - half_max)).argmin()
+ upper_vertical_extent = (np.abs(upper_portion - half_max)).argmin()
+
+    # Find the upper/lower portion isobars corresponding to the half fourier max values identified above. #
+ bottom = lower_portion_isobar[lower_vertical_extent]
+ top = upper_portion_isobar[upper_vertical_extent]
+
+ # Convert the isobars into altitudes in meters. #
+ sfc = 1000 # hPa
+ bottomz = np.log(bottom/sfc)*-7000
+ topz = np.log(top/sfc)*-7000
+
+ # Obtain the vertical extent by differencing the bottomz and topz. #
+ vertical_extent = (topz - bottomz)/1000
+ vertical_extent = np.round(vertical_extent,1)
+ print (vertical_extent, "vertical_extent")
+
+ # Retrieve the lowest lev the QBO extends to using 10% of the maximum Fourier amplitude #
+ # "lower" for CMIP6 datasets and "upper" for ERA5 reanalysis
+ lowest_lev = (lower_portion_isobar[(np.abs(lower_portion - qbo_base)).argmin()])
+ lowest_lev = np.round(lowest_lev,1)
+ print (lowest_lev, "lowest_lev")
+
+ ##############################################################################################
+ ##############################################################################################
+ ##############################################################################################
+
+ # Retrieve the latitudinal extent #
+
+ # https://www.geeksforgeeks.org/python-gaussian-fit/
+ xdata = ds.lat.values
+ ydata = fa[hmax][0]
+ ydata[0] = 0
+ ydata[-1] = 0
+
+ # Recast xdata and ydata into numpy arrays so we can use their handy features
+ xdata = np.array(xdata)
+ ydata = np.array(ydata)
+
+ from scipy.optimize import curve_fit
+
+ def gauss(x, H, A, x0, sigma):
+ return H + A * np.exp(-(x - x0) ** 2 / (2 * sigma ** 2))
+
+ def gauss_fit(x, y):
+ mean = sum(x * y) / sum(y)
+ sigma = np.sqrt(sum(y * (x - mean) ** 2) / sum(y))
+ popt, pcov = curve_fit(gauss, x, y, p0=[min(y), max(y), mean, sigma])
+ return popt
+
+ out = gauss(xdata, *gauss_fit(xdata, ydata))
+
+ f = interpolate.interp1d(ds.lat.values,out)
+ xnew = np.linspace(ds.lat.values[0],ds.lat.values[-1],num=10000)
+ ynew = f(xnew)
+
+ lower_portion = ynew[:5000]
+ upper_portion = ynew[5000:]
+
+ lower_portion_lat = xnew[:5000]
+ upper_portion_lat = xnew[5000:]
+
+ lat1 = lower_portion_lat[(np.abs(lower_portion - (np.max(out)/2))).argmin()]
+ lat2 = upper_portion_lat[(np.abs(upper_portion - (np.max(out)/2))).argmin()]
+
+ latitudinal_extent = np.abs(lat1) + np.abs(lat2)
+ latitudinal_extent = np.round(latitudinal_extent,1)
+
+ print (latitudinal_extent, "latitudinal_extent")
+
+
+ if period_min != period_max:
+ print ('Based on period statistics, dataset is likely to have a QBO')
+ qbo_switch = 1
+ else:
+ print ('Persistent stratospheric easterlies detected - dataset likely does not have QBO')
+ qbo_switch = 0
+
+ metrics = ['minimum period: %s (months)' % period_min,
+ 'mean period: %s (months)' % period_mean,
+ 'maximum period: %s (months)' % period_max,
+ 'easterly amplitude: %s (m/s)' % easterly_amp,
+ 'westerly amplitude: %s (m/s)' % westerly_amp,
+ 'QBO amplitude: %s (m/s)' % qbo_amp,
+ 'lowest QBO level: %s (hPa)' % lowest_lev,
+ 'vertical extent: %s (kilometers)' % vertical_extent,
+ 'latitudinal extent of QBO: %s (degrees)' % latitudinal_extent]
+
+ return metrics, qbo_switch
+
+
+def compute_total_eddy_heat_flux(v, T):
+ r""" Compute the total zonal mean eddy heat flux from meridonal winds
+ and temperatures. The eddy heat flux is calculated as:
+ ehf = zonal_mean( (v - zonal_mean(v)) * (T - zonal_mean(T)))
+
+ Parameters
+ ----------
+ v : `xarray.DataArray`
+ The meridional wind component. Assumed to have the same dimensions as T
+
+ T : `xarray.DataArray`
+ The air temperature. Assumed to have the same dimensions as v
+
+ Returns
+ -------
+ ehf : `xarray.DataArray`
+ The zonal mean eddy heat flux
+
+ Notes
+ -----
+ The input fields v and T are assumed to have dimensions named "lat"
+ and "lon". E.g., if your data has dimensions "latitude" and/or "longitude",
+ use the rename method:
+ ds.rename({'latitude':'lat','longitude':'lon'})
+
+ Ideally v and T would be provided on the same latitude/longitude grid.
+ In practice this is not necessarily the case as some models provide
+ different variables at cell-centers and cell-edges. If this is found
+ to be the case, this function will use xesmf to do bilinear regridding
+ of the meridional wind component to match the grid of the temperatures.
+
+ """
+
+ # Take the zonal means of v and T
+ v_zm = v.mean('lon')
+ T_zm = T.mean('lon')
+
+ # If v and T are on same grid, can multiply the eddies and take zonal mean
+ if (np.array_equal(v.lat,T.lat)) and (np.array_equal(v.lon, T.lon)):
+ ehf = ((v - v_zm) * (T - T_zm)).mean('lon')
+
+ # If v and T are on different grids, interpolate v to T's grid beforehand
+ else:
+ # Set up xESMF regridder with necessary grid-defining datasets
+ print('*** Interpolating v to same grid as T')
+ in_grid = xr.Dataset(
+ {
+ "lat": (["lat"], v.lat.values),
+ "lon": (["lon"], v.lon.values),
+ }
+ )
+
+ out_grid = xr.Dataset(
+ {
+ "lat": (["lat"], T.lat.values),
+ "lon": (["lon"], T.lon.values),
+ }
+ )
+ regridder = xe.Regridder(in_grid, out_grid, "bilinear")
+
+ ehf = (regridder(v - v_zm)*(T - T_zm)).mean('lon')
+
+ return ehf
+
+##################################################################################################
+##################################################################################################
+##################################################################################################
+
+########################
+# --- BEGIN SCRIPT --- #
+########################
+print('\n=======================================')
+print('BEGIN stc_qbo_enso.py ')
+print('=======================================\n')
+
+# Parse MDTF-set environment variables
+print('*** Parse MDTF-set environment variables ...')
+CASENAME = os.environ['CASENAME']
+FIRSTYR = os.environ['FIRSTYR']
+LASTYR = os.environ['LASTYR']
+WK_DIR = os.environ['WK_DIR']
+OBS_DATA = os.environ['OBS_DATA']
+QBOisobar = os.environ['QBOisobar']
+
+###########################################################################
+################################ observations #############################
+###########################################################################
+
+print(f'*** Now working on obs data\n------------------------------')
+# Obs inputs: one atmospheric file (ua/ehf/psl) and one ocean file (tos).
+obs_file_atm = OBS_DATA+'/stc-qbo-enso-obs-atm.nc'
+obs_file_ocn = OBS_DATA+'/stc-qbo-enso-obs-ocn.nc'
+
+print(f'*** Reading obs data from {obs_file_atm}')
+obs_atm = xr.open_dataset(obs_file_atm)
+
+print (obs_atm, 'obs_atm')
+
+print(f'*** Reading obs data from {obs_file_ocn}')
+obs_sst = xr.open_dataset(obs_file_ocn)
+
+print (obs_sst, 'obs_sst')
+
+# Subset the data for the user defined first and last years #
+
+obs_atm = obs_atm.sel(time=slice(str(FIRSTYR),str(LASTYR)))
+obs_sst = obs_sst.sel(time=slice(str(FIRSTYR),str(LASTYR)))
+
+# Create the POD figures directory
+
+plot_dir = f'{WK_DIR}/obs/'
+
+################################################
+print ('*** Running the observed ENSO indexing')
+################################################
+
+# Extract the tropical domain (5S-5N) #
+ENSO = obs_sst.isel(lat = np.logical_and(obs_sst.lat >= -5, obs_sst.lat <= 5))
+
+# Extract date and longitude info from ENSO dataset #
+date_first = obs_sst.time[0]
+date_last = obs_sst.time[-1]
+lon_first = obs_sst.lon.values[0]
+
+# Identify the correct ENSO longitudinal grid #
+# Both branches select the Nino3.4 box (170W-120W); the first handles a
+# -180..180 longitude convention, the second a 0..360 convention.
+if lon_first < 0:
+    ENSO = ENSO.sel(lon=slice(-170,-120))
+else:
+    ENSO = ENSO.sel(lon=slice(190,240))
+
+# Latitudinally average the ENSO data #
+# mean('lat') is used only as a template; its tos values are overwritten
+# below with the cos(lat)-weighted average.
+weighted_mean = ENSO.mean('lat')
+weights = np.cos(np.deg2rad(ENSO.lat.values))
+interim = np.multiply(ENSO.tos.values,weights[np.newaxis,:,np.newaxis])
+interim = np.nansum(interim,axis=1)
+interim = np.true_divide(interim,np.sum(weights))
+weighted_mean.tos.values[:] = interim[:]
+
+def enso_index(seasonal):
+    """Identify La Nina and El Nino years from seasonal-mean SSTs.
+
+    Builds a 5-month, day-weighted seasonal-mean SST index, standardizes
+    it, and returns the years whose standardized anomaly is <= -1 (La
+    Nina) or >= 1 (El Nino).
+
+    Parameters
+    ----------
+    seasonal : xarray.Dataset
+        Area-averaged Nino3.4 SSTs ("tos") subset to exactly the five
+        months of one hemisphere's season; length must be divisible by 5.
+
+    Returns
+    -------
+    (list, list)
+        (nina_years, nino_years) lists of years.
+    """
+
+    # Create 5-month seasonally averaged standardized ENSO anomalies. Weight each month by number of days comprising month #
+    # NOTE(review): the day weights come from the first season only and are
+    # reused for every season -- confirm this approximation is intended.
+    day_in_month_weights = seasonal.time.dt.days_in_month.values[:5]/np.sum(seasonal.time.dt.days_in_month.values[:5])
+    sstindex = np.reshape(seasonal.tos.values,(int(len(seasonal.tos.values)/5),5))
+    # nanmean of the day-weighted values carries a constant 1/5 factor; it
+    # cancels out in the standardization below.
+    sstindex = np.nanmean(np.multiply(sstindex,day_in_month_weights[np.newaxis,:]),axis=1)
+    anom = np.subtract(sstindex,np.nanmean(sstindex))
+    anom = np.true_divide(anom,np.nanstd(sstindex))
+
+    # Get the unique years in ascending order. sorted() replaces iteration
+    # over a bare set (np.sort followed by set() discarded the ordering, and
+    # set iteration order is not guaranteed), so years[i] reliably lines up
+    # with the i-th season in "anom".
+    years = sorted(set(seasonal.time.dt.year.values))
+    nina_years = [years[i] for i,v in enumerate(anom) if v <= -1]
+    nino_years = [years[i] for i,v in enumerate(anom) if v >= 1]
+
+    return nina_years, nino_years
+
+# Subsample ENSO data for NH (NDJFM season) #
+seasonal = weighted_mean.sel(time=slice('%s-11-01' % date_first.dt.year.values, '%s-03-31' % date_last.dt.year.values))
+seasonal = seasonal.sel(time=seasonal.time.dt.month.isin([11,12,1,2,3])).mean('lon')
+nh_nina, nh_nino = enso_index(seasonal)
+seasonal.close()
+
+# Subsample ENSO data for SH (SONDJ season) #
+seasonal = weighted_mean.sel(time=slice('%s-09-01' % date_first.dt.year.values, '%s-01-31' % date_last.dt.year.values))
+seasonal = seasonal.sel(time=seasonal.time.dt.month.isin([9,10,11,12,1])).mean('lon')
+sh_nina, sh_nino = enso_index(seasonal)
+seasonal.close()
+
+# Store the Nina/Nino years in a dictionary to call later #
+enso_dict = {}
+enso_dict['NH'] = nh_nina, nh_nino
+enso_dict['SH'] = sh_nina, sh_nino
+
+##########################################################################
+# Define ENSO plotting parameters to be passed to the plotting functions #
+##########################################################################
+
+# Each hemisphere gets the obs fields restricted to its season, the month
+# titles/numbers for the subplot columns, and the plot axis limits.
+nh_enso_uzm = obs_atm.ua.sel(time=slice('%s-11-01' % date_first.dt.year.values, '%s-03-31' % date_last.dt.year.values))
+nh_enso_vtzm = obs_atm.ehf.sel(time=slice('%s-11-01' % date_first.dt.year.values, '%s-03-31' % date_last.dt.year.values))
+nh_enso_psl = obs_atm.psl.sel(time=slice('%s-11-01' % date_first.dt.year.values, '%s-03-31' % date_last.dt.year.values))
+nh_enso_titles = ['November','December','January','February','March']
+nh_enso_plot_months = [11,12,1,2,3]
+nh_enso_axes = [0,90,1000,1]            # [lat_min, lat_max, p_bottom, p_top]
+nh_enso_psl_axes = [-180, 180, 20, 90]  # [lon_min, lon_max, lat_min, lat_max]
+
+sh_enso_uzm = obs_atm.ua.sel(time=slice('%s-09-01' % date_first.dt.year.values, '%s-01-31' % date_last.dt.year.values))
+sh_enso_vtzm = obs_atm.ehf.sel(time=slice('%s-09-01' % date_first.dt.year.values, '%s-01-31' % date_last.dt.year.values))
+sh_enso_psl = obs_atm.psl.sel(time=slice('%s-09-01' % date_first.dt.year.values, '%s-01-31' % date_last.dt.year.values))
+sh_enso_titles = ['September','October','November','December','January']
+sh_enso_plot_months = [9,10,11,12,1]
+sh_enso_axes = [-90,0,1000,1]
+sh_enso_psl_axes = [-180, 180, -90, -20]
+
+uzm_dict = {}
+uzm_dict['NH'] = nh_enso_uzm, nh_enso_titles, nh_enso_plot_months, nh_enso_axes
+uzm_dict['SH'] = sh_enso_uzm, sh_enso_titles, sh_enso_plot_months, sh_enso_axes
+
+vtzm_dict = {}
+vtzm_dict['NH'] = nh_enso_vtzm, nh_enso_titles, nh_enso_plot_months, nh_enso_axes
+vtzm_dict['SH'] = sh_enso_vtzm, sh_enso_titles, sh_enso_plot_months, sh_enso_axes
+
+psl_dict = {}
+psl_dict['NH'] = nh_enso_psl, nh_enso_titles, nh_enso_plot_months, ccrs.NorthPolarStereo(), nh_enso_psl_axes
+psl_dict['SH'] = sh_enso_psl, sh_enso_titles, sh_enso_plot_months, ccrs.SouthPolarStereo(), sh_enso_psl_axes
+
+###############################################
+print ('*** Running the observed QBO indexing')
+###############################################
+
+print (QBOisobar, "QBOisobar")
+
+# Subset atmospheric data for user defined isobar #
+# NOTE(review): QBOisobar is a string from os.environ -- confirm that
+# .sel(lev=...) resolves it against the lev coordinate as expected.
+subset = obs_atm.sel(lev=QBOisobar)
+
+# Select 5S-5N winds #
+tropical_subset = subset.interp(lat=[-5,-2.5,0,2.5,5])
+
+# Latitudinally weight and average #
+# As above, mean('lat') is only a template; its ua values are overwritten
+# with the cos(lat)-weighted average.
+qbo = tropical_subset.mean('lat')
+weights = np.cos(np.deg2rad(tropical_subset.lat.values))
+interim = np.multiply(tropical_subset.ua.values,weights[np.newaxis,:])
+interim = np.nansum(interim,axis=1)
+interim = np.true_divide(interim,np.sum(weights))
+qbo.ua.values[:] = interim[:]
+
+def qbo_index(seasonal):
+    """Identify easterly- and westerly-QBO years from seasonal-mean winds.
+
+    Builds a 2-month, day-weighted seasonal-mean zonal-wind index,
+    standardizes it, and returns the years whose standardized anomaly is
+    <= -1 (easterly QBO) or >= 1 (westerly QBO).
+
+    Parameters
+    ----------
+    seasonal : xarray.Dataset
+        Latitudinally averaged equatorial winds ("ua") subset to exactly
+        two months per year; length must be divisible by 2.
+
+    Returns
+    -------
+    (list, list)
+        (eqbo_years, wqbo_years) lists of years.
+    """
+
+    # Create 2-month seasonally averaged standardized QBO anomalies. Weight each month by number of days comprising month #
+    # NOTE(review): the day weights come from the first season only and are
+    # reused for every season -- confirm this approximation is intended.
+    day_in_month_weights = seasonal.time.dt.days_in_month.values[:2]/np.sum(seasonal.time.dt.days_in_month.values[:2])
+    qboindex = np.reshape(seasonal.ua.values,(int(len(seasonal.ua.values)/2),2))
+    qboindex = np.nanmean(np.multiply(qboindex,day_in_month_weights[np.newaxis,:]),axis=1)
+    anom = np.subtract(qboindex,np.nanmean(qboindex))
+    anom = np.true_divide(anom,np.nanstd(qboindex))
+
+    # Get the unique years in ascending order. sorted() replaces iteration
+    # over a bare set (np.sort followed by set() discarded the ordering, and
+    # set iteration order is not guaranteed), so years[i] reliably lines up
+    # with the i-th season in "anom".
+    years = sorted(set(seasonal.time.dt.year.values))
+    eqbo_years = [years[i] for i,v in enumerate(anom) if v <= -1]
+    wqbo_years = [years[i] for i,v in enumerate(anom) if v >= 1]
+
+    return eqbo_years, wqbo_years
+
+# Subsample QBO data for NH (Oct-Nov winds define the NH QBO index) #
+seasonal = qbo.sel(time=qbo.time.dt.month.isin([10,11])).mean('lon')
+nh_eqbo, nh_wqbo = qbo_index(seasonal)
+seasonal.close()
+
+# Subsample QBO data for SH (Jul-Aug winds define the SH QBO index) #
+seasonal = qbo.sel(time=qbo.time.dt.month.isin([7,8])).mean('lon')
+sh_eqbo, sh_wqbo = qbo_index(seasonal)
+seasonal.close()
+
+# Store the eQBO/wQBO years in a dictionary to call later #
+qbo_dict = {}
+qbo_dict['NH'] = nh_eqbo, nh_wqbo
+qbo_dict['SH'] = sh_eqbo, sh_wqbo
+
+# Extract date and longitude info from QBO dataset #
+# (Re-binds date_first/date_last/lon_first, previously taken from obs_sst.)
+date_first = obs_atm.time[0]
+date_last = obs_atm.time[-1]
+lon_first = obs_atm.lon.values[0]
+
+#########################################################################
+# Define QBO plotting parameters to be passed to the plotting functions #
+#########################################################################
+
+# NH composites use ONDJF; SH composites use JASON.
+nh_qbo_uzm = obs_atm.ua.sel(time=slice('%s-10-01' % date_first.dt.year.values, '%s-02-28' % date_last.dt.year.values))
+nh_qbo_vtzm = obs_atm.ehf.sel(time=slice('%s-10-01' % date_first.dt.year.values, '%s-02-28' % date_last.dt.year.values))
+nh_qbo_psl = obs_atm.psl.sel(time=slice('%s-10-01' % date_first.dt.year.values, '%s-02-28' % date_last.dt.year.values))
+nh_qbo_titles = ['October','November','December','January','February']
+nh_qbo_plot_months = [10,11,12,1,2]
+nh_qbo_axes = [0,90,1000,1]            # [lat_min, lat_max, p_bottom, p_top]
+nh_qbo_psl_axes = [-180, 180, 20, 90]  # [lon_min, lon_max, lat_min, lat_max]
+
+sh_qbo_uzm = obs_atm.ua.sel(time=slice('%s-07-01' % date_first.dt.year.values, '%s-11-30' % date_last.dt.year.values))
+sh_qbo_vtzm = obs_atm.ehf.sel(time=slice('%s-07-01' % date_first.dt.year.values, '%s-11-30' % date_last.dt.year.values))
+sh_qbo_psl = obs_atm.psl.sel(time=slice('%s-07-01' % date_first.dt.year.values, '%s-11-30' % date_last.dt.year.values))
+sh_qbo_titles = ['July','August','September','October','November']
+sh_qbo_plot_months = [7,8,9,10,11]
+sh_qbo_axes = [-90,0,1000,1]
+sh_qbo_psl_axes = [-180, 180, -90, -20]
+
+uzm_qbo_dict = {}
+uzm_qbo_dict['NH'] = nh_qbo_uzm, nh_qbo_titles, nh_qbo_plot_months, nh_qbo_axes
+uzm_qbo_dict['SH'] = sh_qbo_uzm, sh_qbo_titles, sh_qbo_plot_months, sh_qbo_axes
+
+vtzm_qbo_dict = {}
+vtzm_qbo_dict['NH'] = nh_qbo_vtzm, nh_qbo_titles, nh_qbo_plot_months, nh_qbo_axes
+vtzm_qbo_dict['SH'] = sh_qbo_vtzm, sh_qbo_titles, sh_qbo_plot_months, sh_qbo_axes
+
+psl_qbo_dict = {}
+psl_qbo_dict['NH'] = nh_qbo_psl, nh_qbo_titles, nh_qbo_plot_months, ccrs.NorthPolarStereo(), nh_qbo_psl_axes
+psl_qbo_dict['SH'] = sh_qbo_psl, sh_qbo_titles, sh_qbo_plot_months, ccrs.SouthPolarStereo(), sh_qbo_psl_axes
+
+hemispheres = ['NH','SH']
+
+# Make the observed ENSO and QBO composite figures for each hemisphere.
+for hemi in hemispheres:
+
+    ###############################################
+    print ('*** Calling the observed ENSO indices')
+    ###############################################
+    obs_nina, obs_nino = enso_dict[hemi]
+
+    print (obs_nina,'obs_nina')
+    print (obs_nino, 'obs_nino')
+
+    ###################################################################
+    print ('*** Running the observed ENSO zonal mean zonal wind calcs')
+    ###################################################################
+
+    obstos_plot = f'{plot_dir}/obs-enso34-uzm-{FIRSTYR}-{LASTYR}-%s.png' % hemi
+    out_fig, out_ax = enso_uzm(uzm_dict[hemi][0],obs_nina,obs_nino,uzm_dict[hemi][1],uzm_dict[hemi][2],uzm_dict[hemi][3])
+    out_fig.savefig(obstos_plot,dpi=700)
+
+    ############################################################
+    print ('*** Running the observed ENSO eddy heat flux calcs')
+    ############################################################
+    obsvt_plot = f'{plot_dir}/obs-enso34-vt-{FIRSTYR}-{LASTYR}-%s.png' % hemi
+    out_fig, out_ax = enso_vt(vtzm_dict[hemi][0],obs_nina,obs_nino,vtzm_dict[hemi][1],vtzm_dict[hemi][2],vtzm_dict[hemi][3])
+    out_fig.savefig(obsvt_plot,dpi=700)
+
+    ##########################################################
+    print ('*** Running the observed ENSO sea level pressure')
+    ##########################################################
+    obsps_plot = f'{plot_dir}/obs-enso34-psl-{FIRSTYR}-{LASTYR}-%s.png' % hemi
+    out_fig, out_ax = enso_slp(psl_dict[hemi][0],obs_nina,obs_nino,psl_dict[hemi][1],psl_dict[hemi][2],psl_dict[hemi][3],psl_dict[hemi][4])
+    out_fig.savefig(obsps_plot,dpi=700)
+
+    ##############################################
+    print ('*** Calling the observed QBO indices')
+    ##############################################
+    obs_eqbo, obs_wqbo = qbo_dict[hemi]
+
+    print (obs_eqbo,'obs_eqbo')
+    print (obs_wqbo, 'obs_wqbo')
+
+    #####################################################################
+    print ('*** Running the observed QBO zonal mean zonal wind plotting')
+    #####################################################################
+    uzm_plot = f'{plot_dir}/obs-qbo{QBOisobar}hPa-uzm-{FIRSTYR}-{LASTYR}-%s.png' % hemi
+    out_fig, out_ax = qbo_uzm(uzm_qbo_dict[hemi][0],obs_eqbo,obs_wqbo,QBOisobar,uzm_qbo_dict[hemi][1],uzm_qbo_dict[hemi][2],uzm_qbo_dict[hemi][3])
+    out_fig.savefig(uzm_plot,dpi=700)
+
+    #########################################################################
+    print ('*** Running the observed QBO zonal mean eddy heat flux plotting')
+    #########################################################################
+    vtzm_plot = f'{plot_dir}/obs-qbo{QBOisobar}hPa-vt-{FIRSTYR}-{LASTYR}-%s.png' % hemi
+    out_fig, out_ax = qbo_vt(vtzm_qbo_dict[hemi][0],obs_eqbo,obs_wqbo,QBOisobar,vtzm_qbo_dict[hemi][1],vtzm_qbo_dict[hemi][2],vtzm_qbo_dict[hemi][3])
+    out_fig.savefig(vtzm_plot,dpi=700)
+
+    ##################################################################
+    print ('*** Running the observed QBO sea level pressure plotting')
+    ##################################################################
+    psl_plot = f'{plot_dir}/obs-qbo{QBOisobar}hPa-psl-{FIRSTYR}-{LASTYR}-%s.png' % hemi
+    out_fig, out_ax = qbo_slp(psl_qbo_dict[hemi][0],obs_eqbo,obs_wqbo,QBOisobar,psl_qbo_dict[hemi][1],psl_qbo_dict[hemi][2],psl_qbo_dict[hemi][3],psl_qbo_dict[hemi][4])
+    out_fig.savefig(psl_plot,dpi=700)
+
+
+print('*** Running the observed QBO metrics')
+# qbo_metrics returns a list of text metrics and a 0/1 flag ("switch")
+# indicating whether a QBO was detected; the flag is only used for the
+# model data below.
+metricsout, switch = qbo_metrics(obs_atm,QBOisobar)
+
+# Write the metrics to a plain-text file in the obs figures directory.
+filepath = f'{plot_dir}/obs-qbo{QBOisobar}hPa-metrics-{FIRSTYR}-{LASTYR}.txt'
+with open(filepath, 'w') as file_handler:
+    file_handler.write(f"{'QBO metrics: periodicity and spatial characteristics'}\n")
+    file_handler.write(f"{' '}\n")
+    for item in metricsout:
+        file_handler.write(f"{item}\n")
+
+###############################################
+# Tidy up by closing the open xarray datasets #
+###############################################
+
+obs_atm.close()
+obs_sst.close()
+ENSO.close()
+weighted_mean.close()
+nh_enso_uzm.close()
+nh_enso_vtzm.close()
+nh_enso_psl.close()
+sh_enso_uzm.close()
+sh_enso_vtzm.close()
+sh_enso_psl.close()
+subset.close()
+tropical_subset.close()
+qbo.close()
+nh_qbo_uzm.close()
+nh_qbo_vtzm.close()
+nh_qbo_psl.close()
+sh_qbo_uzm.close()
+sh_qbo_vtzm.close()
+sh_qbo_psl.close()
+
+###########################################################################
+################################## model ##################################
+###########################################################################
+
+plot_dir = f'{WK_DIR}/model/'
+
+# Read the input model data
+print(f'*** Now starting work on {CASENAME}\n------------------------------')
+print('*** Reading variables ...')
+
+# File paths for the five model input variables, supplied by the framework.
+sfi = os.environ['TOS_FILE']  # sea surface temperature
+pfi = os.environ['PSL_FILE']  # sea level pressure
+ufi = os.environ['UA_FILE']   # zonal wind
+vfi = os.environ['VA_FILE']   # meridional wind
+tfi = os.environ['TA_FILE']   # air temperature
+
+tos = xr.open_dataset(sfi)
+psl = xr.open_dataset(pfi)
+ua = xr.open_dataset(ufi)
+va = xr.open_dataset(vfi)
+ta = xr.open_dataset(tfi)
+
+# Compute the diagnostics (note, here we assume that all model variables are the same length in time)
+mod_firstyr = ua.time.dt.year.values[0]
+mod_lastyr = ua.time.dt.year.values[-1]
+print(mod_firstyr,mod_lastyr)
+print (FIRSTYR,LASTYR)
+
+# Subset each variable to the requested period; ua is also zonally averaged
+# here (the other variables keep their lon dimension).
+ps = psl.sel(time=slice(str(FIRSTYR),str(LASTYR)))
+uas = ua.sel(time=slice(str(FIRSTYR),str(LASTYR))).mean('lon')
+vas = va.sel(time=slice(str(FIRSTYR),str(LASTYR)))
+tas = ta.sel(time=slice(str(FIRSTYR),str(LASTYR)))
+toss = tos.sel(time=slice(str(FIRSTYR),str(LASTYR)))
+
+print(f'***Determine whether model pressure levels are in Pa or hPa, convert to hPa')
+# NOTE(review): getattr(...,'units') will raise if a "units" attribute is
+# absent -- assumes CF-compliant inputs.
+if getattr(uas.lev,'units') == 'Pa':
+    print(f'**Converting pressure levels to hPa')
+    uas = uas.assign_coords({"lev": (uas.lev/100.)})
+    uas.lev.attrs['units'] = 'hPa'
+    vas = vas.assign_coords({"lev": (vas.lev/100.)})
+    vas.lev.attrs['units'] = 'hPa'
+    tas = tas.assign_coords({"lev": (tas.lev/100.)})
+    tas.lev.attrs['units'] = 'hPa'
+
+# Convert the sea level pressure *values* from Pa to hPa if needed.
+if getattr(ps.psl,'units') == 'Pa':
+    print(f'**Converting pressure levels to hPa')
+    ps.psl.attrs['units'] = 'hPa'
+    ps.psl.values[:] = ps.psl.values/100.
+
+# Create the POD figures directory
+# (plot_dir was already set to this value above.)
+plot_dir = f'{WK_DIR}/model/'
+
+#############################################
+print ('*** Running the model ENSO indexing')
+#############################################
+
+# Extract the tropical domain (5S-5N) #
+ENSO = toss.isel(lat = np.logical_and(toss.lat >= -5, toss.lat <= 5))
+
+# Extract date and longitude info from ENSO dataset #
+date_first = toss.time[0]
+date_last = toss.time[-1]
+lon_first = toss.lon.values[0]
+
+# Identify the correct ENSO longitudinal grid #
+# Both branches select the Nino3.4 box (170W-120W) for either longitude
+# convention (-180..180 vs 0..360).
+if lon_first < 0:
+    ENSO = ENSO.sel(lon=slice(-170,-120))
+else:
+    ENSO = ENSO.sel(lon=slice(190,240))
+
+# Latitudinally average the ENSO data #
+# mean('lat') acts as a template; its tos values are overwritten with the
+# cos(lat)-weighted average.
+weighted_mean = ENSO.mean('lat')
+weights = np.cos(np.deg2rad(ENSO.lat.values))
+interim = np.multiply(ENSO.tos.values,weights[np.newaxis,:,np.newaxis])
+interim = np.nansum(interim,axis=1)
+interim = np.true_divide(interim,np.sum(weights))
+weighted_mean.tos.values[:] = interim[:]
+
+# Subsample ENSO data for NH (NDJFM season) #
+seasonal = weighted_mean.sel(time=slice('%s-11-01' % date_first.dt.year.values, '%s-03-31' % date_last.dt.year.values))
+seasonal = seasonal.sel(time=seasonal.time.dt.month.isin([11,12,1,2,3])).mean('lon')
+nh_nina, nh_nino = enso_index(seasonal)
+seasonal.close()
+
+# Subsample ENSO data for SH (SONDJ season) #
+seasonal = weighted_mean.sel(time=slice('%s-09-01' % date_first.dt.year.values, '%s-01-31' % date_last.dt.year.values))
+seasonal = seasonal.sel(time=seasonal.time.dt.month.isin([9,10,11,12,1])).mean('lon')
+sh_nina, sh_nino = enso_index(seasonal)
+seasonal.close()
+
+# Store the Nina/Nino years in a dictionary to call later #
+model_enso_dict = {}
+model_enso_dict['NH'] = nh_nina, nh_nino
+model_enso_dict['SH'] = sh_nina, sh_nino
+
+##########################################################################
+# Define ENSO plotting parameters to be passed to the plotting functions #
+##########################################################################
+
+#########################################################
+print ('*** Doing the model eddy heat flux calculations')
+#########################################################
+vt = compute_total_eddy_heat_flux(vas.va,tas.ta)
+
+model_nh_enso_uzm = uas.ua.sel(time=slice('%s-11-01' % date_first.dt.year.values, '%s-03-31' % date_last.dt.year.values))
+model_nh_enso_vtzm = vt.sel(time=slice('%s-11-01' % date_first.dt.year.values, '%s-03-31' % date_last.dt.year.values))
+model_nh_enso_psl = ps.psl.sel(time=slice('%s-11-01' % date_first.dt.year.values, '%s-03-31' % date_last.dt.year.values))
+model_nh_enso_titles = ['November','December','January','February','March']
+model_nh_enso_plot_months = [11,12,1,2,3]
+model_nh_enso_axes = [0,90,1000,1]            # [lat_min, lat_max, p_bottom, p_top]
+model_nh_enso_psl_axes = [-180, 180, 20, 90]  # [lon_min, lon_max, lat_min, lat_max]
+
+model_sh_enso_uzm = uas.ua.sel(time=slice('%s-09-01' % date_first.dt.year.values, '%s-01-31' % date_last.dt.year.values))
+model_sh_enso_vtzm = vt.sel(time=slice('%s-09-01' % date_first.dt.year.values, '%s-01-31' % date_last.dt.year.values))
+model_sh_enso_psl = ps.psl.sel(time=slice('%s-09-01' % date_first.dt.year.values, '%s-01-31' % date_last.dt.year.values))
+model_sh_enso_titles = ['September','October','November','December','January']
+model_sh_enso_plot_months = [9,10,11,12,1]
+model_sh_enso_axes = [-90,0,1000,1]
+model_sh_enso_psl_axes = [-180, 180, -90, -20]
+
+model_uzm_dict = {}
+model_uzm_dict['NH'] = model_nh_enso_uzm, model_nh_enso_titles, model_nh_enso_plot_months, model_nh_enso_axes
+model_uzm_dict['SH'] = model_sh_enso_uzm, model_sh_enso_titles, model_sh_enso_plot_months, model_sh_enso_axes
+
+model_vtzm_dict = {}
+model_vtzm_dict['NH'] = model_nh_enso_vtzm, model_nh_enso_titles, model_nh_enso_plot_months, model_nh_enso_axes
+model_vtzm_dict['SH'] = model_sh_enso_vtzm, model_sh_enso_titles, model_sh_enso_plot_months, model_sh_enso_axes
+
+model_psl_dict = {}
+model_psl_dict['NH'] = model_nh_enso_psl, model_nh_enso_titles, model_nh_enso_plot_months, ccrs.NorthPolarStereo(), model_nh_enso_psl_axes
+model_psl_dict['SH'] = model_sh_enso_psl, model_sh_enso_titles, model_sh_enso_plot_months, ccrs.SouthPolarStereo(), model_sh_enso_psl_axes
+
+#########################################################################
+# Define QBO plotting parameters to be passed to the plotting functions #
+#########################################################################
+
+model_nh_qbo_uzm = uas.ua.sel(time=slice('%s-10-01' % date_first.dt.year.values, '%s-02-28' % date_last.dt.year.values))
+model_nh_qbo_vtzm = vt.sel(time=slice('%s-10-01' % date_first.dt.year.values, '%s-02-28' % date_last.dt.year.values))
+model_nh_qbo_psl = ps.psl.sel(time=slice('%s-10-01' % date_first.dt.year.values, '%s-02-28' % date_last.dt.year.values))
+model_nh_qbo_titles = ['October','November','December','January','February']
+model_nh_qbo_plot_months = [10,11,12,1,2]
+model_nh_qbo_axes = [0,90,1000,1]
+model_nh_qbo_psl_axes = [-180, 180, 20, 90]
+
+model_sh_qbo_uzm = uas.ua.sel(time=slice('%s-07-01' % date_first.dt.year.values, '%s-11-30' % date_last.dt.year.values))
+model_sh_qbo_vtzm = vt.sel(time=slice('%s-07-01' % date_first.dt.year.values, '%s-11-30' % date_last.dt.year.values))
+model_sh_qbo_psl = ps.psl.sel(time=slice('%s-07-01' % date_first.dt.year.values, '%s-11-30' % date_last.dt.year.values))
+model_sh_qbo_titles = ['July','August','September','October','November']
+model_sh_qbo_plot_months = [7,8,9,10,11]
+model_sh_qbo_axes = [-90,0,1000,1]
+model_sh_qbo_psl_axes = [-180, 180, -90, -20]
+
+model_uzm_qbo_dict = {}
+model_uzm_qbo_dict['NH'] = model_nh_qbo_uzm, model_nh_qbo_titles, model_nh_qbo_plot_months, model_nh_qbo_axes
+model_uzm_qbo_dict['SH'] = model_sh_qbo_uzm, model_sh_qbo_titles, model_sh_qbo_plot_months, model_sh_qbo_axes
+# NOTE(review): this dict print looks like leftover debugging output.
+print (model_uzm_qbo_dict)
+
+model_vtzm_qbo_dict = {}
+model_vtzm_qbo_dict['NH'] = model_nh_qbo_vtzm, model_nh_qbo_titles, model_nh_qbo_plot_months, model_nh_qbo_axes
+model_vtzm_qbo_dict['SH'] = model_sh_qbo_vtzm, model_sh_qbo_titles, model_sh_qbo_plot_months, model_sh_qbo_axes
+# NOTE(review): leftover debugging output.
+print (model_vtzm_qbo_dict)
+
+model_psl_qbo_dict = {}
+model_psl_qbo_dict['NH'] = model_nh_qbo_psl, model_nh_qbo_titles, model_nh_qbo_plot_months, ccrs.NorthPolarStereo(), model_nh_qbo_psl_axes
+model_psl_qbo_dict['SH'] = model_sh_qbo_psl, model_sh_qbo_titles, model_sh_qbo_plot_months, ccrs.SouthPolarStereo(), model_sh_qbo_psl_axes
+# NOTE(review): leftover debugging output.
+print (model_psl_qbo_dict)
+
+
+hemispheres = ['NH','SH']
+
+# Make the model ENSO composite figures for each hemisphere.
+for hemi in hemispheres:
+
+    ############################################
+    print ('*** Calling the model ENSO indices')
+    ############################################
+    model_nina, model_nino = model_enso_dict[hemi]
+
+    print (model_nina,'model_nina')
+    print (model_nino, 'model_nino')
+
+    ################################################################
+    print ('*** Running the model ENSO zonal mean zonal wind calcs')
+    ################################################################
+    out_plot = f'{plot_dir}/{CASENAME}-{FIRSTYR}-{LASTYR}-enso34-uzm-%s.png' % hemi
+    out_fig, out_ax = enso_uzm(model_uzm_dict[hemi][0],model_nina,model_nino,model_uzm_dict[hemi][1],model_uzm_dict[hemi][2],model_uzm_dict[hemi][3])
+    out_fig.savefig(out_plot,dpi=700)
+
+    #########################################################
+    print ('*** Running the model ENSO eddy heat flux calcs')
+    #########################################################
+    out_plot = f'{plot_dir}/{CASENAME}-{FIRSTYR}-{LASTYR}-enso34-vt-%s.png' % hemi
+    out_fig, out_ax = enso_vt(model_vtzm_dict[hemi][0],model_nina,model_nino,model_vtzm_dict[hemi][1],model_vtzm_dict[hemi][2],model_vtzm_dict[hemi][3])
+    out_fig.savefig(out_plot,dpi=700)
+
+    #######################################################
+    print ('*** Running the model ENSO sea level pressure')
+    #######################################################
+    out_plot = f'{plot_dir}/{CASENAME}-{FIRSTYR}-{LASTYR}-enso34-psl-%s.png' % hemi
+    out_fig, out_ax = enso_slp(model_psl_dict[hemi][0],model_nina,model_nino,model_psl_dict[hemi][1],model_psl_dict[hemi][2],model_psl_dict[hemi][3],model_psl_dict[hemi][4])
+    out_fig.savefig(out_plot,dpi=700)
+
+##########################################
+print('*** Running the model QBO metrics')
+##########################################
+# "switch" is 1 if a QBO was detected in the model winds, 0 otherwise; it
+# gates the model QBO composites below.
+metricsout, switch = qbo_metrics(uas,QBOisobar)
+
+filepath = f'{plot_dir}/{CASENAME}-{FIRSTYR}-{LASTYR}-qbo{QBOisobar}hPa-metrics.txt'
+with open(filepath, 'w') as file_handler:
+    file_handler.write(f"{'QBO metrics: periodicity and spatial characteristics'}\n")
+    file_handler.write(f"{' '}\n")
+    for item in metricsout:
+        file_handler.write(f"{item}\n")
+
+if switch == 1:
+
+    ###################################################################################
+    print ('*** A model QBO was detected so POD is now running the model QBO indexing')
+    ###################################################################################
+
+    print (QBOisobar, "QBOisobar")
+
+    # Subset atmospheric data for user defined isobar #
+    # NOTE(review): QBOisobar is a string from os.environ -- confirm that
+    # .sel(lev=...) resolves it against the lev coordinate as expected.
+    subset = uas.sel(lev=QBOisobar)
+
+    # Select 5S-5N winds #
+    tropical_subset = subset.interp(lat=[-5,-2.5,0,2.5,5])
+
+    # Latitudinally weight and average #
+    # mean('lat') acts as a template; its ua values are overwritten with the
+    # cos(lat)-weighted average.
+    qbo = tropical_subset.mean('lat')
+    weights = np.cos(np.deg2rad(tropical_subset.lat.values))
+    interim = np.multiply(tropical_subset.ua.values,weights[np.newaxis,:])
+    interim = np.nansum(interim,axis=1)
+    interim = np.true_divide(interim,np.sum(weights))
+    qbo.ua.values[:] = interim[:]
+
+    # NOTE(review): this redefines the module-level qbo_index with identical
+    # logic; consider deleting it and reusing the module-level function.
+    def qbo_index(seasonal):
+        """Identify easterly- and westerly-QBO years from 2-month mean winds.
+
+        Returns (eqbo_years, wqbo_years): years whose standardized 2-month
+        day-weighted wind anomaly is <= -1 (easterly) or >= 1 (westerly).
+        """
+
+        # Create 2-month seasonally averaged standardized QBO anomalies. Weight each month by number of days comprising month #
+        day_in_month_weights = seasonal.time.dt.days_in_month.values[:2]/np.sum(seasonal.time.dt.days_in_month.values[:2])
+        qboindex = np.reshape(seasonal.ua.values,(int(len(seasonal.ua.values)/2),2))
+        qboindex = np.nanmean(np.multiply(qboindex,day_in_month_weights[np.newaxis,:]),axis=1)
+        anom = np.subtract(qboindex,np.nanmean(qboindex))
+        anom = np.true_divide(anom,np.nanstd(qboindex))
+
+        # Get the unique years in ascending order. sorted() replaces
+        # iteration over a bare set, whose order is not guaranteed, so that
+        # years[i] reliably lines up with the i-th season in "anom".
+        years = sorted(set(seasonal.time.dt.year.values))
+        eqbo_years = [years[i] for i,v in enumerate(anom) if v <= -1]
+        wqbo_years = [years[i] for i,v in enumerate(anom) if v >= 1]
+
+        return eqbo_years, wqbo_years
+
+    # Subsample QBO data for NH (Oct-Nov winds define the NH QBO index) #
+    seasonal = qbo.sel(time=qbo.time.dt.month.isin([10,11]))
+    model_nh_eqbo, model_nh_wqbo = qbo_index(seasonal)
+    seasonal.close()
+
+    # Subsample QBO data for SH (Jul-Aug winds define the SH QBO index) #
+    seasonal = qbo.sel(time=qbo.time.dt.month.isin([7,8]))
+    model_sh_eqbo, model_sh_wqbo = qbo_index(seasonal)
+    seasonal.close()
+
+    # Store the eQBO/wQBO years in a dictionary to call later.
+    # Built once, before the hemisphere loop; it was previously rebuilt,
+    # identically, on every loop iteration.
+    model_qbo_dict = {}
+    model_qbo_dict['NH'] = model_nh_eqbo, model_nh_wqbo
+    model_qbo_dict['SH'] = model_sh_eqbo, model_sh_wqbo
+
+    # Make the model QBO composite figures for each hemisphere.
+    for hemi in hemispheres:
+
+        ############################################
+        print ('*** Running the model QBO indexing')
+        ############################################
+        model_eqbo, model_wqbo = model_qbo_dict[hemi]
+
+        print (model_eqbo, 'model_eqbo')
+        print (model_wqbo, 'model_wqbo')
+
+        ##################################################################
+        print ('*** Running the model QBO zonal mean zonal wind plotting')
+        ##################################################################
+        out_plot = f'{plot_dir}/{CASENAME}-{FIRSTYR}-{LASTYR}-qbo{QBOisobar}hPa-uzm-%s.png' % hemi
+        out_fig, out_ax = qbo_uzm(model_uzm_qbo_dict[hemi][0],model_eqbo,model_wqbo,QBOisobar,model_uzm_qbo_dict[hemi][1],model_uzm_qbo_dict[hemi][2],model_uzm_qbo_dict[hemi][3])
+        out_fig.savefig(out_plot,dpi=700)
+
+        ######################################################################
+        print ('*** Running the model QBO zonal mean eddy heat flux plotting')
+        ######################################################################
+        out_plot = f'{plot_dir}/{CASENAME}-{FIRSTYR}-{LASTYR}-qbo{QBOisobar}hPa-vt-%s.png' % hemi
+        out_fig, out_ax = qbo_vt(model_vtzm_qbo_dict[hemi][0],model_eqbo,model_wqbo,QBOisobar,model_vtzm_qbo_dict[hemi][1],model_vtzm_qbo_dict[hemi][2],model_vtzm_qbo_dict[hemi][3])
+        out_fig.savefig(out_plot,dpi=700)
+
+        print ('*** Running the model QBO sea level pressure plotting')
+        out_plot = f'{plot_dir}/{CASENAME}-{FIRSTYR}-{LASTYR}-qbo{QBOisobar}hPa-psl-%s.png' % hemi
+        out_fig, out_ax = qbo_slp(model_psl_qbo_dict[hemi][0],model_eqbo,model_wqbo,QBOisobar,model_psl_qbo_dict[hemi][1],model_psl_qbo_dict[hemi][2],model_psl_qbo_dict[hemi][3],model_psl_qbo_dict[hemi][4])
+        out_fig.savefig(out_plot,dpi=700)
+
+if switch == 0:
+
+    # No QBO detected in the model winds, so the QBO composites are skipped.
+    print ("No QBO detected in the model data. As a result, QBO Ubar, v'T', and SLP plots were not made.")
+
+###################################
+# Prepare the output dictionaries #
+###################################
+
+vt_data = {}
+uzm_data = {}
+slp_data = {}
+
+###########################
+# Saving some of the data #
+###########################
+
+# NOTE(review): .sel() is given a boolean array here while the equivalent
+# selections earlier in the script used .isel() -- confirm both behave the
+# same with this xarray version.
+vt_data['NH'] = vt.sel(lat = np.logical_and(vt.lat >= 0, vt.lat <= 90))
+vt_data['SH'] = vt.sel(lat = np.logical_and(vt.lat >= -90, vt.lat <= 0))
+vt_out = xr.concat([vt_data['SH'], vt_data['NH']], dim='hemi')
+vt_out.name = 'vt_out'
+vt_out.attrs['units'] = 'Km s**-1'
+vt_out.attrs['long_name'] = 'Pole to equator zonal-mean eddy heat flux'
+
+uzm_data['NH'] = uas.ua.sel(lat = np.logical_and(uas.lat >= 0, uas.lat <= 90))
+uzm_data['SH'] = uas.ua.sel(lat = np.logical_and(uas.lat >= -90, uas.lat <= 0))
+uzm_out = xr.concat([uzm_data['SH'], uzm_data['NH']], dim='hemi')
+uzm_out.name = 'uzm_out'
+uzm_out.attrs['units'] = 'm s**-1'
+uzm_out.attrs['long_name'] = 'Pole to equator zonal-mean zonal wind'
+
+slp_data['NH'] = ps.psl.sel(lat = np.logical_and(ps.lat >= 20, ps.lat <= 90))
+slp_data['SH'] = ps.psl.sel(lat = np.logical_and(ps.lat >= -90, ps.lat <= -20))
+slp_out = xr.concat([slp_data['SH'], slp_data['NH']], dim='hemi')
+slp_out.name = 'slp_out'
+slp_out.attrs['units'] = 'hPa'
+slp_out.attrs['long_name'] = 'Pole to 20N/S sea level pressure'
+
+# Equatorial winds at the QBO isobar, for the derived-data file.
+qbo_out = uas.ua.interp(lat=[-5,-2.5,0,2.5,5]).sel(lev=QBOisobar)
+qbo_out.name = 'qbo_out'
+qbo_out.attrs['units'] = 'm s**-1'
+qbo_out.attrs['long_name'] = f'5S to 5N {QBOisobar} hPa zonal-mean zonal wind'
+print (qbo_out, 'qbo_out')
+
+out_ds = xr.merge([vt_out,uzm_out,slp_out,qbo_out])
+print ('OUT_DS')
+print (out_ds)
+print (' ')
+print (' ')
+
+print('*** Preparing to save derived data')
+data_dir = f'{WK_DIR}/model/netCDF'
+outfile = data_dir+f'/{CASENAME}_qbo-enso_diagnostics.nc'
+
+# Write all derived fields as float32 to keep the output file small.
+encoding = {'vt_out': {'dtype':'float32'},
+            'uzm_out': {'dtype':'float32'},
+            'slp_out': {'dtype':'float32'},
+            'qbo_out': {'dtype':'float32'}}
+
+print(f'*** Saving qbo-enso diagnostic data to {outfile}')
+out_ds.to_netcdf(outfile, encoding=encoding)
+
+
+print('\n=====================================')
+print('END stc_qbo_enso.py ')
+print('=====================================\n')
\ No newline at end of file
diff --git a/diagnostics/stc_qbo_enso/stc_qbo_enso_plottingcodeenso.py b/diagnostics/stc_qbo_enso/stc_qbo_enso_plottingcodeenso.py
new file mode 100644
index 000000000..35f797de7
--- /dev/null
+++ b/diagnostics/stc_qbo_enso/stc_qbo_enso_plottingcodeenso.py
@@ -0,0 +1,767 @@
+'''
+This module contains functions used in the Stratospheric QBO and ENSO POD.
+
+Contains:
+ enso_slp: plots sea level pressure response to ENSO as a function of month and ENSO phase
+ enso_uzm: plots the zonal-mean zonal wind response to ENSO as a function of month and ENSO phase
+ enso_vt: plots the zonally averaged eddy heat flux response to ENSO as a function of month and ENSO phase
+'''
+
+
+import numpy as np
+import matplotlib as mpl
+import matplotlib.pyplot as plt
+import cartopy.crs as ccrs
+import matplotlib.path as mpath
+from cartopy.util import add_cyclic_point
+from scipy import stats
+
+##################################################################################################
+##################################################################################################
+##################################################################################################
+
+def enso_uzm(uzm,negative_indices,positive_indices,titles,plot_months,axes):
+
+ r""" Compute the zonal mean zonal wind response to ENSO. Anomalies are defined as
+ deviations from the seasonal cycle and composites of anomalies are made during La Nina years,
+ El Nino years, and then their difference is shown. Stippling is used to denote statistical
+ significance on the La Nina minus El Nino composites.
+
+ Parameters
+ ----------
+ uzm : xarray.DataArray
+ The zonal mean zonal wind.
+
+ negative_indices : list
+ A list of La Nina years.
+
+ positive_indices : list
+ A list of El Nino years.
+
+ titles : list of strings
+ A list of month names that will be used as the titles for each column of subplots
+
+ plot_months : list
+ A list of numbers corresponding to each month (e.g., 10 = october)
+
+ axes : A list of numbers used to set the pressure and latitude limits of the subplots.
+ [0,90,1000,1] are used for the N. hemisphere and [-90,0,1000,1] for S. hemisphere
+
+ Returns
+ -------
+ 3 row by 6 column plot of La Nina uzm anomalies (top row), El Nino uzm anomalies (middle),
+ and their difference (bottom row) with stippling highlighting differences between El Nino
+ and La Nina winds statistically significant at the 95% level using a two-sided t-test.
+
+ Notes
+ -----
+ The input field uzm is assumed to have dimensions named "lat" and "lev".
+ E.g., if your data has dimensions "latitude" and/or "level",
+ use the rename method:
+ ds.rename({'latitude':'lat','level':'lev'})
+ """
+
+ nina_out = []
+ nino_out = []
+ diff_out = []
+ sigs_out = []
+ clim_out = []
+
+ for mon in plot_months:
+
+ clim = uzm.sel(time=uzm.time.dt.month.isin([mon]))
+ clim_out.append(clim.mean('time').values)
+
+ anom = clim.groupby("time.month") - clim.groupby("time.month").mean("time")
+
+ if mon == 7 or mon == 8 or mon == 9 or mon == 10 or mon == 11 or mon == 12:
+ tmp_negative_indices = np.add(negative_indices,0)
+ tmp_positive_indices = np.add(positive_indices,0)
+ if mon == 1 or mon == 2 or mon == 3:
+ tmp_negative_indices = np.add(negative_indices,1)
+ tmp_positive_indices = np.add(positive_indices,1)
+
+ nina_tmp = anom.sel(time=anom.time.dt.year.isin([tmp_negative_indices]))
+ nino_tmp = anom.sel(time=anom.time.dt.year.isin([tmp_positive_indices]))
+
+ t,p = stats.ttest_ind(nina_tmp.values,nino_tmp.values,axis=0,nan_policy='omit')
+
+ sigs_out.append(np.subtract(1,p))
+ diff_out.append(np.subtract(nina_tmp.mean('time').values,nino_tmp.mean('time').values))
+ nina_out.append(nina_tmp.mean('time').values)
+ nino_out.append(nino_tmp.mean('time').values)
+
+ clim.close()
+ anom.close()
+ nina_tmp.close()
+ nino_tmp.close()
+ uzm.close()
+
+ nina_out = np.array(nina_out)
+ nino_out = np.array(nino_out)
+ diff_out = np.array(diff_out)
+ sigs_out = np.array(sigs_out)
+ clim_out = np.array(clim_out)
+
+ ############# Begin the plotting ############
+
+ fig, ax = plt.subplots()
+
+ mpl.rcParams['font.sans-serif'].insert(0, 'Arial')
+
+ vmin = -10
+ vmax = 10
+ vlevs = np.linspace(vmin,vmax,num=21)
+ vlevs = [v for v in vlevs if v != 0]
+ ticks = [vmin,vmin/2,0,vmax/2,vmax]
+
+ cmin = -200
+ cmax = 200
+ clevs = np.linspace(cmin,cmax,num=41)
+ clevs = [v for v in clevs if v != 0]
+
+ plt.suptitle('ENSO zonal-mean zonal wind (m/s)',fontsize=12,fontweight='normal')
+
+ # Add colormap #
+ from palettable.colorbrewer.diverging import RdBu_11
+ cmap1=RdBu_11.mpl_colormap.reversed()
+
+ x, y = np.meshgrid(uzm.lat.values,uzm.lev.values)
+ uzm.close()
+
+ cols = [0,1,2,3,4]
+
+ for i in cols:
+
+ print (i)
+
+ # Nina #
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(0,cols[i]))
+ plt.title('%s' % titles[i],fontsize=10,y=0.93,fontweight='normal')
+
+ cs = plt.contourf(x,y,nina_out[i],cmap=cmap1,levels=vlevs,extend="both",vmin=vmin,vmax=vmax,zorder=1)
+
+ mpl.rcParams["lines.linewidth"] = 0.2
+ mpl.rcParams["lines.dashed_pattern"] = 10, 3
+ black = plt.contour(x,y,clim_out[i],colors='k',levels=clevs,extend="both",vmin=cmin,vmax=cmax,zorder=3)
+ plt.clabel(black,black.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+
+ plt.semilogy()
+ yticks = [1,10,100,1000]
+ plt.yticks(yticks,yticks,fontsize=6,fontweight='normal')
+ xticks = [-90,-60,-30,0,30,60,90]
+ plt.xticks(xticks,xticks,fontsize=6,fontweight='normal')
+ plt.gca().invert_yaxis()
+ plt.axis(axes)
+ if i == 0:
+ plt.ylabel('Pressure (hPa)',fontsize=8,fontweight='normal')
+
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('Nina (%s seasons)' % int(len(negative_indices)),fontsize=10)
+
+ # Nino #
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(1,cols[i]))
+
+ cs = plt.contourf(x,y,nino_out[i],cmap=cmap1,levels=vlevs,extend="both",vmin=vmin,vmax=vmax,zorder=1)
+
+ mpl.rcParams["lines.linewidth"] = 0.2
+ mpl.rcParams["lines.dashed_pattern"] = 10, 3
+ black = plt.contour(x,y,clim_out[i],colors='k',levels=clevs,extend="both",vmin=cmin,vmax=cmax,zorder=3)
+ plt.clabel(black,black.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+
+ plt.semilogy()
+ yticks = [1,10,100,1000]
+ plt.yticks(yticks,yticks,fontsize=6,fontweight='normal')
+ xticks = [-90,-60,-30,0,30,60,90]
+ plt.xticks(xticks,xticks,fontsize=6,fontweight='normal')
+ plt.gca().invert_yaxis()
+ plt.axis(axes)
+ if i == 0:
+ plt.ylabel('Pressure (hPa)',fontsize=8,fontweight='normal')
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('Nino (%s seasons)' % int(len(positive_indices)),fontsize=10)
+
+ # Diff: Nina minus Nino #
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(2,cols[i]))
+
+ cs = plt.contourf(x,y,diff_out[i],cmap=cmap1,levels=vlevs,extend="both",vmin=vmin,vmax=vmax,zorder=1)
+
+ mpl.rcParams["lines.linewidth"] = 0.2
+ mpl.rcParams["lines.dashed_pattern"] = 10, 3
+ black = plt.contour(x,y,clim_out[i],colors='k',levels=clevs,extend="both",vmin=cmin,vmax=cmax,zorder=3)
+ plt.clabel(black,black.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+
+ plt.semilogy()
+ yticks = [1,10,100,1000]
+ plt.yticks(yticks,yticks,fontsize=6,fontweight='normal')
+ xticks = [-90,-60,-30,0,30,60,90]
+ plt.xticks(xticks,xticks,fontsize=6,fontweight='normal')
+ plt.gca().invert_yaxis()
+ plt.axis(axes)
+ if i == 0:
+ plt.ylabel('Pressure (hPa)',fontsize=8,fontweight='normal')
+ plt.xlabel('Latitude',fontsize=8,fontweight='normal')
+
+ sig_levs = [0.95,1]
+ mpl.rcParams['hatch.linewidth'] = 0.2
+ plt.contourf(x,y,sigs_out[i],colors='black',vmin=0.95,vmax=1,levels=sig_levs,hatches=['......','......'],alpha=0.0)
+
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('Nina - Nino',fontsize=10)
+
+ # Add colorbar #
+
+ cb_ax = fig.add_axes([0.365, 0.04, 0.30, 0.015])
+ cbar = fig.colorbar(cs, cax=cb_ax, ticks=ticks,orientation='horizontal')
+ cbar.ax.tick_params(labelsize=8, width=1)
+ cbar.ax.set_xticklabels(ticks,weight='normal')
+
+ plt.subplots_adjust(top=0.86,bottom=0.16,hspace=0.5,wspace=0.55,left=0.08,right=0.95)
+ return fig, ax
+
+##################################################################################################
+##################################################################################################
+##################################################################################################
+
+def enso_vt(vt,negative_indices,positive_indices,titles,plot_months,axes):
+
+ r""" Compute the zonal mean eddy heat flux response to ENSO. Anomalies are defined as
+ deviations from the seasonal cycle and composites of anomalies are made during La Nina years,
+ El Nino years, and then their difference is shown. Stippling is used to denote statistical
+ significance on the La Nina minus El Nino composites.
+
+ Parameters
+ ----------
+ vt : xarray.DataArray
+ The zonal mean eddy heat flux. This quantity is calculated using the
+ compute_total_eddy_heat_flux function given in the driver script stc_qbo_enso.py
+
+ negative_indices : list
+ A list of La Nina years.
+
+ positive_indices : list
+ A list of El Nino years.
+
+ titles : list of strings
+ A list of month names that will be used as the titles for each column of subplots
+
+ plot_months : list
+ A list of numbers corresponding to each month (e.g., 10 = october)
+
+ axes : A list of numbers used to set the pressure and latitude limits of the subplots.
+ [0,90,1000,1] are used for the N. hemisphere and [-90,0,1000,1] for S. hemisphere
+
+ Returns
+ -------
+ 3 row by 6 column plot of La Nina vt anomalies (top row), El Nino vt anomalies (middle),
+ and their difference (bottom row) with stippling highlighting differences between El Nino
+ and La Nina winds statistically significant at the 95% level using a two-sided t-test.
+
+ Notes
+ -----
+ The input field vt is assumed to have dimensions named "lat" and "lev".
+ E.g., if your data has dimensions "latitude" and/or "level",
+ use the rename method:
+ ds.rename({'latitude':'lat','level':'lev'})
+ """
+
+ nina_out = []
+ nino_out = []
+ diff_out = []
+ sigs_out = []
+ clim_out = []
+
+ for mon in plot_months:
+
+ clim = vt.sel(time=vt.time.dt.month.isin([mon]))
+ clim_out.append(clim.mean('time').values)
+
+ anom = clim.groupby("time.month") - clim.groupby("time.month").mean("time")
+
+ if mon == 7 or mon == 8 or mon == 9 or mon == 10 or mon == 11 or mon == 12:
+ tmp_negative_indices = np.add(negative_indices,0)
+ tmp_positive_indices = np.add(positive_indices,0)
+ if mon == 1 or mon == 2 or mon == 3:
+ tmp_negative_indices = np.add(negative_indices,1)
+ tmp_positive_indices = np.add(positive_indices,1)
+
+ nina_tmp = anom.sel(time=anom.time.dt.year.isin([tmp_negative_indices]))
+ nino_tmp = anom.sel(time=anom.time.dt.year.isin([tmp_positive_indices]))
+
+ t,p = stats.ttest_ind(nina_tmp.values,nino_tmp.values,axis=0,nan_policy='omit')
+
+ sigs_out.append(np.subtract(1,p))
+ diff_out.append(np.subtract(nina_tmp.mean('time').values,nino_tmp.mean('time').values))
+ nina_out.append(nina_tmp.mean('time').values)
+ nino_out.append(nino_tmp.mean('time').values)
+
+ clim.close()
+ anom.close()
+ nina_tmp.close()
+ nino_tmp.close()
+ vt.close()
+
+ nina_out = np.array(nina_out)
+ nino_out = np.array(nino_out)
+ diff_out = np.array(diff_out)
+ sigs_out = np.array(sigs_out)
+ clim_out = np.array(clim_out)
+
+ ############# Begin the plotting ############
+
+ fig, ax = plt.subplots()
+
+ mpl.rcParams['font.sans-serif'].insert(0, 'Arial')
+
+ blevs = []
+
+ blevs.append(-2)
+ blevs.append(-6)
+ blevs.append(-10)
+ blevs.append(-25)
+ blevs.append(-50)
+ blevs.append(-100)
+ #blevs.append(-200)
+
+ blevs.append(2)
+ blevs.append(6)
+ blevs.append(10)
+ blevs.append(25)
+ blevs.append(50)
+ blevs.append(100)
+ #blevs.append(200)
+
+
+ blevs = np.sort(blevs)
+ print (blevs)
+
+ cmin = -200
+ cmax = 200
+ clevs = np.linspace(cmin,cmax,num=41)
+ clevs = [v for v in clevs if v != 0]
+
+ plt.suptitle('ENSO zonal-mean eddy heat flux (Km/s)',fontsize=12,fontweight='normal')
+
+ # Add colormap #
+ from palettable.colorbrewer.diverging import RdBu_11
+ cmap1=RdBu_11.mpl_colormap.reversed()
+
+ x, y = np.meshgrid(vt.lat.values,vt.lev.values)
+
+ cols = [0,1,2,3,4]
+
+ for i in cols:
+
+ print (i)
+
+ # Nina #
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(0,cols[i]))
+ plt.title('%s' % titles[i],fontsize=10,y=0.93,fontweight='normal')
+
+ cs = plt.contourf(x,y,nina_out[i],blevs,norm = mpl.colors.SymLogNorm(linthresh=2,linscale=1,vmin=-100,vmax=100),cmap=cmap1,extend="both",zorder=1)
+
+ mpl.rcParams["lines.linewidth"] = 0.2
+ mpl.rcParams["lines.dashed_pattern"] = 10, 3
+ black = plt.contour(x,y,clim_out[i],colors='k',levels=clevs,extend="both",vmin=cmin,vmax=cmax,zorder=3)
+ plt.clabel(black,black.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+
+ plt.semilogy()
+ yticks = [1,10,100,1000]
+ plt.yticks(yticks,yticks,fontsize=6,fontweight='normal')
+ xticks = [-90,-60,-30,0,30,60,90]
+ plt.xticks(xticks,xticks,fontsize=6,fontweight='normal')
+ plt.gca().invert_yaxis()
+ plt.axis(axes)
+ if i == 0:
+ plt.ylabel('Pressure (hPa)',fontsize=8,fontweight='normal')
+
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('Nina (%s seasons)' % int(len(negative_indices)),fontsize=10)
+
+ # Nino #
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(1,cols[i]))
+
+ cs = plt.contourf(x,y,nino_out[i],blevs,norm = mpl.colors.SymLogNorm(linthresh=2,linscale=1,vmin=-100,vmax=100),cmap=cmap1,extend="both",zorder=1)
+
+ mpl.rcParams["lines.linewidth"] = 0.2
+ mpl.rcParams["lines.dashed_pattern"] = 10, 3
+ black = plt.contour(x,y,clim_out[i],colors='k',levels=clevs,extend="both",vmin=cmin,vmax=cmax,zorder=3)
+ plt.clabel(black,black.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+
+ plt.semilogy()
+ yticks = [1,10,100,1000]
+ plt.yticks(yticks,yticks,fontsize=6,fontweight='normal')
+ xticks = [-90,-60,-30,0,30,60,90]
+ plt.xticks(xticks,xticks,fontsize=6,fontweight='normal')
+ plt.gca().invert_yaxis()
+ plt.axis(axes)
+ if i == 0:
+ plt.ylabel('Pressure (hPa)',fontsize=8,fontweight='normal')
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('Nino (%s seasons)' % int(len(positive_indices)),fontsize=10)
+
+ # Diff: Nina minus Nino #
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(2,cols[i]))
+
+ cs = plt.contourf(x,y,diff_out[i],blevs,norm = mpl.colors.SymLogNorm(linthresh=2,linscale=1,vmin=-100,vmax=100),cmap=cmap1,extend="both",zorder=1)
+
+ mpl.rcParams["lines.linewidth"] = 0.2
+ mpl.rcParams["lines.dashed_pattern"] = 10, 3
+ black = plt.contour(x,y,clim_out[i],colors='k',levels=clevs,extend="both",vmin=cmin,vmax=cmax,zorder=3)
+ plt.clabel(black,black.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+
+ plt.semilogy()
+ yticks = [1,10,100,1000]
+ plt.yticks(yticks,yticks,fontsize=6,fontweight='normal')
+ xticks = [-90,-60,-30,0,30,60,90]
+ plt.xticks(xticks,xticks,fontsize=6,fontweight='normal')
+ plt.gca().invert_yaxis()
+ plt.axis(axes)
+ if i == 0:
+ plt.ylabel('Pressure (hPa)',fontsize=8,fontweight='normal')
+ plt.xlabel('Latitude',fontsize=8,fontweight='normal')
+
+ sig_levs = [0.95,1]
+ mpl.rcParams['hatch.linewidth'] = 0.2
+ plt.contourf(x,y,sigs_out[i],colors='black',vmin=0.95,vmax=1,levels=sig_levs,hatches=['......','......'],alpha=0.0)
+
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('Nina - Nino',fontsize=10)
+
+ # Add colorbar #
+
+ oticks = [-100,-50,-25,-10,-6,-2,2,6,10,25,50,100]
+ cb_ax = fig.add_axes([0.365, 0.04, 0.30, 0.015])
+ cbar = fig.colorbar(cs, cax=cb_ax, ticks=oticks,orientation='horizontal')
+ cbar.ax.tick_params(labelsize=6, width=1)
+ cbar.ax.set_xticklabels(oticks,weight='normal')
+
+ plt.subplots_adjust(top=0.86,bottom=0.16,hspace=0.5,wspace=0.55,left=0.08,right=0.95)
+ return fig, ax
+
+##################################################################################################
+##################################################################################################
+##################################################################################################
+
+
+def enso_slp(ps,negative_indices,positive_indices,titles,plot_months,projection,axes):
+
+ r""" Compute the sea level pressure response to ENSO. Anomalies are defined as
+ deviations from the seasonal cycle and composites of anomalies are made during La Nina years,
+ El Nino years, and then their difference is shown. Stippling is used to denote statistical
+ significance on the La Nina minus El Nino composites.
+
+ Parameters
+ ----------
+ ps : xarray.DataArray
+ The sea level pressure.
+
+ negative_indices : list
+ A list of La Nina years.
+
+ positive_indices : list
+ A list of El Nino years.
+
+ titles : list of strings
+ A list of month names that will be used as the titles for each column of subplots
+
+ plot_months : list
+ A list of numbers corresponding to each month (e.g., 10 = october)
+
+ projection : ccrs.NorthPolarStereo() or ccrs.SouthPolarStereo()
+
+ axes : A list of numbers used to set the longitude and latitude bounds of the subplots.
+ [-180, 180, 20, 90] are used for the N. hemisphere and [-180, 180, -90, -20] for S. hemisphere
+
+ Returns
+ -------
+ 3 row by 6 column plot of La Nina ps anomalies (top row), El Nino ps anomalies (middle),
+ and their difference (bottom row) with stippling highlighting differences between El Nino
+ and La Nina winds statistically significant at the 95% level using a two-sided t-test.
+
+ Notes
+ -----
+ The input field ps is assumed to have dimensions named "lat" and "lon".
+ E.g., if your data has dimensions "latitude" and/or "level",
+ use the rename method:
+ ds.rename({'latitude':'lat','longitude':'lon'})
+
+ The input field ps is expected to have units of hPa. Directly below, the code
+ will check to see if the units are Pa instead, and if they are, convert them to hPa.
+ """
+
+ if getattr(ps,'units') == 'Pa':
+ print(f'**Converting pressure levels to hPa')
+ ps.attrs['units'] = 'hPa'
+ ps.values[:] = ps.values/100.
+
+ print (np.nanmin(ps.values))
+ print (np.nanmedian(ps.values))
+ print (np.nanmean(ps.values))
+ print (np.nanmax(ps.values))
+
+ nina_out = []
+ nino_out = []
+ diff_out = []
+ sigs_out = []
+ clim_out = []
+
+ for mon in plot_months:
+
+ clim = ps.sel(time=ps.time.dt.month.isin([mon]))
+ clim_out.append(clim.mean('time').values)
+
+ anom = clim.groupby("time.month") - clim.groupby("time.month").mean("time")
+
+ if mon == 7 or mon == 8 or mon == 9 or mon == 10 or mon == 11 or mon == 12:
+ tmp_negative_indices = np.add(negative_indices,0)
+ tmp_positive_indices = np.add(positive_indices,0)
+ if mon == 1 or mon == 2 or mon == 3:
+ tmp_negative_indices = np.add(negative_indices,1)
+ tmp_positive_indices = np.add(positive_indices,1)
+
+ nina_tmp = anom.sel(time=anom.time.dt.year.isin([tmp_negative_indices]))
+ nino_tmp = anom.sel(time=anom.time.dt.year.isin([tmp_positive_indices]))
+
+ t,p = stats.ttest_ind(nina_tmp.values,nino_tmp.values,axis=0,nan_policy='omit')
+
+ sigs_out.append(np.subtract(1,p))
+ diff_out.append(np.subtract(nina_tmp.mean('time').values,nino_tmp.mean('time').values))
+ nina_out.append(nina_tmp.mean('time').values)
+ nino_out.append(nino_tmp.mean('time').values)
+
+ clim.close()
+ anom.close()
+ nina_tmp.close()
+ nino_tmp.close()
+ ps.close()
+
+ nina_out = np.array(nina_out)
+ nino_out = np.array(nino_out)
+ diff_out = np.array(diff_out)
+ sigs_out = np.array(sigs_out)
+ clim_out = np.array(clim_out)
+
+ ############# Begin the plotting ############
+
+ fig, ax = plt.subplots()
+
+ mpl.rcParams['font.sans-serif'].insert(0, 'Arial')
+
+ vmin = -10
+ vmax = 10
+ vlevs = np.linspace(vmin,vmax,num=21)
+ vlevs = [v for v in vlevs if v != 0]
+ ticks = [vmin,vmin/2,0,vmax/2,vmax]
+
+ cmin = 900
+ cmax = 1100
+ clevs = np.linspace(cmin,cmax,num=21)
+
+ plt.suptitle('Nina - Nino sea level pressure (hPa)',fontsize=12,fontweight='normal')
+
+ # Add colormap #
+
+ from palettable.colorbrewer.diverging import RdBu_11
+ cmap1=RdBu_11.mpl_colormap.reversed()
+
+ lons = ps.lon.values
+ lats = ps.lat.values
+
+ cols = [0,1,2,3,4]
+
+ for i in cols:
+
+ print (i)
+
+ ########
+ # Nina #
+ ########
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(0,cols[i]), projection=projection)
+ ax1.set_extent(axes, ccrs.PlateCarree())
+
+ plt.title('%s' % titles[i],fontsize=10,y=0.93,fontweight='normal')
+
+ # Plot style features #
+
+ ax1.coastlines(linewidth=0.25)
+ theta = np.linspace(0, 2*np.pi, 100)
+ center, radius = [0.5, 0.5], 0.5
+ verts = np.vstack([np.sin(theta), np.cos(theta)]).T
+ circle = mpath.Path(verts * radius + center)
+ ax1.set_boundary(circle, transform=ax1.transAxes)
+ pos1 = ax1.get_position()
+ plt.title("%s" % titles[i], fontsize=10,fontweight='normal',y=0.98)
+ cyclic_z, cyclic_lon = add_cyclic_point(nina_out[i], coord=lons)
+
+ # Plot anomalies #
+
+ contourf = ax1.contourf(cyclic_lon, lats, cyclic_z,transform=ccrs.PlateCarree(),cmap=cmap1,vmin=vmin,vmax=vmax,levels=vlevs,extend='both',zorder=1)
+
+ # Overlay the climatology #
+
+ cyclic_clim, cyclic_lon = add_cyclic_point(clim_out[i], coord=lons)
+ cs = ax1.contour(cyclic_lon, lats, cyclic_clim,transform=ccrs.PlateCarree(),colors='k',linewidths=0.5,vmin=cmin,vmax=cmax,levels=clevs,extend='both',zorder=3)
+
+ plt.rc('font',weight='normal')
+ plt.clabel(cs,cs.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+ plt.rc('font',weight='normal')
+
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('Nina (%s seasons)' % int(len(negative_indices)),fontsize=10)
+ ax2.spines['top'].set_visible(False)
+ ax2.spines['right'].set_visible(False)
+ ax2.spines['bottom'].set_visible(False)
+ ax2.spines['left'].set_visible(False)
+ ax2.get_yaxis().set_ticks([])
+
+ ########
+ # Nino #
+ ########
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(1,cols[i]), projection=projection)
+ ax1.set_extent(axes, ccrs.PlateCarree())
+
+ # Plot style features #
+
+ ax1.coastlines(linewidth=0.25)
+ theta = np.linspace(0, 2*np.pi, 100)
+ center, radius = [0.5, 0.5], 0.5
+ verts = np.vstack([np.sin(theta), np.cos(theta)]).T
+ circle = mpath.Path(verts * radius + center)
+ ax1.set_boundary(circle, transform=ax1.transAxes)
+ pos1 = ax1.get_position()
+ plt.title("%s" % titles[i], fontsize=10,fontweight='normal',y=0.98)
+ cyclic_z, cyclic_lon = add_cyclic_point(nino_out[i], coord=lons)
+
+ # Plot anomalies #
+
+ contourf = ax1.contourf(cyclic_lon, lats, cyclic_z,transform=ccrs.PlateCarree(),cmap=cmap1,vmin=vmin,vmax=vmax,levels=vlevs,extend='both',zorder=1)
+
+ # Overlay the climatology #
+
+ cyclic_clim, cyclic_lon = add_cyclic_point(clim_out[i], coord=lons)
+ cs = ax1.contour(cyclic_lon, lats, cyclic_clim,transform=ccrs.PlateCarree(),colors='k',linewidths=0.5,vmin=cmin,vmax=cmax,levels=clevs,extend='both',zorder=3)
+
+ plt.rc('font',weight='normal')
+ plt.clabel(cs,cs.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+ plt.rc('font',weight='normal')
+
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('Nino (%s seasons)' % int(len(positive_indices)),fontsize=10)
+ ax2.spines['top'].set_visible(False)
+ ax2.spines['right'].set_visible(False)
+ ax2.spines['bottom'].set_visible(False)
+ ax2.spines['left'].set_visible(False)
+ ax2.get_yaxis().set_ticks([])
+
+ ##############
+ # Difference #
+ ##############
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(2,cols[i]), projection=projection)
+ ax1.set_extent(axes, ccrs.PlateCarree())
+
+ # Plot style features #
+
+ ax1.coastlines(linewidth=0.25)
+ theta = np.linspace(0, 2*np.pi, 100)
+ center, radius = [0.5, 0.5], 0.5
+ verts = np.vstack([np.sin(theta), np.cos(theta)]).T
+ circle = mpath.Path(verts * radius + center)
+ ax1.set_boundary(circle, transform=ax1.transAxes)
+ pos1 = ax1.get_position()
+ plt.title("%s" % titles[i], fontsize=10,fontweight='normal',y=0.98)
+ cyclic_z, cyclic_lon = add_cyclic_point(diff_out[i], coord=lons)
+
+ # Plot anomalies #
+
+ contourf = ax1.contourf(cyclic_lon, lats, cyclic_z,transform=ccrs.PlateCarree(),cmap=cmap1,vmin=vmin,vmax=vmax,levels=vlevs,extend='both',zorder=1)
+
+ # Statistical significance #
+
+ sig_levs = [0.95,1]
+ mpl.rcParams['hatch.linewidth'] = 0.2
+ cyclic_sig, cyclic_lontmp = add_cyclic_point(sigs_out[i], coord=lons)
+ ax1.contourf(cyclic_lon, lats, cyclic_sig,transform=ccrs.PlateCarree(),colors='black',vmin=0.95,vmax=1,levels=sig_levs,hatches=['......','......'],alpha=0.0,zorder=2)
+
+ # Overlay the climatology #
+
+ cyclic_clim, cyclic_lon = add_cyclic_point(clim_out[i], coord=lons)
+ cs = ax1.contour(cyclic_lon, lats, cyclic_clim,transform=ccrs.PlateCarree(),colors='k',linewidths=0.5,vmin=cmin,vmax=cmax,levels=clevs,extend='both',zorder=3)
+
+ plt.rc('font',weight='normal')
+ plt.clabel(cs,cs.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+ plt.rc('font',weight='normal')
+
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('Nina - Nino',fontsize=10)
+ ax2.spines['top'].set_visible(False)
+ ax2.spines['right'].set_visible(False)
+ ax2.spines['bottom'].set_visible(False)
+ ax2.spines['left'].set_visible(False)
+ ax2.get_yaxis().set_ticks([])
+
+ # Add colorbar #
+
+ cb_ax = fig.add_axes([0.35, 0.05, 0.30, 0.015])
+ cbar = fig.colorbar(contourf, cax=cb_ax, ticks=ticks,orientation='horizontal')
+ cbar.ax.tick_params(labelsize=8, width=1)
+ cbar.ax.set_xticklabels(ticks,weight='normal')
+
+
+ plt.subplots_adjust(top=0.86,bottom=0.09,hspace=0.3,wspace=0.0,left=0.02,right=0.94)
+
+ return fig, ax
\ No newline at end of file
diff --git a/diagnostics/stc_qbo_enso/stc_qbo_enso_plottingcodeqbo.py b/diagnostics/stc_qbo_enso/stc_qbo_enso_plottingcodeqbo.py
new file mode 100644
index 000000000..7b311821a
--- /dev/null
+++ b/diagnostics/stc_qbo_enso/stc_qbo_enso_plottingcodeqbo.py
@@ -0,0 +1,774 @@
+'''
+This module contains functions used in the Stratospheric QBO and ENSO POD.
+
+Contains:
+ qbo_slp: plots sea level pressure response to QBO as a function of month and QBO phase
+ qbo_uzm: plots the zonal-mean zonal wind response to QBO as a function of month and QBO phase
+ qbo_vt: plots the zonally averaged eddy heat flux response to the QBO as a function of month and QBO phase
+'''
+
+import numpy as np
+import matplotlib as mpl
+import matplotlib.pyplot as plt
+import cartopy.crs as ccrs
+import matplotlib.path as mpath
+from cartopy.util import add_cyclic_point
+
+from scipy import stats
+
+##################################################################################################
+##################################################################################################
+##################################################################################################
+
+def qbo_uzm(uzm,negative_indices,positive_indices,QBOisobar,titles,plot_months,axes):
+
+ r""" Compute the zonal mean zonal wind response to the QBO. Anomalies are defined as
+ deviations from the seasonal cycle and composites of anomalies are made during easterly QBO,
+ westerly QBO, and then their difference is shown. Stippling is used to denote statistical
+ significance on the EQBO minus WQBO composites.
+
+ Parameters
+ ----------
+ uzm : xarray.DataArray
+ The zonal mean zonal wind.
+
+ negative_indices : list
+ A list of easterly QBO years.
+
+ positive_indices : list
+ A list of westerly QBO years.
+
+ QBOisobar : int
+ An integer defined by the user in the config.jsonc file specifying what isobar
+ is used to index the QBO
+
+ titles : list of strings
+ A list of month names that will be used as the titles for each column of subplots
+
+ plot_months : list
+ A list of numbers corresponding to each month (e.g., 10 = october)
+
+ axes : A list of numbers used to set the pressure and latitude limits of the subplots.
+ [0,90,1000,1] are used for the N. hemisphere and [-90,0,1000,1] for S. hemisphere
+
+ Returns
+ -------
+ 3 row by 5 column plot of easterly QBO uzm anomalies (top row), westerly QBO uzm
+ anomalies (middle), and their difference (bottom row) with stippling highlighting differences
+ between EQBO and WQBO winds statistically significant at the 95% level using a two-sided t-test.
+
+ Notes
+ -----
+ The input field uzm is assumed to have dimensions named "lat" and "lev".
+ E.g., if your data has dimensions "latitude" and/or "level",
+ use the rename method:
+ ds.rename({'latitude':'lat','level':'lev'})
+ """
+
+ eqbo_out = []
+ wqbo_out = []
+ diff_out = []
+ sigs_out = []
+ clim_out = []
+
+ for mon in plot_months:
+
+ clim = uzm.sel(time=uzm.time.dt.month.isin([mon]))
+ clim_out.append(clim.mean('time').values)
+
+ anom = clim.groupby("time.month") - clim.groupby("time.month").mean("time")
+
+ if mon == 7 or mon == 8 or mon == 9 or mon == 10 or mon == 11 or mon == 12:
+ tmp_negative_indices = np.add(negative_indices,0)
+ tmp_positive_indices = np.add(positive_indices,0)
+ if mon == 1 or mon == 2 or mon == 3:
+ tmp_negative_indices = np.add(negative_indices,1)
+ tmp_positive_indices = np.add(positive_indices,1)
+
+ eqbo_tmp = anom.sel(time=anom.time.dt.year.isin([tmp_negative_indices]))
+ wqbo_tmp = anom.sel(time=anom.time.dt.year.isin([tmp_positive_indices]))
+
+ t,p = stats.ttest_ind(eqbo_tmp.values,wqbo_tmp.values,axis=0,nan_policy='omit')
+
+ sigs_out.append(np.subtract(1,p))
+ diff_out.append(np.subtract(eqbo_tmp.mean('time').values,wqbo_tmp.mean('time').values))
+ eqbo_out.append(eqbo_tmp.mean('time').values)
+ wqbo_out.append(wqbo_tmp.mean('time').values)
+
+ clim.close()
+ anom.close()
+ eqbo_tmp.close()
+ wqbo_tmp.close()
+ uzm.close()
+
+ eqbo_out = np.array(eqbo_out)
+ wqbo_out = np.array(wqbo_out)
+ diff_out = np.array(diff_out)
+ sigs_out = np.array(sigs_out)
+ clim_out = np.array(clim_out)
+
+ ############# Begin the plotting ############
+
+ fig, ax = plt.subplots()
+
+ mpl.rcParams['font.sans-serif'].insert(0, 'Arial')
+
+ vmin = -10
+ vmax = 10
+ vlevs = np.linspace(vmin,vmax,num=21)
+ vlevs = [v for v in vlevs if v != 0]
+ ticks = [vmin,vmin/2,0,vmax/2,vmax]
+
+ cmin = -200
+ cmax = 200
+ clevs = np.linspace(cmin,cmax,num=41)
+ clevs = [v for v in clevs if v != 0]
+
+ plt.suptitle('QBO (5S-5N index @ %s hPa) zonal-mean zonal wind (m/s)' % int(QBOisobar),fontsize=12,fontweight='normal')
+
+ # Add colormap #
+ from palettable.colorbrewer.diverging import RdBu_11
+ cmap1=RdBu_11.mpl_colormap.reversed()
+
+ x, y = np.meshgrid(uzm.lat.values,uzm.lev.values)
+
+ cols = [0,1,2,3,4]
+
+ for i in cols:
+
+ # eqbo #
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(0,cols[i]))
+ plt.title('%s' % titles[i],fontsize=10,y=0.93,fontweight='normal')
+
+ cs = plt.contourf(x,y,eqbo_out[i],cmap=cmap1,levels=vlevs,extend="both",vmin=vmin,vmax=vmax,zorder=1)
+
+ mpl.rcParams["lines.linewidth"] = 0.2
+ mpl.rcParams["lines.dashed_pattern"] = 10, 3
+ black = plt.contour(x,y,clim_out[i],colors='k',levels=clevs,extend="both",vmin=cmin,vmax=cmax,zorder=3)
+ plt.clabel(black,black.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+
+ plt.semilogy()
+ yticks = [1,5,10,100,1000]
+ plt.yticks(yticks,yticks,fontsize=6,fontweight='normal')
+ xticks = [-90,-60,-30,0,30,60,90]
+ plt.xticks(xticks,xticks,fontsize=6,fontweight='normal')
+ plt.gca().invert_yaxis()
+ plt.axis(axes)
+ if i == 0:
+ plt.ylabel('Pressure (hPa)',fontsize=8,fontweight='normal')
+
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('eqbo (%s seasons)' % int(len(negative_indices)),fontsize=10)
+
+ # wqbo #
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(1,cols[i]))
+
+ cs = plt.contourf(x,y,wqbo_out[i],cmap=cmap1,levels=vlevs,extend="both",vmin=vmin,vmax=vmax,zorder=1)
+
+ mpl.rcParams["lines.linewidth"] = 0.2
+ mpl.rcParams["lines.dashed_pattern"] = 10, 3
+ black = plt.contour(x,y,clim_out[i],colors='k',levels=clevs,extend="both",vmin=cmin,vmax=cmax,zorder=3)
+ plt.clabel(black,black.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+
+ plt.semilogy()
+ yticks = [1,5,10,100,1000]
+ plt.yticks(yticks,yticks,fontsize=6,fontweight='normal')
+ xticks = [-90,-60,-30,0,30,60,90]
+ plt.xticks(xticks,xticks,fontsize=6,fontweight='normal')
+ plt.gca().invert_yaxis()
+ plt.axis(axes)
+ if i == 0:
+ plt.ylabel('Pressure (hPa)',fontsize=8,fontweight='normal')
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('wqbo (%s seasons)' % int(len(positive_indices)),fontsize=10)
+
+ # Diff: eqbo minus wqbo #
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(2,cols[i]))
+
+ cs = plt.contourf(x,y,diff_out[i],cmap=cmap1,levels=vlevs,extend="both",vmin=vmin,vmax=vmax,zorder=1)
+
+ mpl.rcParams["lines.linewidth"] = 0.2
+ mpl.rcParams["lines.dashed_pattern"] = 10, 3
+ black = plt.contour(x,y,clim_out[i],colors='k',levels=clevs,extend="both",vmin=cmin,vmax=cmax,zorder=3)
+ plt.clabel(black,black.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+
+ plt.semilogy()
+ yticks = [1,5,10,100,1000]
+ plt.yticks(yticks,yticks,fontsize=6,fontweight='normal')
+ xticks = [-90,-60,-30,0,30,60,90]
+ plt.xticks(xticks,xticks,fontsize=6,fontweight='normal')
+ plt.gca().invert_yaxis()
+ plt.axis(axes)
+ if i == 0:
+ plt.ylabel('Pressure (hPa)',fontsize=8,fontweight='normal')
+ plt.xlabel('Latitude',fontsize=8,fontweight='normal')
+
+ sig_levs = [0.95,1]
+ mpl.rcParams['hatch.linewidth'] = 0.2
+ plt.contourf(x,y,sigs_out[i],colors='black',vmin=0.95,vmax=1,levels=sig_levs,hatches=['......','......'],alpha=0.0)
+
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('eqbo - wqbo',fontsize=10)
+
+ # Add colorbar #
+
+ cb_ax = fig.add_axes([0.365, 0.04, 0.30, 0.015])
+ cbar = fig.colorbar(cs, cax=cb_ax, ticks=ticks,orientation='horizontal')
+ cbar.ax.tick_params(labelsize=8, width=1)
+ cbar.ax.set_xticklabels(ticks,weight='normal')
+
+ plt.subplots_adjust(top=0.86,bottom=0.16,hspace=0.5,wspace=0.55,left=0.08,right=0.95)
+ return fig, ax
+
+##################################################################################################
+##################################################################################################
+##################################################################################################
+
+def qbo_vt(vt,negative_indices,positive_indices,QBOisobar,titles,plot_months,axes):
+
+ r""" Compute the zonal mean eddy heat flux response to the QBO. Anomalies are defined as
+ deviations from the seasonal cycle and composites of anomalies are made during easterly QBO,
+ westerly QBO, and then their difference is shown. Stippling is used to denote statistical
+ significance on the EQBO minus WQBO composites.
+
+ Parameters
+ ----------
+ vt : xarray.DataArray
+ The zonal mean eddy heat flux. This quantity is calculated using the
+ compute_total_eddy_heat_flux function given in the driver script stc_qbo_enso.py
+
+ negative_indices : list
+ A list of easterly QBO years.
+
+ positive_indices : list
+ A list of westerly QBO years.
+
+ QBOisobar : int
+ An integer defined by the user in the config.jsonc file specifying what isobar
+ is used to index the QBO
+
+ titles : list of strings
+ A list of month names that will be used as the titles for each column of subplots
+
+ plot_months : list
+ A list of numbers corresponding to each month (e.g., 10 = october)
+
+ axes : A list of numbers used to set the pressure and latitude limits of the subplots.
+ [0,90,1000,1] are used for the N. hemisphere and [-90,0,1000,1] for S. hemisphere
+
+ Returns
+ -------
+ 3 row by 5 column plot of easterly QBO vt anomalies (top row), westerly QBO vt
+ anomalies (middle), and their difference (bottom row) with stippling highlighting differences
+ between EQBO and WQBO winds statistically significant at the 95% level using a two-sided t-test.
+
+ Notes
+ -----
+ The input field vt is assumed to have dimensions named "lat" and "lev".
+ E.g., if your data has dimensions "latitude" and/or "level",
+ use the rename method:
+ ds.rename({'latitude':'lat','level':'lev'})
+ """
+
+ eqbo_out = []
+ wqbo_out = []
+ diff_out = []
+ sigs_out = []
+ clim_out = []
+
+ for mon in plot_months:
+
+ clim = vt.sel(time=vt.time.dt.month.isin([mon]))
+ clim_out.append(clim.mean('time').values)
+
+ anom = clim.groupby("time.month") - clim.groupby("time.month").mean("time")
+
+ # For NH, QBO index is based on Oct-Nov year. The plot will show Oct-Feb. Note that Jan-Feb are selected using QBO index year + 1
+ # For SH, QBO index is based on Jul-Aug year. The plot will show Jul-Nov.
+
+ if mon == 7 or mon == 8 or mon == 9 or mon == 10 or mon == 11 or mon == 12:
+ tmp_negative_indices = np.add(negative_indices,0)
+ tmp_positive_indices = np.add(positive_indices,0)
+ if mon == 1 or mon == 2 or mon == 3:
+ tmp_negative_indices = np.add(negative_indices,1)
+ tmp_positive_indices = np.add(positive_indices,1)
+
+ eqbo_tmp = anom.sel(time=anom.time.dt.year.isin([tmp_negative_indices]))
+ wqbo_tmp = anom.sel(time=anom.time.dt.year.isin([tmp_positive_indices]))
+
+ t,p = stats.ttest_ind(eqbo_tmp.values,wqbo_tmp.values,axis=0,nan_policy='omit')
+
+ sigs_out.append(np.subtract(1,p))
+ diff_out.append(np.subtract(eqbo_tmp.mean('time').values,wqbo_tmp.mean('time').values))
+ eqbo_out.append(eqbo_tmp.mean('time').values)
+ wqbo_out.append(wqbo_tmp.mean('time').values)
+
+ clim.close()
+ anom.close()
+ eqbo_tmp.close()
+ wqbo_tmp.close()
+ vt.close()
+
+ eqbo_out = np.array(eqbo_out)
+ wqbo_out = np.array(wqbo_out)
+ diff_out = np.array(diff_out)
+ sigs_out = np.array(sigs_out)
+ clim_out = np.array(clim_out)
+
+ ############# Begin the plotting ############
+
+ fig, ax = plt.subplots()
+
+ mpl.rcParams['font.sans-serif'].insert(0, 'Arial')
+
+ blevs = []
+
+ blevs.append(-2)
+ blevs.append(-6)
+ blevs.append(-10)
+ blevs.append(-25)
+ blevs.append(-50)
+ blevs.append(-100)
+
+ blevs.append(2)
+ blevs.append(6)
+ blevs.append(10)
+ blevs.append(25)
+ blevs.append(50)
+ blevs.append(100)
+
+ blevs = np.sort(blevs)
+ print (blevs)
+
+ cmin = -200
+ cmax = 200
+ clevs = np.linspace(cmin,cmax,num=41)
+ clevs = [v for v in clevs if v != 0]
+
+ plt.suptitle('QBO (5S-5N index @ %s hPa) zonal-mean eddy heat flux (Km/s)' % int(QBOisobar),fontsize=12,fontweight='normal')
+
+ # Add colormap #
+ from palettable.colorbrewer.diverging import RdBu_11
+ cmap1=RdBu_11.mpl_colormap.reversed()
+
+ x, y = np.meshgrid(vt.lat.values,vt.lev.values)
+
+ cols = [0,1,2,3,4]
+
+ for i in cols:
+
+ print (i)
+
+ # eqbo #
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(0,cols[i]))
+ plt.title('%s' % titles[i],fontsize=10,y=0.93,fontweight='normal')
+
+ cs = plt.contourf(x,y,eqbo_out[i],blevs,norm = mpl.colors.SymLogNorm(linthresh=2,linscale=1,vmin=-100,vmax=100),cmap=cmap1,extend="both",zorder=1)
+
+ mpl.rcParams["lines.linewidth"] = 0.2
+ mpl.rcParams["lines.dashed_pattern"] = 10, 3
+ black = plt.contour(x,y,clim_out[i],colors='k',levels=clevs,extend="both",vmin=cmin,vmax=cmax,zorder=3)
+ plt.clabel(black,black.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+
+ plt.semilogy()
+ yticks = [1,5,10,100,1000]
+ plt.yticks(yticks,yticks,fontsize=6,fontweight='normal')
+ xticks = [-90,-60,-30,0,30,60,90]
+ plt.xticks(xticks,xticks,fontsize=6,fontweight='normal')
+ plt.gca().invert_yaxis()
+ plt.axis(axes)
+ if i == 0:
+ plt.ylabel('Pressure (hPa)',fontsize=8,fontweight='normal')
+
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('eqbo (%s seasons)' % int(len(negative_indices)),fontsize=10)
+
+ # wqbo #
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(1,cols[i]))
+
+ cs = plt.contourf(x,y,wqbo_out[i],blevs,norm = mpl.colors.SymLogNorm(linthresh=2,linscale=1,vmin=-100,vmax=100),cmap=cmap1,extend="both",zorder=1)
+
+ mpl.rcParams["lines.linewidth"] = 0.2
+ mpl.rcParams["lines.dashed_pattern"] = 10, 3
+ black = plt.contour(x,y,clim_out[i],colors='k',levels=clevs,extend="both",vmin=cmin,vmax=cmax,zorder=3)
+ plt.clabel(black,black.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+
+ plt.semilogy()
+ yticks = [1,5,10,100,1000]
+ plt.yticks(yticks,yticks,fontsize=6,fontweight='normal')
+ xticks = [-90,-60,-30,0,30,60,90]
+ plt.xticks(xticks,xticks,fontsize=6,fontweight='normal')
+ plt.gca().invert_yaxis()
+ plt.axis(axes)
+ if i == 0:
+ plt.ylabel('Pressure (hPa)',fontsize=8,fontweight='normal')
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('wqbo (%s seasons)' % int(len(positive_indices)),fontsize=10)
+
+ # Diff: eqbo minus wqbo #
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(2,cols[i]))
+
+ cs = plt.contourf(x,y,diff_out[i],blevs,norm = mpl.colors.SymLogNorm(linthresh=2,linscale=1,vmin=-100,vmax=100),cmap=cmap1,extend="both",zorder=1)
+
+ mpl.rcParams["lines.linewidth"] = 0.2
+ mpl.rcParams["lines.dashed_pattern"] = 10, 3
+ black = plt.contour(x,y,clim_out[i],colors='k',levels=clevs,extend="both",vmin=cmin,vmax=cmax,zorder=3)
+ plt.clabel(black,black.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+
+ plt.semilogy()
+ yticks = [1,5,10,100,1000]
+ plt.yticks(yticks,yticks,fontsize=6,fontweight='normal')
+ xticks = [-90,-60,-30,0,30,60,90]
+ plt.xticks(xticks,xticks,fontsize=6,fontweight='normal')
+ plt.gca().invert_yaxis()
+ plt.axis(axes)
+ if i == 0:
+ plt.ylabel('Pressure (hPa)',fontsize=8,fontweight='normal')
+ plt.xlabel('Latitude',fontsize=8,fontweight='normal')
+
+ sig_levs = [0.95,1]
+ mpl.rcParams['hatch.linewidth'] = 0.2
+ plt.contourf(x,y,sigs_out[i],colors='black',vmin=0.95,vmax=1,levels=sig_levs,hatches=['......','......'],alpha=0.0)
+
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('eqbo - wqbo',fontsize=10)
+
+ # Add colorbar #
+
+ oticks = [-100,-50,-25,-10,-6,-2,2,6,10,25,50,100]
+ cb_ax = fig.add_axes([0.365, 0.04, 0.30, 0.015])
+ cbar = fig.colorbar(cs, cax=cb_ax, ticks=oticks,orientation='horizontal')
+ cbar.ax.tick_params(labelsize=8, width=1)
+ cbar.ax.set_xticklabels(oticks,weight='normal')
+
+ plt.subplots_adjust(top=0.86,bottom=0.16,hspace=0.5,wspace=0.55,left=0.08,right=0.95)
+
+ return fig, ax
+##################################################################################################
+##################################################################################################
+##################################################################################################
+
+def qbo_slp(ps,negative_indices,positive_indices,QBOisobar,titles,plot_months,projection,axes):
+
+ r""" Compute the sea level pressure response to QBO. Anomalies are defined as
+ deviations from the seasonal cycle and composites of anomalies are made during easterly QBO years,
+ westerly QBO years, and then their difference is shown. Stippling is used to denote statistical
+ significance on the EQBO minus WQBO composites.
+
+ Parameters
+ ----------
+ ps : xarray.DataArray
+ The sea level pressure.
+
+ negative_indices : list
+ A list of easterly QBO years.
+
+ positive_indices : list
+ A list of westerly QBO years.
+
+ QBOisobar : int
+ An integer defined by the user in the config.jsonc file specifying what isobar
+ is used to index the QBO
+
+ titles : list of strings
+ A list of month names that will be used as the titles for each column of subplots
+
+ plot_months : list
+ A list of numbers corresponding to each month (e.g., 10 = october)
+
+ projection : ccrs.NorthPolarStereo() or ccrs.SouthPolarStereo()
+
+ axes : A list of numbers used to set the longitude and latitude bounds of the subplots.
+ [-180, 180, 20, 90] are used for the N. hemisphere and [-180, 180, -90, -20] for S. hemisphere
+
+ Returns
+ -------
+ 3 row by 5 column plot of easterly QBO ps anomalies (top row), westerly QBO ps anomalies (middle),
+ and their difference (bottom row) with stippling highlighting differences between EQBO
+ and WQBO winds statistically significant at the 95% level using a two-sided t-test.
+
+ Notes
+ -----
+ The input field ps is assumed to have dimensions named "lat" and "lon".
+ E.g., if your data has dimensions "latitude" and/or "longitude",
+ use the rename method:
+ ds.rename({'latitude':'lat','longitude':'lon'})
+
+ The input field ps is expected to have units of hPa. Directly below, the code
+ will check to see if the units are Pa instead, and if they are, convert them to hPa.
+ """
+
+ if getattr(ps,'units') == 'Pa':
+ print(f'**Converting pressure levels to hPa')
+ ps.attrs['units'] = 'hPa'
+ ps.values[:] = ps.values/100.
+
+ print (np.nanmin(ps.values))
+ print (np.nanmedian(ps.values))
+ print (np.nanmean(ps.values))
+ print (np.nanmax(ps.values))
+
+ eqbo_out = []
+ wqbo_out = []
+ diff_out = []
+ sigs_out = []
+ clim_out = []
+
+ for mon in plot_months:
+
+ clim = ps.sel(time=ps.time.dt.month.isin([mon]))
+ clim_out.append(clim.mean('time').values)
+
+ anom = clim.groupby("time.month") - clim.groupby("time.month").mean("time")
+
+ if mon == 7 or mon == 8 or mon == 9 or mon == 10 or mon == 11 or mon == 12:
+ tmp_negative_indices = np.add(negative_indices,0)
+ tmp_positive_indices = np.add(positive_indices,0)
+ if mon == 1 or mon == 2 or mon == 3:
+ tmp_negative_indices = np.add(negative_indices,1)
+ tmp_positive_indices = np.add(positive_indices,1)
+
+ eqbo_tmp = anom.sel(time=anom.time.dt.year.isin([tmp_negative_indices]))
+ wqbo_tmp = anom.sel(time=anom.time.dt.year.isin([tmp_positive_indices]))
+
+ t,p = stats.ttest_ind(eqbo_tmp.values,wqbo_tmp.values,axis=0,nan_policy='omit')
+
+ sigs_out.append(np.subtract(1,p))
+ diff_out.append(np.subtract(eqbo_tmp.mean('time').values,wqbo_tmp.mean('time').values))
+ eqbo_out.append(eqbo_tmp.mean('time').values)
+ wqbo_out.append(wqbo_tmp.mean('time').values)
+
+ clim.close()
+ anom.close()
+ eqbo_tmp.close()
+ wqbo_tmp.close()
+ ps.close()
+
+ eqbo_out = np.array(eqbo_out)
+ wqbo_out = np.array(wqbo_out)
+ diff_out = np.array(diff_out)
+ sigs_out = np.array(sigs_out)
+ clim_out = np.array(clim_out)
+
+ ############# Begin the plotting ############
+
+ fig, ax = plt.subplots()
+
+ mpl.rcParams['font.sans-serif'].insert(0, 'Arial')
+
+ vmin = -10
+ vmax = 10
+ vlevs = np.linspace(vmin,vmax,num=21)
+ vlevs = [v for v in vlevs if v != 0]
+ ticks = [vmin,vmin/2,0,vmax/2,vmax]
+
+ cmin = 900
+ cmax = 1100
+ clevs = np.linspace(cmin,cmax,num=21)
+
+ plt.suptitle('QBO (5S-5N index @ %s hPa) sea level pressure (hPa)' % QBOisobar,fontsize=12,fontweight='normal')
+
+ # Add colormap #
+
+ from palettable.colorbrewer.diverging import RdBu_11
+ cmap1=RdBu_11.mpl_colormap.reversed()
+
+ lons = ps.lon.values
+ lats = ps.lat.values
+
+ cols = [0,1,2,3,4]
+
+ for i in cols:
+
+ print (i)
+
+ ########
+ # eqbo #
+ ########
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(0,cols[i]), projection=projection)
+ ax1.set_extent(axes, ccrs.PlateCarree())
+ plt.title('%s' % titles[i],fontsize=10,y=0.93,fontweight='normal')
+
+ # Plot style features #
+
+ ax1.coastlines(linewidth=0.25)
+ theta = np.linspace(0, 2*np.pi, 100)
+ center, radius = [0.5, 0.5], 0.5
+ verts = np.vstack([np.sin(theta), np.cos(theta)]).T
+ circle = mpath.Path(verts * radius + center)
+ ax1.set_boundary(circle, transform=ax1.transAxes)
+ pos1 = ax1.get_position()
+ plt.title("%s" % titles[i], fontsize=10,fontweight='normal',y=0.98)
+ cyclic_z, cyclic_lon = add_cyclic_point(eqbo_out[i], coord=lons)
+
+ # Plot anomalies #
+
+ contourf = ax1.contourf(cyclic_lon, lats, cyclic_z,transform=ccrs.PlateCarree(),cmap=cmap1,vmin=vmin,vmax=vmax,levels=vlevs,extend='both',zorder=1)
+
+ # Overlay the climatology #
+
+ cyclic_clim, cyclic_lon = add_cyclic_point(clim_out[i], coord=lons)
+ cs = ax1.contour(cyclic_lon, lats, cyclic_clim,transform=ccrs.PlateCarree(),colors='k',linewidths=0.5,vmin=cmin,vmax=cmax,levels=clevs,extend='both',zorder=3)
+
+ plt.rc('font',weight='normal')
+ plt.clabel(cs,cs.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+ plt.rc('font',weight='normal')
+
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('eqbo (%s seasons)' % int(len(negative_indices)),fontsize=10)
+ ax2.spines['top'].set_visible(False)
+ ax2.spines['right'].set_visible(False)
+ ax2.spines['bottom'].set_visible(False)
+ ax2.spines['left'].set_visible(False)
+ ax2.get_yaxis().set_ticks([])
+
+ ########
+ # wqbo #
+ ########
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(1,cols[i]), projection=projection)
+ ax1.set_extent(axes, ccrs.PlateCarree())
+
+ # Plot style features #
+
+ ax1.coastlines(linewidth=0.25)
+ theta = np.linspace(0, 2*np.pi, 100)
+ center, radius = [0.5, 0.5], 0.5
+ verts = np.vstack([np.sin(theta), np.cos(theta)]).T
+ circle = mpath.Path(verts * radius + center)
+ ax1.set_boundary(circle, transform=ax1.transAxes)
+ pos1 = ax1.get_position()
+ plt.title("%s" % titles[i], fontsize=10,fontweight='normal',y=0.98)
+ cyclic_z, cyclic_lon = add_cyclic_point(wqbo_out[i], coord=lons)
+
+ # Plot anomalies #
+
+ contourf = ax1.contourf(cyclic_lon, lats, cyclic_z,transform=ccrs.PlateCarree(),cmap=cmap1,vmin=vmin,vmax=vmax,levels=vlevs,extend='both',zorder=1)
+
+ # Overlay the climatology #
+
+ cyclic_clim, cyclic_lon = add_cyclic_point(clim_out[i], coord=lons)
+ cs = ax1.contour(cyclic_lon, lats, cyclic_clim,transform=ccrs.PlateCarree(),colors='k',linewidths=0.5,vmin=cmin,vmax=cmax,levels=clevs,extend='both',zorder=3)
+
+ plt.rc('font',weight='normal')
+ plt.clabel(cs,cs.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+ plt.rc('font',weight='normal')
+
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('wqbo (%s seasons)' % int(len(positive_indices)),fontsize=10)
+ ax2.spines['top'].set_visible(False)
+ ax2.spines['right'].set_visible(False)
+ ax2.spines['bottom'].set_visible(False)
+ ax2.spines['left'].set_visible(False)
+ ax2.get_yaxis().set_ticks([])
+
+ ##############
+ # Difference #
+ ##############
+
+ ax1 = plt.subplot2grid(shape=(3,5), loc=(2,cols[i]), projection=projection)
+ ax1.set_extent(axes, ccrs.PlateCarree())
+
+ # Plot style features #
+
+ ax1.coastlines(linewidth=0.25)
+ theta = np.linspace(0, 2*np.pi, 100)
+ center, radius = [0.5, 0.5], 0.5
+ verts = np.vstack([np.sin(theta), np.cos(theta)]).T
+ circle = mpath.Path(verts * radius + center)
+ ax1.set_boundary(circle, transform=ax1.transAxes)
+ pos1 = ax1.get_position()
+ plt.title("%s" % titles[i], fontsize=10,fontweight='normal',y=0.98)
+ cyclic_z, cyclic_lon = add_cyclic_point(diff_out[i], coord=lons)
+
+ # Plot anomalies #
+
+ contourf = ax1.contourf(cyclic_lon, lats, cyclic_z,transform=ccrs.PlateCarree(),cmap=cmap1,vmin=vmin,vmax=vmax,levels=vlevs,extend='both',zorder=1)
+
+ # Statistical significance #
+
+ sig_levs = [0.95,1]
+ mpl.rcParams['hatch.linewidth'] = 0.2
+ cyclic_sig, cyclic_lontmp = add_cyclic_point(sigs_out[i], coord=lons)
+ ax1.contourf(cyclic_lon, lats, cyclic_sig,transform=ccrs.PlateCarree(),colors='black',vmin=0.95,vmax=1,levels=sig_levs,hatches=['......','......'],alpha=0.0,zorder=2)
+
+ # Overlay the climatology #
+
+ cyclic_clim, cyclic_lon = add_cyclic_point(clim_out[i], coord=lons)
+ cs = ax1.contour(cyclic_lon, lats, cyclic_clim,transform=ccrs.PlateCarree(),colors='k',linewidths=0.5,vmin=cmin,vmax=cmax,levels=clevs,extend='both',zorder=3)
+
+ plt.rc('font',weight='normal')
+ plt.clabel(cs,cs.levels[:],inline=1,fmt='%1.0f',fontsize=4,colors='k',inline_spacing=1)
+ plt.rc('font',weight='normal')
+
+ if i == 4:
+ ax2 = ax1.twinx()
+ yticks = [0,0.5,1.0]
+ ylabels = ['','','']
+ ax2.set_yticks(yticks)
+ ax2.set_yticklabels(ylabels, fontsize=8, fontweight = 'normal')
+ ax2.set_ylabel('eqbo - wqbo',fontsize=10)
+ ax2.spines['top'].set_visible(False)
+ ax2.spines['right'].set_visible(False)
+ ax2.spines['bottom'].set_visible(False)
+ ax2.spines['left'].set_visible(False)
+ ax2.get_yaxis().set_ticks([])
+
+ # Add colorbar #
+
+ cb_ax = fig.add_axes([0.35, 0.05, 0.30, 0.015])
+ cbar = fig.colorbar(contourf, cax=cb_ax, ticks=ticks,orientation='horizontal')
+ cbar.ax.tick_params(labelsize=8, width=1)
+ cbar.ax.set_xticklabels(ticks,weight='normal')
+
+
+ plt.subplots_adjust(top=0.86,bottom=0.09,hspace=0.3,wspace=0.0,left=0.02,right=0.94)
+
+ return fig, ax
\ No newline at end of file
From 5948f3db54e6dec568b7c49037f9bf9999d7e360 Mon Sep 17 00:00:00 2001
From: Jess <20195932+wrongkindofdoctor@users.noreply.github.com>
Date: Wed, 31 Jan 2024 06:29:38 -0500
Subject: [PATCH 3/7] Delete extraneous config file from last merge
---
config_file.jsonc | 125 ----------------------------------------------
1 file changed, 125 deletions(-)
delete mode 100644 config_file.jsonc
diff --git a/config_file.jsonc b/config_file.jsonc
deleted file mode 100644
index 4f8f0aa7d..000000000
--- a/config_file.jsonc
+++ /dev/null
@@ -1,125 +0,0 @@
-// Configuration for MDTF-diagnostics driver script self-test.
-//
-// Copy this file and customize the settings as needed to run the framework on
-// your own model output without repeating command-line options. Pass it to the
-// framework at the end of the command line (positionally) or with the
-// -f/--input-file flag. Any other explicit command line options will override
-// what's listed here.
-//
-// All text to the right of an unquoted "//" is a comment and ignored, as well
-// as blank lines (JSONC quasi-standard.)
-{
- "case_list" : [
- // The cases below correspond to the different sample model data sets. Note
- // that the MDTF package does not currently support analyzing multiple
- // models in a single invocation. Comment out or delete the first entry and
- // uncomment the second to run NOAA-GFDL-AM4 only for the MJO_prop_amp POD,
- // and likewise for the SM_ET_coupling POD.
- {
- "CASENAME" : "CESM2-WACCM_CMIP_historical_r1i1p1f1",
- "model" : "CESM2-WACCM_CMIP_historical_r1i1p1f1",
- "convention" : "CMIP",
- "FIRSTYR" : 1979,
- "LASTYR" : 2014,
- "QBOisobar" : 30,
- "pod_list": ["stc_qbo_enso"]
- }
- // {
- // "CASENAME" : "GFDL.CM4.c96L32.am4g10r8",
- // "model" : "AM4",
- // "convention" : "GFDL",
- // "FIRSTYR" : 1,
- // "LASTYR" : 10,
- // "pod_list" : ["MJO_prop_amp"]
- // }
- // {
- // "CASENAME" : "Lmon_GISS-E2-H_historical_r1i1p1",
- // "model" : "CMIP",
- // "convention" : "CMIP",
- // "FIRSTYR" : 1951,
- // "LASTYR" : 2005,
- // "pod_list" : ["SM_ET_coupling"]
- // }
- // {
- // "CASENAME" : "NCAR-CAM5.timeslice",
- // "model" : "CESM",
- // "convention" : "CMIP",
- // "FIRSTYR" : 2000,
- // "LASTYR" : 2004,
- // "pod_list": ["example"]
- // }
- ],
- // PATHS ---------------------------------------------------------------------
- // Location of supporting data downloaded when the framework was installed.
-
- // If a relative path is given, it's resolved relative to the MDTF-diagnostics
- // code directory. Environment variables (eg, $HOME) can be referenced with a
- // "$" and will be expended to their current values when the framework runs.
-
- // Parent directory containing observational data used by individual PODs.
- "OBS_DATA_ROOT": "../inputdata/obs_data",
-
- // Parent directory containing results from different models.
- //"MODEL_DATA_ROOT": "/Volumes/Personal-Folders/CCP-Amy/CMIP6_fromZac/data-for-amy/",
- //"MODEL_DATA_ROOT": "/Users/delsbury/Desktop/mdtf/scratch/",
- "MODEL_DATA_ROOT": "/Users/delsbury/Desktop/mdtf/inputdata/model/",
-
- // Working directory. Defaults to OUTPUT_DIR if blank.
- "WORKING_DIR": "../wkdir",
-
- // Directory to write output. The results of each run of the framework will be
- // put in a subdirectory of this directory.
- "OUTPUT_DIR": "../wkdir",
-
- // Location of the Anaconda/miniconda installation to use for managing
- // dependencies (path returned by running `conda info --base`.) If empty,
- // framework will attempt to determine location of system's conda installation.
- "conda_root": "/Users/delsbury/opt/anaconda3",
-
- // Directory containing the framework-specific conda environments. This should
- // be equal to the "--env_dir" flag passed to conda_env_setup.sh. If left
- // blank, the framework will look for its environments in the system default
- // location.
- "conda_env_root": "/Users/delsbury/opt/anaconda3/envs",
-
- // SETTINGS ------------------------------------------------------------------
- // Any command-line option recognized by the mdtf script (type `mdtf --help`)
- // can be set here, in the form "flag name": "desired setting".
-
- // Method used to fetch model data.
- "data_manager": "Local_File",
-
- // Type of data that POD(s) will analyze
- // "single_run" (default) or "multi_run"
- "data_type": "single_run",
-
- // Method used to manage dependencies.
- "environment_manager": "Conda",
-
- // Settings affecting what output is generated:
-
- // Set to true to have PODs save postscript figures in addition to bitmaps.
- "save_ps": false,
-
- // Set to true to have PODs save netCDF files of processed data.
- "save_nc": true,
-
- // Set to true to save HTML and bitmap plots in a .tar file.
- "make_variab_tar": false,
-
- // Set to true to overwrite results in OUTPUT_DIR; otherwise results saved
- // under a unique name.
- "overwrite": true,
-
- // Settings used in debugging:
-
- // Log verbosity level.
- "verbose": 1,
-
- // Set to true for framework test. Data is fetched but PODs are not run.
- "test_mode": false,
-
- // Set to true for framework test. No external commands are run and no remote
- // data is copied. Implies test_mode.
- "dry_run": false
-}
From 4c4e91f619ed66e94305133b75fb39c6853cc37a Mon Sep 17 00:00:00 2001
From: Jess <20195932+wrongkindofdoctor@users.noreply.github.com>
Date: Wed, 31 Jan 2024 06:36:49 -0500
Subject: [PATCH 4/7] Add stc_qbo_enso POD to readme.md table
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 0af8b335d..8db5a1948 100644
--- a/README.md
+++ b/README.md
@@ -32,6 +32,7 @@ and a link to the full documentation for each currently-supported POD.
| [Soil Moisture-Evapotranspiration coupling](https://github.com/NOAA-GFDL/MDTF-diagnostics/blob/main/diagnostics/SM_ET_coupling/doc/SM_ET_coupling.rst) | Eric Wood (Princeton) |
| [Stratosphere-Troposphere Coupling: Annular Modes](https://github.com/NOAA-GFDL/MDTF-diagnostics/blob/main/diagnostics/stc_annular_modes/doc/stc_annular_modes.rst) | Amy H. Butler (NOAA CSL), Zachary D. Lawrence (CIRES/NOAA PSL) |
| [Stratosphere-Troposphere Coupling: Eddy Heat Fluxes](https://github.com/NOAA-GFDL/MDTF-diagnostics/blob/main/diagnostics/stc_eddy_heat_fluxes/doc/stc_eddy_heat_fluxes.rst) | Amy H. Butler (NOAA CSL), Zachary D. Lawrence (CIRES/NOAA PSL) |
+| [Stratosphere-Troposphere Coupling: QBO and ENSO stratospheric teleconnections](https://github.com/NOAA-GFDL/MDTF-diagnostics/blob/main/diagnostics/stc_qbo_enso/doc/stc_qbo_enso.rst) | Amy H. Butler (NOAA CSL), Zachary D. Lawrence (CIRES/NOAA PSL), Dillon Elsbury (NOAA) |
| [Stratosphere-Troposphere Coupling: Stratospheric Ozone and Circulation](https://github.com/NOAA-GFDL/MDTF-diagnostics/blob/main/diagnostics/stc_eddy_heat_fluxes/doc/stc_ozone.rst) | Amy H. Butler (NOAA CSL), Zachary D. Lawrence (CIRES/NOAA PSL) |
| [Stratosphere-Troposphere Coupling: Stratospheric Polar Vortex Extremes](https://github.com/NOAA-GFDL/MDTF-diagnostics/blob/main/diagnostics/stc_spv_extremes/doc/stc_spv_extremes.rst) | Amy H. Butler (NOAA CSL), Zachary D. Lawrence (CIRES/NOAA PSL) |
| [Stratosphere-Troposphere Coupling: Vertical Wave Coupling](https://github.com/NOAA-GFDL/MDTF-diagnostics/blob/main/diagnostics/stc_vert_wave_coupling/doc/stc_vert_wave_coupling.rst) | Amy H. Butler (NOAA CSL), Zachary D. Lawrence (CIRES/NOAA PSL) |
From 8778a3a3353ae4f7776a80aeeea693b74718da97 Mon Sep 17 00:00:00 2001
From: Jess <20195932+wrongkindofdoctor@users.noreply.github.com>
Date: Fri, 2 Feb 2024 13:16:08 -0500
Subject: [PATCH 5/7] Update fieldlist tables (#506)
* Update fieldlist_CMIP.jsonc
add rldcs and rldscs to CMIP fieldlist
* Update fieldlist_GFDL.jsonc
add clear sky radiation and lhflx vars to GFDL fieldlist
* Update fieldlist_NCAR.jsonc
add radiation variables to NCAR fieldlist
* add hfls to GFDL fieldlist and fix typos
* fix typo in GFDL fieldlist realm entry
---
data/fieldlist_CMIP.jsonc | 16 +++++++
data/fieldlist_GFDL.jsonc | 90 +++++++++++++++++++++++++++++++++++++++
data/fieldlist_NCAR.jsonc | 60 ++++++++++++++++++++++++++
3 files changed, 166 insertions(+)
diff --git a/data/fieldlist_CMIP.jsonc b/data/fieldlist_CMIP.jsonc
index 25144e7a8..2214efb2b 100644
--- a/data/fieldlist_CMIP.jsonc
+++ b/data/fieldlist_CMIP.jsonc
@@ -200,6 +200,22 @@
"units": "W m-2",
"ndim": 3
},
+ "rldcs": {
+ "realm": "atmos",
+ "standard_name": "downwelling_longwave_flux_in_air_assuming_clear_sky",
+ "units": "W m-2",
+ "long_name": "Downwelling Clear-Sky Longwave Radiation",
+ "positive": "down",
+ "scalar_coord_templates" : {"plev": "rldcs{value}"},
+ "ndim": 4
+ },
+ "rldscs": {
+ "realm": "atmos",
+ "standard_name": "surface_downwelling_longwave_flux_in_air_assuming_clear_sky",
+ "units": "W m-2",
+ "long_name": "Surface Downwelling Clear-Sky Longwave Radiation",
+ "ndim": 3
+ },
"rlut": {
"standard_name": "toa_outgoing_longwave_flux",
"units": "W m-2",
diff --git a/data/fieldlist_GFDL.jsonc b/data/fieldlist_GFDL.jsonc
index 90cfea356..e13a994d2 100644
--- a/data/fieldlist_GFDL.jsonc
+++ b/data/fieldlist_GFDL.jsonc
@@ -153,6 +153,96 @@
"units": "W m-2",
"ndim": 3
},
+ "shflx": {
+ "standard_name": "sensible_heat_flux",
+ "realm" : "atmos",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "hfls": {
+ "standard_name": "surface_upward_latent_heat_flux",
+ "realm": "atmos",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "lwflx": {
+ "standard_name": "net_longwave_flux",
+ "realm": "atmos",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "olr_clr": {
+ "standard_name": "clearsky_outgoing_longwave_radiation",
+ "realm": "atmos",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "swdn_sfc_ad_clr": {
+      "standard_name": "clear_sky_SW_flux_down_at_surface_without_aerosol",
+ "realm": "atmos",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "swdn_sfc_clr": {
+      "standard_name": "clear_sky_SW_flux_down_at_surface",
+ "realm": "atmos",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "swdn_toa_clr": {
+ "standard_name": "clear_sky_SW_flux_down_at_TOA",
+ "realm": "atmos",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "swup_sfc_ad_clr": {
+ "standard_name": "clearsky_SW_flux_up_at_surface_without_aerosol",
+ "realm": "atmos",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "swup_sfc_clr": {
+ "standard_name": "clear_sky_SW_flux_up_at_surface",
+ "realm": "atmos",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "swup_toa_ad_clr": {
+ "standard_name": "clear_sky_SW_flux_up_at_TOA_without_aerosol",
+ "realm": "atmos",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "swup_toa_clr": {
+ "standard_name": "clear_sky_SW_flux_up_at_TOA",
+ "realm": "atmos",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "lwdn_sfc_clr": {
+      "standard_name": "clear_sky_LW_flux_down_at_surface",
+ "realm": "atmos",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "lwsfc_ad_clr": {
+ "standard_name": "clear_sky_Net_LW_flux_at_surface_without_aerosol",
+ "realm": "atmos",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "lwtoa_ad_clr": {
+ "standard_name": "clear_sky_Net_LW_flux_at_TOA_without_aerosol",
+ "realm": "atmos",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "lwup_sfc_clr": {
+ "standard_name": "clear_sky_LW_flux_up_at_surface",
+ "realm": "atmos",
+ "units": "W m-2",
+ "ndim": 3
+ },
// Variables for AMOC_3D_Structure module:
// "uo": {
// NB: need to perform rotation to get from u,v?
diff --git a/data/fieldlist_NCAR.jsonc b/data/fieldlist_NCAR.jsonc
index 709fdbc06..a51014815 100644
--- a/data/fieldlist_NCAR.jsonc
+++ b/data/fieldlist_NCAR.jsonc
@@ -167,6 +167,66 @@
"units": "W m-2",
"ndim": 3
},
+ "FSNS": {
+ "standard_name": "net_solar_flux_at_surface",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "FLNS": {
+ "standard_name": "net_longwave_flux_at_surface",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "FLNSC": {
+ "standard_name": "clearsky_net_longwave_flux_at_surface",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "FSDS": {
+ "standard_name": "downwelling_solar_flux_at_surface",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "FLDSC": {
+ "standard_name": "clearsky_downwelling_longwave_flux_at_surface",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "FLUTC": {
+ "standard_name": "clearsky_upwelling_longwave_flux_at_top_of_model",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "FLNTC": {
+ "standard_name": "clearsky_toa_outgoing_longwave_flux",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "FSNSC": {
+ "standard_name": "clearsky_net_solar_flux_at_surface",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "FSDSC": {
+ "standard_name": "clearsky_downwelling_solar_flux_at_surface",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "FSNTOA": {
+ "standard_name": "net_solar_flux_at_top_of_atmosphere",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "FSUTOA": {
+ "standard_name": "upwelling_solar_flux_at_top_of_atmosphere",
+ "units": "W m-2",
+ "ndim": 3
+ },
+ "FSNTOAC": {
+ "standard_name": "clearsky_net_solar_flux_at_top_of_atmosphere",
+ "units": "W m-2",
+ "ndim": 3
+ },
"SHFLX": {
"standard_name": "surface_upward_sensible_heat_flux",
"units": "W m-2",
From d74af2e9a9e1d87388a172b39b9918f7c25fcb8a Mon Sep 17 00:00:00 2001
From: Jess <20195932+wrongkindofdoctor@users.noreply.github.com>
Date: Mon, 5 Feb 2024 13:58:53 -0500
Subject: [PATCH 6/7] Forcing feedback fork (#508)
* create forcing_feedback.rst file
* add forcing_feedback html template
* add forcing_feedback.py driver script
* add forcing_feedback_kernelcalcs.py
* add forcing_feedback_plot.py
* add forcing_feedback_util.py
* add obs_processing_script.py
* add forcing_feedback POD settings file
* Update README.md
---
README.md | 1 +
.../forcing_feedback/doc/forcing_feedback.rst | 114 ++++
.../forcing_feedback/forcing_feedback.html | 67 ++
.../forcing_feedback/forcing_feedback.py | 77 +++
.../forcing_feedback_kernelcalcs.py | 347 ++++++++++
.../forcing_feedback/forcing_feedback_plot.py | 251 ++++++++
.../forcing_feedback/forcing_feedback_util.py | 600 ++++++++++++++++++
.../forcing_feedback/obs_processing_script.py | 214 +++++++
diagnostics/forcing_feedback/settings.jsonc | 111 ++++
9 files changed, 1782 insertions(+)
create mode 100644 diagnostics/forcing_feedback/doc/forcing_feedback.rst
create mode 100644 diagnostics/forcing_feedback/forcing_feedback.html
create mode 100644 diagnostics/forcing_feedback/forcing_feedback.py
create mode 100644 diagnostics/forcing_feedback/forcing_feedback_kernelcalcs.py
create mode 100644 diagnostics/forcing_feedback/forcing_feedback_plot.py
create mode 100644 diagnostics/forcing_feedback/forcing_feedback_util.py
create mode 100644 diagnostics/forcing_feedback/obs_processing_script.py
create mode 100644 diagnostics/forcing_feedback/settings.jsonc
diff --git a/README.md b/README.md
index 8db5a1948..2f208a245 100644
--- a/README.md
+++ b/README.md
@@ -20,6 +20,7 @@ and a link to the full documentation for each currently-supported POD.
| [Diurnal Cycle of Precipitation](https://www.cgd.ucar.edu/cms/bundy/Projects/diagnostics/mdtf/mdtf_figures/MDTF_QBOi.EXP1.AMIP.001.save/precip_diurnal_cycle/precip_diurnal_cycle.html) | Rich Neale (NCAR) |
| [Eulerian Storm Track](https://github.com/NOAA-GFDL/MDTF-diagnostics/blob/main/diagnostics/eulerian_storm_track/doc/eulerian_storm_track.rst) | James Booth (CUNY), Jeyavinoth Jeyaratnam |
| [Extratropical Variance (EOF 500hPa Height)](https://www.cgd.ucar.edu/cms/bundy/Projects/diagnostics/mdtf/mdtf_figures/MDTF_QBOi.EXP1.AMIP.001.save/EOF_500hPa/EOF_500hPa.html) | CESM/AMWG (NCAR) |
+| [Forcing Feedback Diagnostic](https://github.com/NOAA-GFDL/MDTF-diagnostics/blob/main/diagnostics/forcing_feedback/doc/forcing_feedback.rst) | Brian Soden (U. Miami), Ryan Kramer|
| [Mixed Layer Depth](https://github.com/NOAA-GFDL/MDTF-diagnostics/blob/main/diagnostics/mixed_layer_depth/doc/mixed_layer_depth.rst) | Cecilia Bitz (U. Washington), Lettie Roach |
| [MJO Propagation and Amplitude ](https://www.cgd.ucar.edu/cms/bundy/Projects/diagnostics/mdtf/mdtf_figures/MDTF_GFDL.CM4.c96L32.am4g10r8/MJO_prop_amp/MJO_prop_amp.html)| Xianan Jiang (UCLA) |
| [MJO Spectra and Phasing](https://www.cgd.ucar.edu/cms/bundy/Projects/diagnostics/mdtf/mdtf_figures/MDTF_QBOi.EXP1.AMIP.001.save/MJO_suite/MJO_suite.html) | CESM/AMWG (NCAR) |
diff --git a/diagnostics/forcing_feedback/doc/forcing_feedback.rst b/diagnostics/forcing_feedback/doc/forcing_feedback.rst
new file mode 100644
index 000000000..b0eba25ce
--- /dev/null
+++ b/diagnostics/forcing_feedback/doc/forcing_feedback.rst
@@ -0,0 +1,114 @@
+Forcing Feedback Diagnostic Package
+============================================================
+Last update: 12/21/2023
+
+The Forcing Feedback Diagnostic package evaluates a model's radiative forcing and radiative feedbacks. This is a common framework for understanding the radiative constraints on a model's climate sensitivity and is outlined in detail by :ref:`Sherwood et al. (2015) <1>`, among many others. To compute radiative feedbacks, anomalies of temperature, specific humidity and surface albedo are translated into radiative anomalies by multiplying them by radiative kernels developed from the CloudSat/CALIPSO Fluxes and Heating Rates product (:ref:`Kramer et al. 2019 <2>`). These radiative anomalies are regressed against the model's global-mean surface temperature anomalies to estimate the radiative feedbacks. Cloud radiative feedbacks are computed as the change in cloud radiative effects from the model's TOA radiative flux variables, corrected for cloud masking using the kernel-derived non-cloud radiative feedbacks (:ref:`Soden et al. 2008 <3>`). The Instantaneous Radiative Forcing is computed first under clear-sky conditions by subtracting kernel-derived clear-sky radiative feedbacks from the clear-sky TOA radiative imbalance diagnosed from the model's radiative flux variables. The all-sky Instantaneous Radiative Forcing is estimated by dividing the clear-sky Instantaneous Radiative Forcing by a cloud masking constant (:ref:`Kramer et al. 2021 <4>`). All radiative quantities in this package are defined at the TOA and positive represents an increase in net downwelling or a radiative heating of the planet.
+
+
+Contact info
+------------
+
+- PI of the project: Brian Soden, University of Miami (bsoden@rsmas.miami.edu);
+- Current developer: Ryan Kramer (ryan.kramer@noaa.gov)
+
+Open source copyright agreement
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This package is distributed under the LGPLv3 license (see LICENSE.txt).
+
+Functionality
+-------------
+
+The current package consists of:
+- a Python script (forcing_feedback.py), which sets up the directories and calls\.\.\.
+- \.\.\. a Python script (forcing_feedback_kernelcalcs.py) which reads the data, performs the calculations and
+saves the results to temporary netcdfs\.\.\.
+- \.\.\. Finally, a Python script (forcing_feedback_plot.py) reads in the temporary results,
+observational radiative forcing and feedbacks and creates plots. Throughout the package, the scripts use Python
+functions defined in a third Python script (forcing_feedback_util.py).
+
+As a module of the MDTF code package, all scripts of this package can be found
+under ``mdtf/MDTF_$ver/var_code/forcing_feedback``
+and pre-digested observational data and radiative kernels (in netcdf format) under
+``mdtf/inputdata/obs_data/forcing_feedback``
+Place your input data at: ``mdtf/inputdata/model/$model_name/mon/``
+
+Required programming language and libraries
+-------------------------------------------
+
+Python is required to run the diagnostic.
+
+The part of the package written in Python requires packages os, sys, numpy, xarray, scipy, matplotlib,
+cartopy and dask. These Python packages are already included in the standard Anaconda installation
+
+Required model output variables
+-------------------------------
+
+The following three 3-D (lat-lon-time), monthly model fields are required:
+- surface skin temperature ("ts" in CMIP conventions)
+- TOA incident shortwave radiation ("rsdt")
+- TOA outgoing all-sky shortwave radiation ("rsut")
+- TOA outgoing clear-sky shortwave radiation ("rsutcs")
+- TOA outgoing all-sky longwave radiation ("rlut")
+- TOA outgoing clear-sky longwave radiation ("rlutcs")
+- Surface downwelling all-sky shortwave radiation ("rsds")
+- Surface downwelling clear-sky shortwave radiation ("rsdscs")
+- Surface upwelling all-sky shortwave radiation ("rsus")
+- Surface upwelling clear-sky shortwave radiation ("rsuscs")
+
+The following 4-D (lat-lon-level-time), monthly model fields are required:
+- Air temperature ("ta" in CMIP conventions)
+- Specific humidity ("hus")
+
+The observational estimates (see below) are for 2003-2014. The start date is based on data availability while the end
+date was selected to match the end date of relevant CMIP6 simulations. For an ideal comparison,
+the model data used in this POD should cover the same period and have realistic,
+historical forcing boundary conditions. However, this package will still have value as a "gut check" for any simulation,
+especially with respect to radiative feedbacks, which often exhibit similar characteristics regardless of the
+forcing scenario.
+
+
+More about the diagnostic
+-------------------------
+
+a) Choice of reference dataset
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+While total TOA radiative changes are directly observed, the radiative feedback and radiative forcing components are
+not. Therefore, in this package the observational estimates of radiative feedbacks and radiative forcing are derived
+by multiplying data from ERA5 Reanalysis by the CloudSat/CALIPSO radiative kernels mentioned above.
+Global-mean surface temperature anomalies from ERA5 are used to compute the radiative feedbacks from the kernel-derived
+radiative anomalies as described above. To diagnose the instantaneous radiative forcing, the kernel-derived,
+clear-sky estimates of radiative feedbacks are subtracted by a measure of the total radiative anomalies at the TOA.
+For the observational dataset used here, that total radiative anomaly estimate is from CERES.
+The methods for diagnosing these radiative changes in observations are outlined by :ref:`Kramer et al. 2021 <4>`
+and :ref:`He et al. 2021 <5>`
+
+References
+----------
+
+ .. _1:
+
+1. Sherwood, S. C., Bony, S., Boucher, O., Bretherton, C., Forster, P. M., Gregory, J. M., & Stevens, B. (2015).
+Adjustments in the Forcing-Feedback Framework for Understanding Climate Change.
+*Bulletin of the American Meteorological Society*, **96** (2), 217–228. https://doi.org/10.1175/BAMS-D-13-00167.1
+
+ .. _2:
+
+2. Kramer, R. J., Matus, A. V., Soden, B. J., & L’Ecuyer, T. S. (2019).
+Observation‐Based Radiative Kernels From CloudSat/CALIPSO. *Journal of Geophysical Research: Atmospheres*,
+2018JD029021. https://doi.org/10.1029/2018JD029021
+
+ .. _3:
+
+3. Soden, B. J., Held, I. M., Colman, R., Shell, K. M., Kiehl, J. T., & Shields, C. A. (2008).
+Quantifying Climate Feedbacks Using Radiative Kernels. *Journal of Climate*, **21** (14), 3504–3520.
+https://doi.org/10.1175/2007JCLI2110.1
+
+ .. _4:
+
+4. Kramer, R.J, He, H., Soden, B.J., Oreopoulos, R.J., Myhre, G., Forster, P.F., & Smith, C.J.
+(2021) Observational Evidence of Increasing Global Radiative Forcing. *Geophys. Res. Lett.*, **48** (7),
+e2020GL091585. https://doi.org/10.1029/2020GL091585
+
+ .. _5:
diff --git a/diagnostics/forcing_feedback/forcing_feedback.html b/diagnostics/forcing_feedback/forcing_feedback.html
new file mode 100644
index 000000000..e7addf9da
--- /dev/null
+++ b/diagnostics/forcing_feedback/forcing_feedback.html
@@ -0,0 +1,67 @@
+
+
+
+Forcing and Feedbacks
+
+
+The Forcing and Feedback module uses radiative kernels to compute the model's Instantaneous Radiative Forcing and the
+ Planck, lapse rate, water vapor, surface albedo and cloud radiative feedbacks. The total radiative change
+ (in W/m2/K) is also computed. These quantities are defined at the top-of-the-atmosphere (TOA) and decomposed
+ into longwave (LW) and shortwave (SW) components when applicable. Radiative feedbacks are diagnosed by regressing
+ local radiative anomalies against global-mean surface temperature.
+
+These results can be used to understand what is driving a model's particular climate sensitivity and more broadly
+ provide insights on how changes in the model's atmospheric state alter the model's energy budget.
+
+
+
+
+
+
+< color=navy> Forcing and Feedbacks
+ | {CASENAME} vs. Observations
+ |
+Global-Mean Total Radiation Change
+ | plot
+ |
+
+Global-Mean Instantaneous Radiative Forcing
+ | plot
+ |
+
+Global-Mean Longwave Feedbacks
+ | plot
+ |
+
+Global-Mean Shortwave Feedbacks
+ | plot
+ |
+
+Comparison with CMIP6 Models
+ | plot
+ |
+
+Map of Temperature Feedback
+ | plot
+ |
+
+Map of Water Vapor Feedback
+ | plot
+ |
+
+Map of Surface Albedo Feedback
+ | plot
+ |
+
+Map of Cloud Feedback
+ | plot
+ |
+
+Map of Total Radiation Change
+ | plot
+ |
+
+Map of Instantaneous Radiative Forcing Trend
+ | plot
+ |
+
diff --git a/diagnostics/forcing_feedback/forcing_feedback.py b/diagnostics/forcing_feedback/forcing_feedback.py
new file mode 100644
index 000000000..23b968bf0
--- /dev/null
+++ b/diagnostics/forcing_feedback/forcing_feedback.py
@@ -0,0 +1,77 @@
+# This file is part of the forcing_feedback module of the MDTF code package (see mdtf/MDTF_v2.0/LICENSE.txt)
+
+# ======================================================================
+# forcing_feedback.py
+#
+# Forcing Feedback Diagnostic Package
+#
+# The forcing feedback diagnostic package uses radiative kernels to compute radiative forcing and radiative
+# feedback terms.
+#
+# Version 1 05-Sept-2023 Ryan Kramer (NOAA/GFDL prev. NASA GSFC/UMBC)
+# PI: Brian Soden (University of Miami; bsoden@rsmas.miami.edu)
+# Current developer: Ryan Kramer (ryan.kramer@noaa.gov)
+#
+# This package and the MDTF code package are distributed under the LGPLv3 license
+# (see LICENSE.txt).
+#
+#
+# As a module of the MDTF code package, all scripts of this package can be found under
+# mdtf/MDTF_$ver/var_code/forcing_feedback**
+# and pre-digested radiative kernels used in the calculations under
+# mdtf/inputdata/obs_data/forcing_feedback
+# (**$ver depends on the actual version of the MDTF code package
+#
+# This package is written in Python 3 and requires the following Python packages:
+# os,sys,numpy,xarray,scipy,matplotlib,cartopy,dask
+#
+# The following 4-D (lon-lat-pressure levels-time) monthly-mean model fields
+# are required:
+# (1) air temperature (units: K)
+# (2) specific humidity (units: kg/kg)
+#
+# The following 3-D (lon-lat-time) monthly-mean model fields are required:
+# (1) surface temperature (units: K)
+#  (2) TOA longwave and shortwave radiative flux diagnostics (TOA SW upwelling, TOA SW downwelling, etc.)
+# for longwave and shortwave and for all-sky and clear-sky conditions when applicable
+# (3) Surface shortwave radiative flux diagnostics (Surface SW Upwelling, Surface SW downwelling)
+# for all-sky and clear-sky conditions
+#
+#
+##################################
+# forcing_feedback
+#
+# Created By: Ryan J. Kramer, Brian J. Soden
+#
+#
+# This is the main script for the forcing_feedback Toolkit. With some user-specified details
+# this Toolkit uses radiative kernels to compute instantaneous radiative forcing and radiative feedbacks
+# for a single model. Further details are in the module's documentation at ../doc.
+#
+##################################
+
+import os
+
+if not os.path.isfile(os.environ["OBS_DATA"] + "/forcing_feedback_kernels.nc"):
+ print("Kernel file is missing. POD will not work!")
+
+else:
+
+ try:
+ os.system("python " + os.environ["POD_HOME"] + "/" + "forcing_feedback_kernelcalcs.py")
+ print('Working Directory is ' + os.environ['WK_DIR'])
+ print('Forcing Feedback POD is executing')
+ except RuntimeError as e1:
+ print('WARNING', e1.errno, e1.strerror)
+ print("**************************************************")
+ print("Kernel calculations (forcing_feedback_kernelcalcs.py) are NOT Executing as Expected!")
+
+ try:
+ os.system("python " + os.environ["POD_HOME"] + "/" + "forcing_feedback_plot.py")
+ print('Generating Forcing Feedback POD plots')
+ except RuntimeError as e2:
+ print('WARNING', e2.errno, e2.strerror)
+ print("**************************************************")
+ print("Plotting functions (forcing_feedback_plots.py) are NOT Executing as Expected!")
+
+ print("Last log message by Forcing Feedback POD: finished executing")
diff --git a/diagnostics/forcing_feedback/forcing_feedback_kernelcalcs.py b/diagnostics/forcing_feedback/forcing_feedback_kernelcalcs.py
new file mode 100644
index 000000000..7bd0e4d46
--- /dev/null
+++ b/diagnostics/forcing_feedback/forcing_feedback_kernelcalcs.py
@@ -0,0 +1,347 @@
+# This file is part of the forcing_feedback module of the MDTF code package (see mdtf/MDTF_v2.0/LICENSE.txt)
+
+# ======================================================================
+# forcing_feedback_kernelcalcs.py
+#
+# Called by forcing_feedback.py
+# Reads in and (as needed) regrids radiative kernels, reads in model data, computes radiative flux kernel calculations
+#
+# Forcing Feedback Diagnostic Package
+#
+# This file is part of the Forcing and Feedback Diagnostic Package
+# and the MDTF code package. See LICENSE.txt for the license.
+#
+
+import os
+import numpy as np
+import xarray as xr
+from scipy.interpolate import interp1d
+
+# Import functions specific to this toolkit
+from forcing_feedback_util import var_anom4D
+from forcing_feedback_util import fluxanom_calc_4D
+from forcing_feedback_util import fluxanom_calc_3D
+from forcing_feedback_util import esat_coef
+from forcing_feedback_util import latlonr3_3D4D
+from forcing_feedback_util import feedback_regress
+
+# ======================================================================
+
+# Read in radiative kernels
+kernnc = xr.open_dataset(os.environ["OBS_DATA"] + "/forcing_feedback_kernels.nc")
+lat_kern = kernnc['latitude'].values
+lon_kern = kernnc['longitude'].values
+lev_kern = kernnc['plev'].values
+time_kern_TOA = kernnc['time'].values
+lw_t_kern_TOA = kernnc['lw_t'].values # LW all-sky air temperature kernel
+lwclr_t_kern_TOA = kernnc['lwclr_t'].values # LW clear-sky air temperature kernel
+lw_ts_kern_TOA = kernnc['lw_ts'].values # LW all-sky surface temperature kernel
+lwclr_ts_kern_TOA = kernnc['lwclr_ts'].values # LW clear-sky surface temperature kernel
+lw_q_kern_TOA = kernnc['lw_q'].values # LW all-sky water vapor radiative kernel
+lwclr_q_kern_TOA = kernnc['lwclr_q'].values # LW clear-sky water vapor radiative kernel
+sw_q_kern_TOA = kernnc['sw_q'].values # SW all-sky water vapor radiative kernel
+swclr_q_kern_TOA = kernnc['swclr_q'].values # SW clear-sky water vapor radiative kernel
+sw_a_kern_TOA = kernnc['sw_a'].values # SW all-sky surface albedo radiative kernel
+swclr_a_kern_TOA = kernnc['swclr_a'].values # SW clear-sky surface albedo radiative kernel
+ps_kern_TOA = kernnc['PS'].values # Radiative kernel surface pressure
+
+# close kernel file
+kernnc.close()
+
+# Read in model output
+
+varnames = ["ta", "ts", "hus", "rsus", "rsds", "rsuscs", "rsdscs", "rsdt", "rsut", "rsutcs", "rlut", "rlutcs"]
+model_mainvar_pert = {}
+i = 0
+for varname in varnames:
+ nc = xr.open_dataset(os.environ[varname.upper() + "_FILE"])
+ model_mainvar_pert[os.environ[varname + "_var"]] = nc[os.environ[varname + "_var"]]
+ if i == 0:
+ lat_model = nc[os.environ["lat_coord"]].values
+ lon_model = nc[os.environ["lon_coord"]].values
+ lev_model = nc[os.environ["plev_coord"]].values
+ time_model = nc[os.environ["time_coord"]].values
+ nc.close()
+ i += 1
+i = None
+
+model_mainvar_climo = model_mainvar_pert
+
+# To avoid issues with interpreting different model time formats, just create new variable.
+time_model = np.arange(len(time_model)) + 1
+
+# Regrid kernels using function to match horizontal grid of model/observational data
+lw_t_kern_TOA = latlonr3_3D4D(lw_t_kern_TOA, lat_kern, lon_kern, lat_model, lon_model,
+ model_mainvar_climo[os.environ["ta_var"]])
+lwclr_t_kern_TOA = latlonr3_3D4D(lwclr_t_kern_TOA, lat_kern, lon_kern, lat_model, lon_model,
+ model_mainvar_climo[os.environ["ta_var"]])
+lw_ts_kern_TOA = latlonr3_3D4D(lw_ts_kern_TOA, lat_kern, lon_kern, lat_model, lon_model,
+ model_mainvar_climo[os.environ["ts_var"]])
+lwclr_ts_kern_TOA = latlonr3_3D4D(lwclr_ts_kern_TOA, lat_kern, lon_kern, lat_model, lon_model,
+ model_mainvar_climo[os.environ["ts_var"]])
+lw_q_kern_TOA = latlonr3_3D4D(lw_q_kern_TOA, lat_kern, lon_kern, lat_model, lon_model,
+ model_mainvar_climo[os.environ["ta_var"]])
+lwclr_q_kern_TOA = latlonr3_3D4D(lwclr_q_kern_TOA, lat_kern, lon_kern, lat_model, lon_model,
+ model_mainvar_climo[os.environ["ta_var"]])
+sw_q_kern_TOA = latlonr3_3D4D(sw_q_kern_TOA, lat_kern, lon_kern, lat_model, lon_model,
+ model_mainvar_climo[os.environ["ta_var"]])
+swclr_q_kern_TOA = latlonr3_3D4D(swclr_q_kern_TOA, lat_kern, lon_kern, lat_model, lon_model, \
+ model_mainvar_climo[os.environ["ta_var"]])
+sw_a_kern_TOA = latlonr3_3D4D(sw_a_kern_TOA, lat_kern, lon_kern, lat_model, lon_model, \
+ model_mainvar_climo[os.environ["ts_var"]])
+swclr_a_kern_TOA = latlonr3_3D4D(swclr_a_kern_TOA, lat_kern, lon_kern, lat_model, lon_model, \
+ model_mainvar_climo[os.environ["ts_var"]])
+ps_kern_TOA = latlonr3_3D4D(ps_kern_TOA, lat_kern, lon_kern, lat_model, lon_model, \
+ model_mainvar_climo[os.environ["ts_var"]])
+
+# Kernels have now been regridded to the model/observation grid
+lat_kern = lat_model
+lon_kern = lon_model
+
+# If necessary, conduct vertical interpolation
+for varname in varnames:
+ if len(model_mainvar_pert[os.environ[varname + "_var"]].shape) == 4:
+ # If vertical coordinates for kernel and model data differ, check if just a difference in units,
+ # check if one is flipped and finally, flip or interpolate as needed.
+ if not np.array_equal(lev_model, lev_kern) and not np.array_equal(lev_model / 100, lev_kern) \
+ and not np.array_equal(lev_model * 100, lev_kern):
+ if np.array_equal(lev_model, lev_kern[::-1]) or np.array_equal(lev_model / 100, lev_kern[::-1]) \
+ or np.array_equal(lev_model * 100, lev_kern[::-1]):
+ model_mainvar_pert[os.environ[varname + "_var"]] = \
+ model_mainvar_pert[os.environ[varname + "_var"]][:, ::-1, ...]
+ model_mainvar_climo[os.environ[varname + "_var"]] = \
+ model_mainvar_climo[os.environ[varname + "_var"]][:, ::-1, ...]
+ else:
+ if np.max(lev_model) / np.max(lev_kern) > 10:
+ lev_model = lev_model / 100 # scale units down
+ if np.max(lev_model) / np.max(lev_kern) < 0.1:
+ lev_model = lev_model * 100 # scale units up
+ f = interp1d(np.log(lev_model), model_mainvar_pert[os.environ[varname + "_var"]],
+ axis=1, bounds_error=False, fill_value='extrapolate')
+ model_mainvar_pert[os.environ[varname + "_var"]] = f(np.log(lev_kern))
+ f = None
+ f = interp1d(np.log(lev_model), model_mainvar_climo[os.environ[varname + "_var"]],
+ axis=1, bounds_error=False, fill_value='extrapolate')
+ model_mainvar_climo[os.environ[varname + "_var"]] = f(np.log(lev_kern))
+
+# Pressure of upper boundary of each vertical layer
+pt = (lev_kern[1:] + lev_kern[:-1]) / 2
+pt = np.append(pt, 0)
+# Pressure of lower boundary of each vertical layer
+pb = pt[:-1]
+pb = np.insert(pb, 0, 1000)
+# Pressure thickness of each vertical level
+dp = pb - pt
+
+sk = lw_t_kern_TOA.shape
+sp = model_mainvar_pert[os.environ["ta_var"]].shape
+
+# Determine thickness of lowest layer, dependent on local surface pressure
+dp_sfc = dp[0] + (ps_kern_TOA - pb[0])
+dp_sfc[ps_kern_TOA >= pt[0]] = 0
+
+# Repeat lowest layer thicknesses to match size of model data for kernel calculations
+dp_sfc = np.repeat(dp_sfc[np.newaxis, :, :, :], int(sp[0] / 12), axis=0)
+
+# Compute lapse rate and uniform warming
+ta_pert = np.asarray(model_mainvar_pert[os.environ["ta_var"]])
+ta_climo = np.asarray(model_mainvar_climo[os.environ["ta_var"]])
+ts_pert = np.asarray(model_mainvar_pert[os.environ["ts_var"]])
+ts_climo = np.asarray(model_mainvar_climo[os.environ["ts_var"]])
+s = ta_pert.shape
+
+pl_pert = np.repeat(ts_pert[:, np.newaxis, :, :], s[1], axis=1)
+s = ta_climo.shape
+pl_climo = np.repeat(ts_climo[:, np.newaxis, :, :], s[1], axis=1)
+
+ta_anom = var_anom4D(ta_pert, ta_climo)
+pl_anom = var_anom4D(pl_pert, pl_climo)
+lr_anom = ta_anom - pl_anom
+
+# Compute Planck Radiative Flux Anomalies
+[fluxanom_pl_tot_TOA_tropo, fluxanom_pl_clr_TOA_tropo, fluxanom_pl_tot_TOA_strato, fluxanom_pl_clr_TOA_strato] = \
+ fluxanom_calc_4D(pl_anom, lw_t_kern_TOA, lwclr_t_kern_TOA, dp_sfc, lev_kern, lat_kern, ps_kern_TOA)
+
+# Compute Surface Temperature (surface Planck) Flux Anomalies
+[fluxanom_pl_sfc_tot_TOA, fluxanom_pl_sfc_clr_TOA] = fluxanom_calc_3D(ts_pert, ts_climo, lw_ts_kern_TOA,
+ lwclr_ts_kern_TOA)
+
+# Compute Lapse Rate Radiative Flux Anomalies
+[fluxanom_lr_tot_TOA_tropo, fluxanom_lr_clr_TOA_tropo, fluxanom_lr_tot_TOA_strato, fluxanom_lr_clr_TOA_strato] = \
+ fluxanom_calc_4D(lr_anom, lw_t_kern_TOA, lwclr_t_kern_TOA, dp_sfc, lev_kern, lat_kern, ps_kern_TOA)
+
+# Compute water vapor change
+hus_pert = np.asarray(model_mainvar_pert[os.environ["hus_var"]])
+hus_pert[hus_pert < 0] = 0
+hus_climo = np.asarray(model_mainvar_climo[os.environ["hus_var"]])
+hus_climo[hus_climo < 0] = 0
+
+# Calculations necessary to convert units of water vapor change to match kernels
+shp = ta_climo.shape
+ta_ratio_climo = np.squeeze(np.nanmean(np.reshape(ta_climo, (int(shp[0] / 12), 12, shp[1], shp[2], shp[3])),
+ axis=0))
+es_ratio = esat_coef(ta_ratio_climo + 1) / esat_coef(ta_ratio_climo)
+
+es_ratio_pert = np.reshape(np.repeat(es_ratio[np.newaxis, ...], int(ta_pert.shape[0] / 12), axis=0),
+ (ta_pert.shape[0], shp[1], shp[2], shp[3]))
+
+# Log of water vapor
+q_pert = np.log(hus_pert) / (es_ratio_pert - 1)
+es_ratio_climo = np.reshape(np.repeat(es_ratio[np.newaxis, ...], int(shp[0] / 12), axis=0),
+ (shp[0], shp[1], shp[2], shp[3]))
+
+q_climo = np.log(hus_climo) / (es_ratio_climo - 1)
+es_ratio, ta_ratio_climo, hus_pert, hus_climo, ta_pert, ta_climo = None, None, None, None, None, None
+
+q_anom = var_anom4D(q_pert, q_climo)
+
+# Compute SW Water Vapor Radiative Flux Anomalies
+
+[fluxanom_sw_q_tot_TOA_tropo, fluxanom_sw_q_clr_TOA_tropo, fluxanom_sw_q_tot_TOA_strato, fluxanom_sw_q_clr_TOA_strato] \
+ = fluxanom_calc_4D(q_anom, sw_q_kern_TOA, swclr_q_kern_TOA, dp_sfc, lev_kern, lat_kern, ps_kern_TOA)
+
+# Compute LW Water Vapor Radiative Flux Anomalies
+[fluxanom_lw_q_tot_TOA_tropo, fluxanom_lw_q_clr_TOA_tropo, fluxanom_lw_q_tot_TOA_strato, fluxanom_lw_q_clr_TOA_strato] \
+ = fluxanom_calc_4D(q_anom, lw_q_kern_TOA, lwclr_q_kern_TOA, dp_sfc, lev_kern, lat_kern, ps_kern_TOA)
+
+fluxanom_sw_q_tot_TOA_tropo[fluxanom_sw_q_tot_TOA_tropo == float('Inf')] = np.nan
+fluxanom_sw_q_clr_TOA_tropo[fluxanom_sw_q_clr_TOA_tropo == float('Inf')] = np.nan
+fluxanom_lw_q_tot_TOA_tropo[fluxanom_lw_q_tot_TOA_tropo == float('Inf')] = np.nan
+fluxanom_lw_q_clr_TOA_tropo[fluxanom_lw_q_clr_TOA_tropo == float('Inf')] = np.nan
+fluxanom_sw_q_tot_TOA_tropo[fluxanom_sw_q_tot_TOA_tropo == -float('Inf')] = np.nan
+fluxanom_sw_q_clr_TOA_tropo[fluxanom_sw_q_clr_TOA_tropo == -float('Inf')] = np.nan
+fluxanom_lw_q_tot_TOA_tropo[fluxanom_lw_q_tot_TOA_tropo == -float('Inf')] = np.nan
+fluxanom_lw_q_clr_TOA_tropo[fluxanom_lw_q_clr_TOA_tropo == -float('Inf')] = np.nan
+
+fluxanom_sw_q_tot_TOA_strato[fluxanom_sw_q_tot_TOA_strato == float('Inf')] = np.nan
+fluxanom_sw_q_clr_TOA_strato[fluxanom_sw_q_clr_TOA_strato == float('Inf')] = np.nan
+fluxanom_lw_q_tot_TOA_strato[fluxanom_lw_q_tot_TOA_strato == float('Inf')] = np.nan
+fluxanom_lw_q_clr_TOA_strato[fluxanom_lw_q_clr_TOA_strato == float('Inf')] = np.nan
+fluxanom_sw_q_tot_TOA_strato[fluxanom_sw_q_tot_TOA_strato == -float('Inf')] = np.nan
+fluxanom_sw_q_clr_TOA_strato[fluxanom_sw_q_clr_TOA_strato == -float('Inf')] = np.nan
+fluxanom_lw_q_tot_TOA_strato[fluxanom_lw_q_tot_TOA_strato == -float('Inf')] = np.nan
+fluxanom_lw_q_clr_TOA_strato[fluxanom_lw_q_clr_TOA_strato == -float('Inf')] = np.nan
+
+# Compute surface albedo change
+alb_pert_tot = np.asarray(model_mainvar_pert[os.environ["rsus_var"]] / model_mainvar_pert[os.environ["rsds_var"]])
+alb_climo_tot = np.asarray(model_mainvar_climo[os.environ["rsus_var"]] / model_mainvar_climo[os.environ["rsds_var"]])
+alb_pert_clr = np.asarray(model_mainvar_pert[os.environ["rsuscs_var"]] / model_mainvar_pert[os.environ["rsdscs_var"]])
+alb_climo_clr = np.asarray(
+ model_mainvar_climo[os.environ["rsuscs_var"]] / model_mainvar_climo[os.environ["rsdscs_var"]])
+
+alb_pert_tot[np.isinf(alb_pert_tot)] = 0
+alb_climo_tot[np.isinf(alb_climo_tot)] = 0
+alb_pert_clr[np.isinf(alb_pert_clr)] = 0
+alb_climo_clr[np.isinf(alb_climo_clr)] = 0
+alb_pert_tot[alb_pert_tot > 1] = 0
+alb_pert_tot[alb_pert_tot < 0] = 0
+alb_climo_tot[alb_climo_tot > 1] = 0
+alb_climo_tot[alb_climo_tot < 0] = 0
+alb_pert_clr[alb_pert_clr > 1] = 0
+alb_pert_clr[alb_pert_clr < 0] = 0
+alb_climo_clr[alb_climo_clr > 1] = 0
+alb_climo_clr[alb_climo_clr < 0] = 0
+# Compute Surface Albedo Radiative Flux Anomalies
+
+
+[fluxanom_a_sfc_tot_TOA, fluxanom_a_sfc_clr_TOA] = \
+ fluxanom_calc_3D(alb_pert_tot, alb_climo_tot, sw_a_kern_TOA / .01, swclr_a_kern_TOA / .01, alb_pert_clr,
+ alb_climo_clr)
+
+# Compute NET Radiative Flux Anomalies
+Rtot_LW_TOA_pert = np.asarray(-model_mainvar_pert[os.environ["rlut_var"]])
+Rtot_SW_TOA_pert = np.asarray(model_mainvar_pert[os.environ["rsdt_var"]] - model_mainvar_pert[os.environ["rsut_var"]])
+Rtot_LW_TOA_climo = np.asarray(-model_mainvar_climo[os.environ["rlut_var"]])
+Rtot_SW_TOA_climo = np.asarray(
+ model_mainvar_climo[os.environ["rsdt_var"]] - model_mainvar_climo[os.environ["rsut_var"]])
+Rclr_LW_TOA_pert = np.asarray(-model_mainvar_pert[os.environ["rlutcs_var"]])
+Rclr_SW_TOA_pert = np.asarray(model_mainvar_pert[os.environ["rsdt_var"]] - model_mainvar_pert[os.environ["rsutcs_var"]])
+Rclr_LW_TOA_climo = np.asarray(-model_mainvar_climo[os.environ["rlutcs_var"]])
+Rclr_SW_TOA_climo = np.asarray(
+ model_mainvar_climo[os.environ["rsdt_var"]] - model_mainvar_climo[os.environ["rsutcs_var"]])
+
+# TOA
+[fluxanom_Rtot_LW_TOA, fluxanom_Rclr_LW_TOA] = \
+ fluxanom_calc_3D(Rtot_LW_TOA_pert, Rtot_LW_TOA_climo, np.ones(lw_ts_kern_TOA.shape),
+ np.ones(lwclr_ts_kern_TOA.shape),
+ Rclr_LW_TOA_pert, Rclr_LW_TOA_climo)
+
+[fluxanom_Rtot_SW_TOA, fluxanom_Rclr_SW_TOA] = \
+ fluxanom_calc_3D(Rtot_SW_TOA_pert, Rtot_SW_TOA_climo, np.ones(lw_ts_kern_TOA.shape),
+ np.ones(lwclr_ts_kern_TOA.shape),
+ Rclr_SW_TOA_pert, Rclr_SW_TOA_climo)
+# TOA CRE
+fluxanom_Rcre_LW_TOA = fluxanom_Rtot_LW_TOA - fluxanom_Rclr_LW_TOA
+fluxanom_Rcre_SW_TOA = fluxanom_Rtot_SW_TOA - fluxanom_Rclr_SW_TOA
+
+# Compute Instantaneous Radiative Forcing as difference between NET Radiative Flux Anomalies and
+# the sum of all individual radiative flux anomalies. Total-sky IRF computed as
+# Clear-Sky IRF divided by cloud masking constant. NOTE, these cloud masking constants may not apply
+# to user's specific model experiment.
+IRF_lw_clr_TOA = fluxanom_Rclr_LW_TOA - fluxanom_pl_clr_TOA_tropo - fluxanom_lr_clr_TOA_tropo - \
+ fluxanom_lw_q_clr_TOA_tropo - fluxanom_pl_sfc_clr_TOA - fluxanom_pl_clr_TOA_tropo - \
+ fluxanom_lr_clr_TOA_strato - fluxanom_lw_q_clr_TOA_strato
+IRF_lw_tot_TOA = IRF_lw_clr_TOA / np.double(os.environ["LW_CLOUDMASK"])
+IRF_sw_clr_TOA = (fluxanom_Rclr_SW_TOA - fluxanom_sw_q_clr_TOA_tropo - fluxanom_a_sfc_clr_TOA -
+ fluxanom_sw_q_clr_TOA_strato)
+IRF_sw_tot_TOA = IRF_sw_clr_TOA / np.double(os.environ["SW_CLOUDMASK"])
+
+# Compute Cloud Radiative Flux Anomalies as dCRE minus correction for cloud masking using
+# kernel-derived IRF and individual flux anomalies (See e.g. Soden et al. 2008)
+
+fluxanom_lw_c_TOA = fluxanom_Rcre_LW_TOA - (fluxanom_pl_tot_TOA_tropo - fluxanom_pl_clr_TOA_tropo) \
+ - (fluxanom_pl_tot_TOA_strato - fluxanom_pl_clr_TOA_strato) \
+ - (fluxanom_pl_sfc_tot_TOA - fluxanom_pl_sfc_clr_TOA) \
+ - (fluxanom_lr_tot_TOA_tropo - fluxanom_lr_clr_TOA_tropo) \
+ - (fluxanom_lr_tot_TOA_strato - fluxanom_lr_clr_TOA_strato) \
+ - (fluxanom_lw_q_tot_TOA_tropo - fluxanom_lw_q_clr_TOA_tropo) \
+ - (fluxanom_lw_q_tot_TOA_strato - fluxanom_lw_q_clr_TOA_strato) \
+ - (IRF_lw_tot_TOA - IRF_lw_clr_TOA)
+
+fluxanom_sw_c_TOA = fluxanom_Rcre_SW_TOA - (fluxanom_sw_q_tot_TOA_tropo - fluxanom_sw_q_clr_TOA_tropo) \
+ - (fluxanom_sw_q_tot_TOA_strato - fluxanom_sw_q_clr_TOA_strato) \
+ - (fluxanom_a_sfc_tot_TOA - fluxanom_a_sfc_clr_TOA) \
+ - (IRF_sw_tot_TOA - IRF_sw_clr_TOA)
+
+# Regress radiative flux anomalies with global-mean dTs to compute feedbacks
+# within feedback_regress function, results saved to a netcdf
+
+# Planck Feedback
+feedback_regress(fluxanom_pl_tot_TOA_tropo + fluxanom_pl_sfc_tot_TOA, ts_pert, ts_climo, lat_kern, lon_kern,
+ 'Planck')
+
+# Lapse Rate Feedback
+feedback_regress(fluxanom_lr_tot_TOA_tropo, ts_pert, ts_climo, lat_kern, lon_kern, 'LapseRate')
+
+# LW Water vapor Feedback
+feedback_regress(fluxanom_lw_q_tot_TOA_tropo, ts_pert, ts_climo, lat_kern, lon_kern, 'LW_WaterVapor')
+
+# SW Water vapor Feedback
+feedback_regress(fluxanom_sw_q_tot_TOA_tropo, ts_pert, ts_climo, lat_kern, lon_kern, 'SW_WaterVapor')
+
+# Surface Albedo Feedback
+feedback_regress(fluxanom_a_sfc_tot_TOA, ts_pert, ts_climo, lat_kern, lon_kern, 'SfcAlbedo')
+
+# Total stratospheric Feedback
+feedback_regress(fluxanom_pl_tot_TOA_strato + fluxanom_lr_tot_TOA_strato + fluxanom_lw_q_tot_TOA_strato + \
+ fluxanom_sw_q_tot_TOA_strato, ts_pert, ts_climo, lat_kern, lon_kern, 'StratFB')
+
+# LW Cloud Feedback
+feedback_regress(fluxanom_lw_c_TOA, ts_pert, ts_climo, lat_kern, lon_kern, 'LW_Cloud')
+
+# SW Cloud Feedback
+feedback_regress(fluxanom_sw_c_TOA, ts_pert, ts_climo, lat_kern, lon_kern, 'SW_Cloud')
+
+# LW Total Radiative Anomalies
+feedback_regress(fluxanom_Rtot_LW_TOA, ts_pert, ts_climo, lat_kern, lon_kern, 'LW_Rad')
+
+# SW Total Radiative Anomalies
+feedback_regress(fluxanom_Rtot_SW_TOA, ts_pert, ts_climo, lat_kern, lon_kern, 'SW_Rad')
+
+shp = ts_pert.shape
+xtime = np.repeat(np.repeat(np.arange(shp[0])[..., np.newaxis], shp[1], axis=1)[..., np.newaxis], shp[2], axis=2)
+# LW IRF (trendlines, regressed against time)
+feedback_regress(IRF_lw_tot_TOA, xtime, xtime * 0, lat_kern, lon_kern, 'LW_IRF')
+
+# SW IRF (trendlines, regressed against time)
+feedback_regress(IRF_sw_tot_TOA, xtime, xtime * 0, lat_kern, lon_kern, 'SW_IRF')
diff --git a/diagnostics/forcing_feedback/forcing_feedback_plot.py b/diagnostics/forcing_feedback/forcing_feedback_plot.py
new file mode 100644
index 000000000..865c46def
--- /dev/null
+++ b/diagnostics/forcing_feedback/forcing_feedback_plot.py
@@ -0,0 +1,251 @@
+# This file is part of the forcing_feedback module of the MDTF code package (see mdtf/MDTF_v2.0/LICENSE.txt)
+
+# ======================================================================
+# forcing_feedback_plot.py
+#
+# Called by forcing_feedback.py
+# Reads in observations and temporary model results of individual 2D feedbacks and IRF, produces maps of each and
+# a bar graph summarizing global-mean values. Also creates a dotplot, comparing results against CMIP6 suite of models
+#
+# Forcing Feedback Diagnostic Package
+#
+# This file is part of the Forcing and Feedback Diagnostic Package
+# and the MDTF code package. See LICENSE.txt for the license.
+#
+#
+
+import os
+import numpy as np
+import xarray as xr
+import matplotlib as mpl
+
+mpl.use('Agg')
+import matplotlib.pyplot as plt
+
+from forcing_feedback_util import globemean_2D
+from forcing_feedback_util import bargraph_plotting
+from forcing_feedback_util import map_plotting_4subs
+from forcing_feedback_util import map_plotting_2subs
+
+# Read in observational data
+nc_obs = xr.open_dataset(os.environ["OBS_DATA"] + "/forcing_feedback_obs.nc")
+lat_obs = nc_obs.lat.values
+lon_obs = nc_obs.lon.values
+llons_obs, llats_obs = np.meshgrid(lon_obs, lat_obs)
+
+# Read in model results
+
+nc_pl = xr.open_dataset(os.environ["WK_DIR"] + "/model/netCDF/fluxanom2D_Planck.nc")
+nc_lr = xr.open_dataset(os.environ["WK_DIR"] + "/model/netCDF/fluxanom2D_LapseRate.nc")
+nc_lw_q = xr.open_dataset(os.environ["WK_DIR"] + "/model/netCDF/fluxanom2D_LW_WaterVapor.nc")
+nc_sw_q = xr.open_dataset(os.environ["WK_DIR"] + "/model/netCDF/fluxanom2D_SW_WaterVapor.nc")
+nc_alb = xr.open_dataset(os.environ["WK_DIR"] + "/model/netCDF/fluxanom2D_SfcAlbedo.nc")
+nc_lw_c = xr.open_dataset(os.environ["WK_DIR"] + "/model/netCDF/fluxanom2D_LW_Cloud.nc")
+nc_sw_c = xr.open_dataset(os.environ["WK_DIR"] + "/model/netCDF/fluxanom2D_SW_Cloud.nc")
+nc_lw_irf = xr.open_dataset(os.environ["WK_DIR"] + "/model/netCDF/fluxanom2D_LW_IRF.nc")
+nc_sw_irf = xr.open_dataset(os.environ["WK_DIR"] + "/model/netCDF/fluxanom2D_SW_IRF.nc")
+nc_lw_netrad = xr.open_dataset(os.environ["WK_DIR"] + "/model/netCDF/fluxanom2D_LW_Rad.nc")
+nc_sw_netrad = xr.open_dataset(os.environ["WK_DIR"] + "/model/netCDF/fluxanom2D_SW_Rad.nc")
+nc_strat = xr.open_dataset(os.environ["WK_DIR"] + "/model/netCDF/fluxanom2D_StratFB.nc")
+
+lat_model = nc_sw_irf.lat.values
+weights_model = np.cos(np.deg2rad(lat_model))
+weights_obs = np.cos(np.deg2rad(lat_obs))
+
+# Global-mean barplot comparison
+# Figure 1: Total Radiation
+LW_RA_Model = globemean_2D(nc_lw_netrad.LW_Rad.values, weights_model)
+SW_RA_Model = globemean_2D(nc_sw_netrad.SW_Rad.values, weights_model)
+Net_RA_Model = LW_RA_Model + SW_RA_Model
+bars1 = [Net_RA_Model, LW_RA_Model, SW_RA_Model]
+LW_RA_Obs = globemean_2D(nc_obs.LW_Rad.values, weights_obs)
+SW_RA_Obs = globemean_2D(nc_obs.SW_Rad.values, weights_obs)
+Net_RA_Obs = LW_RA_Obs + SW_RA_Obs
+bars2 = [Net_RA_Obs, LW_RA_Obs, SW_RA_Obs]
+units = 'W/$m^2$/K'
+legendnames = ['Net Radiation', 'LW Rad', 'SW Rad']
+filename = 'Rad'
+bargraph_plotting(bars1, bars2, units, legendnames, filename)
+
+# Figure 2: IRF
+LW_IRF_Model = 12 * globemean_2D(nc_lw_irf.LW_IRF.values, weights_model) # converting from W/m2/month to W/m2/yr
+SW_IRF_Model = 12 * globemean_2D(nc_sw_irf.SW_IRF.values, weights_model)
+Net_IRF_Model = LW_IRF_Model + SW_IRF_Model
+bars1 = [Net_IRF_Model, LW_IRF_Model, SW_IRF_Model]
+LW_IRF_Obs = 12 * globemean_2D(nc_obs.LW_IRF.values, weights_obs)
+SW_IRF_Obs = 12 * globemean_2D(nc_obs.SW_IRF.values, weights_obs)
+Net_IRF_Obs = LW_IRF_Obs + SW_IRF_Obs
+bars2 = [Net_IRF_Obs, LW_IRF_Obs, SW_IRF_Obs]
+units = 'W/$m^2/yr$'
+legendnames = ['Net IRF', 'LW IRF', 'SW IRF']
+filename = 'IRF'
+bargraph_plotting(bars1, bars2, units, legendnames, filename)
+
+# Figure 3: Longwave Radiative Feedbacks
+PL_Model = globemean_2D(nc_pl.Planck.values, weights_model)
+LR_Model = globemean_2D(nc_lr.LapseRate.values, weights_model)
+LWWV_Model = globemean_2D(nc_lw_q.LW_WaterVapor.values, weights_model)
+LWC_Model = globemean_2D(nc_lw_c.LW_Cloud.values, weights_model)
+bars1 = [PL_Model, LR_Model, LWWV_Model, LWC_Model]
+PL_Obs = globemean_2D(nc_obs.Planck.values, weights_obs)
+LR_Obs = globemean_2D(nc_obs.LapseRate.values, weights_obs)
+LWWV_Obs = globemean_2D(nc_obs.LW_WaterVapor.values, weights_obs)
+LWC_Obs = globemean_2D(nc_obs.LW_Cloud.values, weights_obs)
+bars2 = [PL_Obs, LR_Obs, LWWV_Obs, LWC_Obs]
+units = 'W/$m^2$/K'
+legendnames = ['Planck', 'Lapse Rate', 'LW Water Vapor', ' LW Cloud']
+filename = 'LWFB'
+bargraph_plotting(bars1, bars2, units, legendnames, filename)
+
+# Figure 4: Shortwave Radiative Feedbacks
+Alb_Model = globemean_2D(nc_alb.SfcAlbedo.values, weights_model)
+SWWV_Model = globemean_2D(nc_sw_q.SW_WaterVapor.values, weights_model)
+SWC_Model = globemean_2D(nc_sw_c.SW_Cloud.values, weights_model)
+bars1 = [Alb_Model, SWWV_Model, SWC_Model]
+Alb_Obs = globemean_2D(nc_obs.SfcAlbedo.values, weights_obs)
+SWWV_Obs = globemean_2D(nc_obs.SW_WaterVapor.values, weights_obs)
+SWC_Obs = globemean_2D(nc_obs.SW_Cloud.values, weights_obs)
+bars2 = [Alb_Obs, SWWV_Obs, SWC_Obs]
+units = 'W/$m^2$/K'
+legendnames = ['Sfc. Albedo', 'SW Water Vapor', ' SW Cloud']
+filename = 'SWFB'
+bargraph_plotting(bars1, bars2, units, legendnames, filename)
+
# Global-mean stratospheric feedback (no obs counterpart available yet)
Strat_Model = globemean_2D(nc_strat.StratFB.values, weights_model)
# Strat_Obs = globemean_2D(nc_obs.StratFB.values,weights_obs)

# CMIP6 values. IRF already multiplied by 12
# Rows = CMIP6 models; columns = [dR_tot, cloud FB, non-cloud FB, net IRF]
# (last column plotted separately on ax2 below)
CMIP6vals = np.loadtxt(os.environ["OBS_DATA"] + "/CldFB_MDTF.txt")

# Create scatter plot with CMIP6 data. One iteration only
plt.figure(1)
f, (ax1, ax2) = plt.subplots(1, 2, gridspec_kw={'width_ratios': [3, 1]})
# Add in each CMIP6 value model-by-model
for m in range(CMIP6vals.shape[0] - 1):
    ax1.scatter(np.arange(CMIP6vals.shape[1] - 1), CMIP6vals[m, :-1], c='k', label='_nolegend_')
# For legend purposes, add in last CMIP6 model separately
# NOTE(review): this reuses the leaked loop variable m; it raises NameError if
# CMIP6vals has a single row — presumably the data file always has >= 2 rows.
ax1.scatter(np.arange(CMIP6vals.shape[1] - 1), CMIP6vals[m + 1, :-1], c='k', label='CMIP6 (Hist. 2003-2014)')
# Add in values from POD user's model
ax1.scatter(np.arange(CMIP6vals.shape[1] - 1), np.asarray([Net_RA_Model, LWC_Model + SWC_Model,
                                                           PL_Model + LR_Model + LWWV_Model + SWWV_Model + Alb_Model]),
            c='b', label='Your Model')
# Add in Observational Values
ax1.scatter(np.arange(CMIP6vals.shape[1] - 1), np.asarray([Net_RA_Obs,
                                                           LWC_Obs + SWC_Obs,
                                                           PL_Obs + LR_Obs + LWWV_Obs + SWWV_Obs + Alb_Obs]), c='r',
            label='Obs.')
# Creating axis labels for dotplot
ax1.set_ylabel('W/$m^2$/K')
xterms = ['$\Delta{R}_{tot}$', '$\lambda_{cloud}$', '$\lambda_{noncloud}$']
ax1.set_xticks([r for r in range(CMIP6vals.shape[1] - 1)], xterms)
ax1.legend(loc='lower center')
# Right-hand panel: net IRF (last column) for all CMIP6 models plus this model and obs
for m in range(CMIP6vals.shape[0]):
    ax2.scatter(1, CMIP6vals[m, -1], c='k', label='_nolegend_')
ax2.scatter(1, Net_IRF_Model, c='b', label='_nolegend_')
ax2.scatter(1, Net_IRF_Obs, c='r', label='_nolegend_')
ax2.set_ylabel('W/$m^2$/yr')
xterms = ['', 'IRF', '']
ax2.set_xticks([r for r in range(len(xterms))], xterms)
plt.tight_layout()
plt.savefig(os.environ['WK_DIR'] + '/model/PS/forcing_feedback_CMIP6scatter.eps')
plt.close()
+
if np.max(nc_sw_irf.lon.values) >= 300: # convert 0-360 lon to -180-180 lon for plotting
    lon1 = np.mod((nc_sw_irf.lon.values + 180), 360) - 180
    # swap the two halves so longitudes are monotonically increasing
    lon1a = lon1[0:int(len(lon1) / 2)]
    lon1b = lon1[int(len(lon1) / 2):]
    lon_model = np.concatenate((lon1b, lon1a))
else:
    lon_model = nc_sw_irf.lon.values
llons_model, llats_model = np.meshgrid(lon_model, lat_model)

# keep the un-converted longitudes too; the map helpers need both
lon_originalmodel = nc_sw_irf.lon.values

# Produce maps of the radiative feedbacks and IRF tends, comparing model results to observations

# Temperature Feedback (Planck + Lapse Rate, 4-panel model-vs-obs map)
levels_1 = np.arange(-6, 6.0001, 1)
variablename_1 = 'Planck'
modelvariable_1 = nc_pl.Planck.values
obsvariable_1 = nc_obs.Planck.values
levels_2 = np.arange(-6, 6.0001, 1)
variablename_2 = 'Lapse Rate'
modelvariable_2 = nc_lr.LapseRate.values
obsvariable_2 = nc_obs.LapseRate.values
units = 'W/$m^2$/K'
filename = 'Temperature'
map_plotting_4subs(levels_1, levels_2, variablename_1, modelvariable_1, lon_originalmodel,
                   lon_model, lat_model, lon_obs, lat_obs, obsvariable_1, variablename_2,
                   modelvariable_2, obsvariable_2, units, filename)

# Water Vapor Feedback (LW and SW panels)
levels_1 = np.arange(-6, 6.0001, 1)
variablename_1 = 'LW Water Vapor'
modelvariable_1 = nc_lw_q.LW_WaterVapor.values
obsvariable_1 = nc_obs.LW_WaterVapor.values
levels_2 = np.arange(-1, 1.0001, 0.2)
variablename_2 = 'SW Water Vapor'
modelvariable_2 = nc_sw_q.SW_WaterVapor.values
obsvariable_2 = nc_obs.SW_WaterVapor.values
units = 'W/$m^2$/K'
filename = 'WaterVapor'
map_plotting_4subs(levels_1, levels_2, variablename_1, modelvariable_1, lon_originalmodel,
                   lon_model, lat_model, lon_obs, lat_obs, obsvariable_1,
                   variablename_2, modelvariable_2, obsvariable_2, units, filename)

# Surface Albedo Feedback (single variable, 2-panel map)
levels_1 = np.arange(-6, 6.0001, 1)
variablename_1 = 'Sfc. Albedo'
modelvariable_1 = nc_alb.SfcAlbedo.values
obsvariable_1 = nc_obs.SfcAlbedo.values
units = 'W/$m^2$/K'
filename = 'SfcAlbedo'
map_plotting_2subs(levels_1, variablename_1, modelvariable_1, lon_originalmodel,
                   lon_model, lat_model, lon_obs, lat_obs, obsvariable_1, units, filename)

# Cloud Feedback (LW and SW panels)
levels_1 = np.arange(-16, 16.0001, 2)
variablename_1 = 'LW Cloud'
modelvariable_1 = nc_lw_c.LW_Cloud.values
obsvariable_1 = nc_obs.LW_Cloud.values
levels_2 = np.arange(-16, 16.0001, 2)
variablename_2 = 'SW Cloud'
modelvariable_2 = nc_sw_c.SW_Cloud.values
obsvariable_2 = nc_obs.SW_Cloud.values
units = 'W/$m^2$/K'
filename = 'Cloud'
map_plotting_4subs(levels_1, levels_2, variablename_1, modelvariable_1, lon_originalmodel,
                   lon_model, lat_model, lon_obs, lat_obs, obsvariable_1,
                   variablename_2, modelvariable_2, obsvariable_2, units, filename)

# Rad Feedback (net LW and SW radiative anomaly regressions)
levels_1 = np.arange(-24, 24.0001, 2)
variablename_1 = 'LW Total Rad'
modelvariable_1 = nc_lw_netrad.LW_Rad.values
obsvariable_1 = nc_obs.LW_Rad.values
levels_2 = np.arange(-24, 24.0001, 2)
variablename_2 = 'SW Total Rad'
modelvariable_2 = nc_sw_netrad.SW_Rad.values
obsvariable_2 = nc_obs.SW_Rad.values
units = 'W/$m^2$/K'
filename = 'Rad'
map_plotting_4subs(levels_1, levels_2, variablename_1, modelvariable_1, lon_originalmodel,
                   lon_model, lat_model, lon_obs, lat_obs, obsvariable_1,
                   variablename_2, modelvariable_2, obsvariable_2, units, filename)

# IRF Trend (monthly trends scaled by 12 to W/m2/yr)
# levels_1 = np.arange(-0.015,0.0150001,0.0015)
levels_1 = np.arange(-0.030, 0.0300001, 0.0030)
variablename_1 = 'LW IRF'
modelvariable_1 = 12 * nc_lw_irf.LW_IRF.values
obsvariable_1 = 12 * nc_obs.LW_IRF.values
# levels_2 = np.arange(-0.015,0.0150001,0.0015)
levels_2 = np.arange(-0.030, 0.0300001, 0.0030)
variablename_2 = 'SW IRF'
modelvariable_2 = 12 * nc_sw_irf.SW_IRF.values
obsvariable_2 = 12 * nc_obs.SW_IRF.values
units = 'W/$m^2$/yr'
filename = 'IRF'
map_plotting_4subs(levels_1, levels_2, variablename_1, modelvariable_1, lon_originalmodel,
                   lon_model, lat_model, lon_obs, lat_obs, obsvariable_1,
                   variablename_2, modelvariable_2, obsvariable_2, units, filename)
diff --git a/diagnostics/forcing_feedback/forcing_feedback_util.py b/diagnostics/forcing_feedback/forcing_feedback_util.py
new file mode 100644
index 000000000..6396bb862
--- /dev/null
+++ b/diagnostics/forcing_feedback/forcing_feedback_util.py
@@ -0,0 +1,600 @@
+# This file is part of the forcing_feedback module of the MDTF code package (see mdtf/MDTF_v2.0/LICENSE.txt)
+
+# ======================================================================
+# forcing_feedback_util.py
+#
+# Provide functions called by forcing_feedback.py
+#
+# This file is part of the Forcing Feedback Diagnostic Package and the
+# MDTF code package. See LICENSE.txt for the license.
+#
+# Including:
+# (1) fluxanom_calc_4D
+# (2) fluxanom_calc_3D
+# (3) esat_coef
+# (4) latlonr3_3D4D
+# (5) globemean_2D
+# (6) globemean_3D
+# (7) fluxanom_nc_create
+# (8) feedback_regress
+# (9) bargraph_plotting
+# (10) map_plotting_4subs
+# (11) map_plotting_2subs
+#
+# ======================================================================
+# Import standard Python packages
+
+import os
+import numpy as np
+import numpy.ma as ma
+import dask.array as da
+import xarray as xr
+from scipy.interpolate import griddata
+import matplotlib as mpl
+
+mpl.use('Agg')
+import matplotlib.pyplot as plt
+import matplotlib.ticker as mticker
+import cartopy.crs as ccrs
+from cartopy.mpl.gridliner import LATITUDE_FORMATTER
+from cartopy.util import add_cyclic_point
+
+
+# =======================================================
+# var_anom4D
+
def var_anom4D(var_pert, var_base):
    """
    Compute monthly anomalies of a 4D (time, level, lat, lon) variable.

    The perturbed run is compared against the monthly climatology of the base
    run (mean over all years for each calendar month). Both inputs must have
    a time dimension that is a whole number of years (multiple of 12).

    Returns a chunked dask array of shape (years, 12, lev, lat, lon).
    """
    sp = var_pert.shape
    sb = var_base.shape

    if len(sp) != 4 or len(sb) != 4:
        # BUG FIX: the original only printed here and then crashed with an
        # UnboundLocalError on the return statement; fail explicitly instead.
        raise ValueError("An input variable is not 4D! Function will not execute")

    # Reshape to (year, month, lev, lat, lon) to work on a monthly basis
    var_pert_re = np.reshape(var_pert, (int(sp[0] / 12), 12, sp[1], sp[2], sp[3]))
    var_base_re = np.reshape(var_base, (int(sb[0] / 12), 12, sb[1], sb[2], sb[3]))
    # Monthly climatology of the base run, repeated over the perturbed years
    var_base_tmean = np.repeat(np.squeeze(np.nanmean(var_base_re, axis=0))[np.newaxis, :, :, :],
                               int(sp[0] / 12), axis=0)
    # Chunked dask array so downstream kernel multiplications stay lazy/memory-bounded
    var_anom = da.from_array(var_pert_re - var_base_tmean, chunks=(5, 5, sp[1], sp[2], sp[3]))

    return var_anom
+
+
+# ======================================================================
+# fluxanom_calc_4D
+
def fluxanom_calc_4D(var_anom, tot_kern, clr_kern, dpsfc, levs, lats, psk):
    """
    Multiply anomalies of a 4D climate variable (shape (year, month, lev, lat, lon))
    by all-sky (tot_kern) and clear-sky (clr_kern) radiative kernels to obtain
    radiative flux anomalies, vertically integrated separately over the
    troposphere and the stratosphere.

    Parameters:
        var_anom: 5D anomaly array from var_anom4D
        tot_kern, clr_kern: monthly radiative kernels (month, lev, lat, lon)
        dpsfc: pressure thickness of the surface-adjacent layer
        levs: kernel pressure levels (hPa)
        lats: kernel latitudes (degrees)
        psk: kernel surface pressure field

    Returns:
        (flux_tot_tropo, flux_clr_tropo, flux_tot_strato, flux_clr_strato)
        as (time, lat, lon) numpy arrays.
    """

    # Pressure of upper boundary of each vertical layer
    pt = (levs[1:] + levs[:-1]) / 2
    pt = np.append(pt, 0)
    # Pressure of lower boundary of each vertical layer
    pb = pt[:-1]
    pb = np.insert(pb, 0, 1000)
    # Pressure thickness of each vertical level
    dp = pb - pt

    sp = var_anom.shape

    if len(sp) != 5:
        # BUG FIX: the original printed and then crashed with an
        # UnboundLocalError on return; fail explicitly instead.
        raise ValueError("An input variable is not 4D! Function will not execute")

    # Create indices to separate troposphere from stratosphere:
    # fraction of each layer that lies below/above a simple latitude-dependent
    # tropopause (100 hPa at the equator to 300 hPa at the poles)
    frac_tropo = np.zeros((sp[0], sp[1], sp[2] - 1, sp[3], sp[4]))
    frac_strato = np.zeros((sp[0], sp[1], sp[2] - 1, sp[3], sp[4]))
    tropopause = (100 + np.absolute(lats) * 200 / 90)
    tropopause_mat = np.tile(tropopause, (sp[0], sp[1], sp[2] - 1, sp[4], 1)).transpose(0, 1, 2, 4, 3)
    ptop_mat = np.tile(pt[1:], (sp[0], sp[1], sp[3], sp[4], 1)).transpose(0, 1, 4, 2, 3)
    pbot_mat = np.tile(pb[1:], (sp[0], sp[1], sp[3], sp[4], 1)).transpose(0, 1, 4, 2, 3)
    psk_mat = np.tile(psk, (sp[0], sp[2] - 1, 1, 1, 1)).transpose(0, 2, 1, 3, 4)

    # Layers fully between the surface and the tropopause
    frac_tropo[(pbot_mat <= psk_mat) & (ptop_mat >= tropopause_mat)] = 1
    # Layers straddling the tropopause: partial tropospheric fraction
    frachold = (pbot_mat - tropopause_mat) / (pbot_mat - ptop_mat)
    frac_tropo[(pbot_mat >= tropopause_mat) & (ptop_mat <= tropopause_mat)] = frachold[
        (pbot_mat >= tropopause_mat) & (ptop_mat <= tropopause_mat)]
    # Layers straddling the surface pressure: partial above-ground fraction
    frachold = (psk_mat - ptop_mat) / (pbot_mat - ptop_mat)
    frac_tropo[(pbot_mat >= psk_mat) & (ptop_mat <= psk_mat)] = frachold[(pbot_mat >= psk_mat) & (ptop_mat <= psk_mat)]

    # Stratospheric fractions: the complement above the tropopause
    frachold = (tropopause_mat - ptop_mat) / (pbot_mat - ptop_mat)
    frac_strato[(pbot_mat >= tropopause_mat) & (ptop_mat <= tropopause_mat)] = frachold[
        (pbot_mat >= tropopause_mat) & (ptop_mat <= tropopause_mat)]
    frac_strato[(pbot_mat <= tropopause_mat) & (ptop_mat <= tropopause_mat)] = 1
    frachold, tropopause_mat, ptop_mat, pbot_mat, psk_mat = None, None, None, None, None

    # Broadcast kernels/thicknesses over time and convert to chunked dask arrays
    tot_kern = da.from_array(np.squeeze(np.repeat(tot_kern[np.newaxis, ...], sp[0], axis=0)),
                             chunks=(5, 5, sp[2], sp[3], sp[4]))
    clr_kern = da.from_array(np.squeeze(np.repeat(clr_kern[np.newaxis, ...], sp[0], axis=0)),
                             chunks=(5, 5, sp[2], sp[3], sp[4]))
    dp_mat = da.from_array(np.squeeze(np.repeat(np.repeat(np.repeat(np.repeat( \
        dp[np.newaxis, 1:], 12, axis=0)[np.newaxis, ...], sp[0], axis=0)[:, :, :, np.newaxis], \
        sp[3], axis=3)[:, :, :, :, np.newaxis], sp[4], axis=4)),
        chunks=(5, 5, sp[2], sp[3], sp[4]))
    frac_tropo = da.from_array(frac_tropo, chunks=(5, 5, sp[2], sp[3], sp[4]))
    frac_strato = da.from_array(frac_strato, chunks=(5, 5, sp[2], sp[3], sp[4]))
    dpsfc = da.from_array(dpsfc, chunks=(5, 5, sp[3], sp[4]))

    # Calculate flux anomaly for all levels except first level above surface - total-sky
    flux_tot_tropo = (tot_kern[:, :, 1:, :, :] * frac_tropo * dp_mat * var_anom[:, :, 1:, :, :] / 100)
    flux_tot_strato = (tot_kern[:, :, 1:, :, :] * frac_strato * dp_mat * var_anom[:, :, 1:, :, :] / 100)

    # Calculate flux anomaly for level above surface
    # NOTE(review): the tropospheric and stratospheric bottom-layer terms use
    # the identical expression here, as in the original code — confirm whether
    # the stratospheric bottom contribution should instead be zero.
    flux_tot_tropo_bottom = (tot_kern[:, :, 0, :, :] \
                             * dpsfc * var_anom[:, :, 0, :, :] / 100)

    flux_tot_strato_bottom = (tot_kern[:, :, 0, :, :] \
                              * dpsfc * var_anom[:, :, 0, :, :] / 100)

    # Calculate flux anomaly for all levels except first level above surface - clear-sky
    flux_clr_tropo = (clr_kern[:, :, 1:, :, :] * frac_tropo * dp_mat * var_anom[:, :, 1:, :, :] / 100)

    # BUG FIX: the original multiplied the clear-sky stratospheric flux by
    # frac_tropo (copy-paste error); use frac_strato as in the total-sky case.
    flux_clr_strato = (clr_kern[:, :, 1:, :, :] * frac_strato * dp_mat * var_anom[:, :, 1:, :, :] / 100)

    # Calculate flux anomaly for level above surface
    flux_clr_tropo_bottom = (clr_kern[:, :, 0, :, :] \
                             * dpsfc * var_anom[:, :, 0, :, :] / 100)

    flux_clr_strato_bottom = (clr_kern[:, :, 0, :, :] \
                              * dpsfc * var_anom[:, :, 0, :, :] / 100)

    frac_tropo, frac_strato = None, None

    # Append the surface-adjacent layer to each flux and vertically integrate.
    # BUG FIX: the original appended flux_tot_strato_bottom to flux_tot_strato
    # three times and never appended the bottom layer to either clear-sky flux.
    flux_tot_tropo = da.append(flux_tot_tropo, flux_tot_tropo_bottom[:, :, np.newaxis, ...], axis=2)
    flux_clr_tropo = da.append(flux_clr_tropo, flux_clr_tropo_bottom[:, :, np.newaxis, ...], axis=2)
    flux_tot_strato = da.append(flux_tot_strato, flux_tot_strato_bottom[:, :, np.newaxis, ...], axis=2)
    flux_clr_strato = da.append(flux_clr_strato, flux_clr_strato_bottom[:, :, np.newaxis, ...], axis=2)

    # Vertical sum, then collapse (year, month) back into a single time axis
    flux_tot_tropo = np.reshape(np.squeeze(np.nansum(flux_tot_tropo, axis=2)), (sp[0] * sp[1], sp[3], sp[4]))
    flux_clr_tropo = np.reshape(np.squeeze(np.nansum(flux_clr_tropo, axis=2)), (sp[0] * sp[1], sp[3], sp[4]))
    flux_tot_strato = np.reshape(np.squeeze(np.nansum(flux_tot_strato, axis=2)), (sp[0] * sp[1], sp[3], sp[4]))
    flux_clr_strato = np.reshape(np.squeeze(np.nansum(flux_clr_strato, axis=2)), (sp[0] * sp[1], sp[3], sp[4]))

    return np.asarray(flux_tot_tropo), np.asarray(flux_clr_tropo), np.asarray(flux_tot_strato), np.asarray(
        flux_clr_strato)
+
+
+# ======================================================================
+# fluxanom_calc_3D
+
def fluxanom_calc_3D(var_pert_tot, var_base_tot, tot_kern, clr_kern, var_pert_clr=None, var_base_clr=None):
    """
    Multiply anomalies of a (time, lat, lon) climate variable by all-sky and
    clear-sky radiative kernels to obtain radiative flux anomalies.

    Anomalies are computed per calendar month against the climatology of the
    base run; the time dimension must be a whole number of years.

    var_pert_clr / var_base_clr are optional clear-sky counterparts of the
    variable (used e.g. for clear-sky albedo). When they are omitted, the
    clear-sky kernel is applied to the all-sky anomaly instead.

    Returns (flux_sfc_tot, flux_sfc_clr) as (time, lat, lon) arrays.
    """

    sp = var_pert_tot.shape
    sb = var_base_tot.shape
    skt = tot_kern.shape
    skc = clr_kern.shape

    flux_sfc_tot = np.zeros((int(sp[0] / 12), 12, sp[1], sp[2]))
    flux_sfc_clr = np.zeros((int(sp[0] / 12), 12, sp[1], sp[2]))
    if len(skt) != 3 or len(skc) != 3 or len(sp) != 3 or len(sb) != 3:
        print("An input variable is not 3D! Function will not execute")
    else:
        # BUG FIX: the original mixed `var_pert_clr is not None` and
        # `var_base_clr is not None` as guards, which could reference an
        # unbound var_pert_clr_re; use the clear-sky path only when BOTH
        # clear-sky inputs are provided.
        use_clr = var_pert_clr is not None and var_base_clr is not None

        # Prep variable to analyze on a monthly-basis
        var_pert_tot_re = np.reshape(var_pert_tot, (int(sp[0] / 12), 12, sp[1], sp[2]))
        var_base_tot_re = np.reshape(var_base_tot, (int(sb[0] / 12), 12, sb[1], sb[2]))

        if use_clr:
            var_pert_clr_re = np.reshape(var_pert_clr, (int(sp[0] / 12), 12, sp[1], sp[2]))
            var_base_clr_re = np.reshape(var_base_clr, (int(sb[0] / 12), 12, sb[1], sb[2]))

        for m in range(0, 12):

            # Conduct calculations by month, using m index to isolate data accordingly
            # Create climatology by averaging all timesteps in the var_base variable
            var_base_tot_m_tmean = np.squeeze(np.nanmean(var_base_tot_re[:, m, :, :], axis=0))
            var_pert_tot_m = np.squeeze(var_pert_tot_re[:, m, :, :])

            if use_clr:
                # BUG FIX: use nanmean (as in the all-sky branch above) so NaNs
                # in the clear-sky base field do not poison the climatology.
                var_base_clr_m_tmean = np.squeeze(np.nanmean(var_base_clr_re[:, m, :, :], axis=0))
                var_pert_clr_m = np.squeeze(var_pert_clr_re[:, m, :, :])

            # Compute anomalies
            var_tot_anom = var_pert_tot_m - np.repeat(var_base_tot_m_tmean[np.newaxis, :, :], int(sp[0] / 12), axis=0)

            if use_clr:
                var_clr_anom = var_pert_clr_m - np.repeat(var_base_clr_m_tmean[np.newaxis, :, :], int(sp[0] / 12),
                                                          axis=0)

            # Compute flux anomaly - total-sky
            flux_sfc_tot[:, m, :, :] = np.squeeze(
                np.repeat(tot_kern[np.newaxis, m, :, :], int(sp[0] / 12), axis=0)) * var_tot_anom

            # Compute flux anomaly - clear-sky
            if use_clr:
                flux_sfc_clr[:, m, :, :] = np.squeeze(
                    np.repeat(clr_kern[np.newaxis, m, :, :], int(sp[0] / 12), axis=0)) * var_clr_anom
            else:
                flux_sfc_clr[:, m, :, :] = np.squeeze(
                    np.repeat(clr_kern[np.newaxis, m, :, :], int(sp[0] / 12), axis=0)) * var_tot_anom

    # Reshape flux anomalies back to a single (time, lat, lon) axis order
    flux_sfc_tot = np.reshape(flux_sfc_tot, (sp[0], sp[1], sp[2]))
    flux_sfc_clr = np.reshape(flux_sfc_clr, (sp[0], sp[1], sp[2]))

    return flux_sfc_tot, flux_sfc_clr
+
+
+# ======================================================================
+# esat_coef
+
def esat_coef(temp):
    """
    Saturation vapor pressure coefficient needed for the water-vapor
    radiative flux calculation.

    Two eighth-order polynomial fits in Celsius temperature are evaluated —
    one for saturation over water, one over ice — and the over-water value is
    used wherever the temperature is above freezing.

    `temp` is a numpy array of temperatures in Kelvin.
    """

    tc = temp - 273
    # Polynomial coefficients, constant term first: saturation over water...
    coef_water = np.array([6.11583699, 0.444606896, 0.143177157e-01,
                           0.264224321e-03, 0.299291081e-05, 0.203154182e-07,
                           0.702620698e-10, 0.379534310e-13, -0.321582393e-15])
    # ...and saturation over ice
    coef_ice = np.array([6.11239921, 0.443987641, 0.142986287e-01,
                         0.264847430e-03, 0.302950461e-05, 0.206739458e-07,
                         0.640689451e-10, -0.952447341e-13, -0.976195544e-15])

    # Accumulate both polynomials term by term (same summation order as the
    # original implementation, so results match exactly)
    over_water = coef_water[0]
    over_ice = coef_ice[0]
    for order in range(1, 9):
        over_water = over_water + coef_water[order] * (tc ** (order))
        over_ice = over_ice + coef_ice[order] * (tc ** (order))

    # Above freezing take the over-water fit, otherwise the over-ice fit
    return np.where(tc > 0, over_water, over_ice)
+
+
+# ======================================================================
+# latlonr3_3D4D
+#
+
def latlonr3_3D4D(variable, lat_start, lon_start, lat_end, lon_end, kern):
    """
    Reformats, reorders and regrids lat/lon so model data matches the kernel grid.

    Parameters:
        variable: 3D (time, lat, lon) or 4D (time, lev, lat, lon) field on the
                  (lat_start, lon_start) grid
        lat_start, lon_start: source grid coordinates
        lat_end, lon_end: target (kernel) grid coordinates
        kern: kernel array, used only for its target shape

    Returns the variable regridded onto the kernel grid (NaN outside the
    convex hull of the source points).
    """

    # Check if start and end lat are in similar order. If not, flip.
    if ((lat_start[0] > lat_start[-1]) and (lat_end[0] < lat_end[-1])) or \
            ((lat_start[0] < lat_start[-1]) and (lat_end[0] > lat_end[-1])):
        lat_start = np.flipud(lat_start)
        variable = variable[..., ::-1, :]

    # Check if start and end lon both are 0-360 or both -180-180. If not, make them the same
    # BUG FIX (both branches): slice bounds used `len(lon1) / 2`, which is a
    # float in Python 3 and raises TypeError; integer division is required.
    if ((np.max(lon_start) >= 300) and (np.max(lon_end) > 100 and np.max(lon_end) < 300)):

        lon1 = np.mod((lon_start + 180), 360) - 180

        # Swap the two longitude halves so values increase monotonically
        lon1a = lon1[0:len(lon1) // 2]
        lon1b = lon1[len(lon1) // 2:]
        start1a = variable[..., 0:len(lon1) // 2]
        start1b = variable[..., len(lon1) // 2:]
        lon_start = np.concatenate((lon1b, lon1a))
        variable = np.concatenate((start1b, start1a), axis=-1)
    elif ((np.max(lon_start) > 100 and np.max(lon_start) < 300) and (np.max(lon_end) >= 300)):
        lon1 = np.mod(lon_start, 360)

        lon1a = lon1[0:len(lon1) // 2]
        lon1b = lon1[len(lon1) // 2:]
        start1a = variable[..., 0:len(lon1) // 2]
        start1b = variable[..., len(lon1) // 2:]
        lon_start = np.concatenate((lon1b, lon1a))
        variable = np.concatenate((start1b, start1a), axis=-1)

    # If, after above change (or after skipping that step), start and end lon
    # are in different order, flip.
    if ((lon_start[0] > lon_start[-1]) and (lon_end[0] < lon_end[-1])) or \
            ((lon_start[0] < lon_start[-1]) and (lon_end[0] > lon_end[-1])):
        lon_start = np.flipud(lon_start)
        variable = variable[..., ::-1]

    # Now that latitudes and longitudes have similar order and format, regrid.
    Y_start, X_start = np.meshgrid(lat_start, lon_start)
    Y_kern, X_kern = np.meshgrid(lat_end, lon_end)

    if len(variable.shape) == 3:  # For 3D data: regrid each time slice
        shp_start = variable.shape
        shp_kern = kern.shape
        variable_new = np.empty((shp_start[0], shp_kern[1], shp_kern[2])) * np.nan
        for kk in range(shp_start[0]):
            variable_new[kk, :, :] = griddata((Y_start.flatten(), \
                                               X_start.flatten()), np.squeeze(variable[kk, :, :]).T.flatten(), \
                                              (Y_kern.flatten(), X_kern.flatten()), fill_value=np.nan).reshape(
                shp_kern[2], shp_kern[1]).T
    elif len(variable.shape) == 4:  # For 4D data: regrid each (time, level) slice
        shp_start = variable.shape
        shp_kern = kern.shape
        variable_new = np.empty((shp_start[0], shp_start[1], shp_kern[2], shp_kern[3])) * np.nan
        for ll in range(shp_start[1]):
            for kk in range(shp_start[0]):
                variable_new[kk, ll, :, :] = griddata((Y_start.flatten(),
                                                       X_start.flatten()),
                                                      np.squeeze(variable[kk, ll, :, :]).T.flatten(),
                                                      (Y_kern.flatten(), X_kern.flatten()), fill_value=np.nan).reshape(
                    shp_kern[3], shp_kern[2]).T

    return variable_new
+
+
+# ======================================================================
+# globemean_2D
+#
+
+def globemean_2D(var, w):
+ """
+
+ Compute cosine weighted global-mean over a 2D variable
+
+ """
+
+ var_mask = ma.masked_array(var, ~np.isfinite(var))
+ var_mean = np.squeeze(np.average(np.nanmean(var_mask, axis=1), weights=w))
+
+ return var_mean
+
+
+# ======================================================================
+# globemean_3D
+#
+
+def globemean_3D(var, w):
+ """
+
+ Compute cosine weighted global-mean over a 3D variable
+
+ """
+
+ var_mask = ma.masked_array(var, ~np.isfinite(var))
+ var_mean = np.squeeze(np.average(np.nanmean(var_mask, axis=2), axis=1, weights=w))
+
+ return var_mean
+
+
+# ======================================================================
+# fluxanom_nc_create
+#
+
+def fluxanom_nc_create(variable, lat, lon, fbname):
+ """
+
+ Saves 2D feedback or forcing variables into a NetCDF
+
+ """
+ var = xr.DataArray(variable, coords=[lat, lon], dims=['lat', 'lon'], name=fbname)
+ var.to_netcdf(os.environ['WK_DIR'] + '/model/netCDF/fluxanom2D_' + fbname + '.nc')
+
+ return None
+
+
+# ======================================================================
+# feedback_regress
+#
+# Regresses radiative flux anomalies with global-mean dTs to compute 2D feedback
+def feedback_regress(fluxanom, tspert, tsclimo, lat, lon, fbname):
+    """
+    Regress radiative flux anomalies onto global-mean surface temperature
+    anomalies (dTs) to produce a 2D feedback map, save it to NetCDF, and
+    return it.
+
+    fluxanom -- (time, lat, lon) radiative flux anomalies
+    tspert   -- (time, lat, lon) perturbed-run surface temperature
+    tsclimo  -- (time, lat, lon) climatology-run surface temperature;
+                time length must be a multiple of 12 (monthly data)
+    lat, lon -- 1D coordinate arrays
+    fbname   -- name used for the output variable and file
+
+    Returns the (lat, lon) array of regression slopes.
+    """
+
+    sp = tspert.shape
+    sc = tsclimo.shape
+
+    # Monthly climatology: reshape to (years, 12, lat, lon), average over years.
+    tsclimo_re = np.squeeze(np.nanmean(np.reshape(tsclimo, \
+                                                  (int(sc[0] / 12), 12, sc[1], sc[2])), axis=0))
+
+    # Anomalies: subtract the repeating monthly climatology from each year.
+    tsanom = tspert - np.reshape(np.repeat(tsclimo_re[np.newaxis, ...], int(sp[0] / 12), \
+                                           axis=0), (sp[0], sp[1], sp[2]))
+
+    # Cosine-of-latitude weights, repeated along the time axis.
+    weights = np.repeat(np.cos(np.deg2rad(lat))[np.newaxis, :], sp[0], axis=0)
+    tsanom_globemean = globemean_3D(tsanom, weights)
+    # Broadcast the global-mean dTs time series back to (time, lat, lon).
+    tsanom_re = np.repeat(np.repeat(tsanom_globemean[:, np.newaxis], \
+                                    sp[1], axis=1)[..., np.newaxis], sp[2], axis=2)
+
+    tsanom_re_timemean = np.nanmean(tsanom_re, axis=0)
+    tsanom_re_std = np.nanstd(tsanom_re, axis=0)
+    fluxanom_timemean = np.nanmean(fluxanom, axis=0)
+
+    # Gridpoint-wise regression slope = cov(dTs, flux) / var(dTs).
+    n = np.sum(~np.isnan(tsanom_re), axis=0)
+    cov = np.nansum((tsanom_re - tsanom_re_timemean) * \
+                    (fluxanom - fluxanom_timemean), axis=0) / n
+    slopes = cov / (tsanom_re_std ** 2)
+
+    fluxanom_nc_create(slopes, lat, lon, fbname)
+
+    return slopes
+
+
+# ======================================================================
+# bargraph_plotting
+#
+
+def bargraph_plotting(model_bar, obs_bar, var_units, var_legnames, var_filename):
+ """
+
+ Used for plotting the first four figures generated by forcing_feedback_plots.py.
+ Shows global-mean results from the model and observations
+
+ """
+
+ barWidth = 0.125
+ r1 = np.arange(len(model_bar))
+ r2 = [x + barWidth for x in r1]
+ plt.bar(r1, model_bar, color='blue', width=barWidth, edgecolor='white', \
+ label='Model')
+ plt.bar(r2, obs_bar, color='red', width=barWidth, edgecolor='white', \
+ label='Observations')
+ plt.axhline(0, color='black', lw=1)
+ plt.ylabel(var_units)
+ plt.xticks([r + barWidth for r in range(len(model_bar))], var_legnames)
+ plt.legend(loc="upper right")
+ plt.savefig(os.environ['WK_DIR'] + '/model/PS/forcing_feedback_globemean_' + var_filename + '.eps')
+ plt.close()
+
+ return None
+
+
+# ======================================================================
+# map_plotting_4subs
+#
+# Function for producing figures with 4 subplot maps generated by
+# forcing_feedback_plots.py
+
+def map_plotting_4subs(cbar_levs1, cbar_levs2, var1_name, var1_model, \
+ model_origlon, lon_m, lat_m, lon_o, lat_o, var1_obs, var2_name, \
+ var2_model, var2_obs, var_units, var_filename):
+ """
+
+ Function for producing figured with 4 subplot maps generated by
+ forcing_feedback_plots.py
+
+
+ """
+
+ fig, axs = plt.subplots(2, 2, subplot_kw=dict(projection= \
+ ccrs.PlateCarree(central_longitude=180)), figsize=(8, 8))
+
+ axs[0, 0].set_title(var1_name + ' - Model')
+ if np.max(model_origlon) > 300: # convert 0-360 lon to -180-180 lon for plotting
+ start1a = var1_model[..., 0:int(len(model_origlon) / 2)]
+ start1b = var1_model[..., int(len(model_origlon) / 2):]
+ var1_model = np.concatenate((start1b, start1a), axis=1)
+ var1_model, lon_m180 = add_cyclic_point(var1_model, coord=lon_m)
+ cs = axs[0, 0].contourf(lon_m180, lat_m, var1_model, cmap=plt.cm.RdBu_r, \
+ transform=ccrs.PlateCarree(), vmin=cbar_levs1[0], \
+ vmax=cbar_levs1[-1], levels=cbar_levs1, extend='both')
+ axs[0, 0].coastlines()
+ g1 = axs[0, 0].gridlines(linestyle=':')
+ g1.xlines = False
+ g1.ylabels_left = True
+ g1.ylocator = mticker.FixedLocator(np.arange(-60, 61, 30))
+ g1.yformatter = LATITUDE_FORMATTER
+ if not np.all(cbar_levs1 == cbar_levs2):
+ cbar = plt.colorbar(cs, ax=axs[0, 0], orientation='horizontal', aspect=25)
+ cbar.set_label(var_units)
+
+ axs[0, 1].set_title(var1_name + ' - Obs.')
+ var1_obs, lon_o180 = add_cyclic_point(var1_obs, coord=lon_o)
+ cs = axs[0, 1].contourf(lon_o180, lat_o, var1_obs, cmap=plt.cm.RdBu_r, \
+ transform=ccrs.PlateCarree(), vmin=cbar_levs1[0], \
+ vmax=cbar_levs1[-1], levels=cbar_levs1, extend='both')
+ axs[0, 1].coastlines()
+ g1 = axs[0, 1].gridlines(linestyle=':')
+ g1.xlines = False
+ if np.all(cbar_levs1 == cbar_levs2) == False:
+ cbar = plt.colorbar(cs, ax=axs[0, 1], orientation='horizontal', aspect=25)
+ cbar.set_label(var_units)
+
+ axs[1, 0].set_title(var2_name + ' - Model')
+ if np.max(model_origlon) > 300: # convert 0-360 lon to -180-180 lon for plotting
+ start1a = var2_model[..., 0:int(len(model_origlon) / 2)]
+ start1b = var2_model[..., int(len(model_origlon) / 2):]
+ var2_model = np.concatenate((start1b, start1a), axis=1)
+ var2_model, lon_m180 = add_cyclic_point(var2_model, coord=lon_m)
+ cs = axs[1, 0].contourf(lon_m180, lat_m, var2_model, cmap=plt.cm.RdBu_r, \
+ transform=ccrs.PlateCarree(), vmin=cbar_levs2[0], \
+ vmax=cbar_levs2[-1], levels=cbar_levs2, extend='both')
+ axs[1, 0].coastlines()
+ g1 = axs[1, 0].gridlines(linestyle=':')
+ g1.xlines = False
+ g1.ylabels_left = True
+ g1.ylocator = mticker.FixedLocator(np.arange(-60, 61, 30))
+ g1.yformatter = LATITUDE_FORMATTER
+ if not np.all(cbar_levs1 == cbar_levs2):
+ cbar = plt.colorbar(cs, ax=axs[1, 0], orientation='horizontal', aspect=25)
+ cbar.set_label(var_units)
+
+ axs[1, 1].set_title(var2_name + ' - Obs.')
+ var2_obs, lon_o180 = add_cyclic_point(var2_obs, coord=lon_o)
+ cs = axs[1, 1].contourf(lon_o180, lat_o, var2_obs, cmap=plt.cm.RdBu_r, \
+ transform=ccrs.PlateCarree(), vmin=cbar_levs2[0], \
+ vmax=cbar_levs2[-1], levels=cbar_levs2, extend='both')
+ axs[1, 1].coastlines()
+ g1 = axs[1, 1].gridlines(linestyle=':')
+ g1.xlines = False
+ if not np.all(cbar_levs1 == cbar_levs2):
+ cbar = plt.colorbar(cs, ax=axs[1, 1], orientation='horizontal', aspect=25)
+ cbar.set_label(var_units)
+
+ if np.all(cbar_levs1 == cbar_levs2):
+ cbar = plt.colorbar(cs, ax=axs.ravel(), orientation='horizontal', aspect=25)
+ cbar.set_label(var_units)
+ plt.savefig(os.environ['WK_DIR'] + '/model/PS/forcing_feedback_maps_' + \
+ var_filename + '.eps', bbox_inches='tight')
+ plt.close()
+
+ return None
+
+
+# ======================================================================
+# map_plotting_2subs
+#
+# Function for producing figures with 2 subplot maps generated by
+# forcing_feedback_plots.py
+
+def map_plotting_2subs(cbar_levs, var_name, var_model,
+                       model_origlon, lon_m, lat_m, lon_o, lat_o, var_obs,
+                       var_units, var_filename):
+    """
+    Produce a 1x2 figure of maps (model vs. observations for one variable)
+    for forcing_feedback_plots.py, with a single shared colorbar, and save
+    it as an EPS file.
+    """
+
+    fig, axs = plt.subplots(1, 2, subplot_kw=dict(projection= \
+        ccrs.PlateCarree(central_longitude=180)))
+
+    # Left panel: model.
+    axs[0].set_title(var_name + ' - Model')
+    if np.max(model_origlon) > 300:  # convert 0-360 lon to -180-180 lon for plotting
+        start1a = var_model[..., 0:int(len(model_origlon) / 2)]
+        start1b = var_model[..., int(len(model_origlon) / 2):]
+        var_model = np.concatenate((start1b, start1a), axis=1)
+    var_model, lon_m180 = add_cyclic_point(var_model, coord=lon_m)
+    axs[0].contourf(lon_m180, lat_m, var_model, cmap=plt.cm.RdBu_r,
+                    transform=ccrs.PlateCarree(), vmin=cbar_levs[0],
+                    vmax=cbar_levs[-1], levels=cbar_levs, extend='both')
+    axs[0].coastlines()
+    g1 = axs[0].gridlines(linestyle=':')
+    g1.xlines = False
+    g1.ylabels_left = True
+    g1.ylocator = mticker.FixedLocator(np.arange(-60, 61, 30))
+    g1.yformatter = LATITUDE_FORMATTER
+
+    # Right panel: observations (cs is kept for the shared colorbar below).
+    axs[1].set_title(var_name + ' - Obs.')
+    var_obs, lon_o180 = add_cyclic_point(var_obs, coord=lon_o)
+    cs = axs[1].contourf(lon_o180, lat_o, var_obs, cmap=plt.cm.RdBu_r,
+                         transform=ccrs.PlateCarree(), vmin=cbar_levs[0],
+                         vmax=cbar_levs[-1], levels=cbar_levs, extend='both')
+    axs[1].coastlines()
+    g1 = axs[1].gridlines(linestyle=':')
+    g1.xlines = False
+
+    cbar = plt.colorbar(cs, ax=axs.ravel(), orientation='horizontal', aspect=25)
+    cbar.set_label(var_units)
+    plt.savefig(os.environ['WK_DIR'] + '/model/PS/forcing_feedback_maps_' + \
+                var_filename + '.eps', bbox_inches='tight')
+    plt.close()
+
+    return None
diff --git a/diagnostics/forcing_feedback/obs_processing_script.py b/diagnostics/forcing_feedback/obs_processing_script.py
new file mode 100644
index 000000000..8e5d71aac
--- /dev/null
+++ b/diagnostics/forcing_feedback/obs_processing_script.py
@@ -0,0 +1,214 @@
+import sys
+import xarray as xr
+import pandas as pd
+import numpy as np
+
+np.set_printoptions(threshold=sys.maxsize)
+import numpy.ma as ma
+
+
+# ======================================================================
+# globemean_3D
+#
+# Compute cosine weighted global-mean
+def globemean_3D(var, w):
+ var_mask = ma.masked_array(var, ~np.isfinite(var))
+ var_mean = np.squeeze(np.average(np.nanmean(var_mask, axis=2), axis=1, weights=w))
+
+ return var_mean
+
+
+def globemean_2D(var, w):
+ var_mask = ma.masked_array(var, ~np.isfinite(var))
+ var_mean = np.squeeze(np.average(np.nanmean(var_mask, axis=1), weights=w))
+
+ return var_mean
+
+
+# ======================================================================
+# feedback_regress
+#
+# Regresses radiative flux anomalies with global-mean dTs to compute 2D feedback
+def feedback_regress(fluxanom, tspert, tsclimo, lat, lon, fbname):
+ sp = tspert.shape
+ sc = tsclimo.shape
+
+ tsclimo_re = np.squeeze(np.nanmean(np.reshape(tsclimo, (np.int(sc[0] / 12), 12, sc[1], sc[2])), axis=0))
+
+ tsanom = tspert - np.reshape(np.repeat(tsclimo_re[np.newaxis, ...], np.int(sp[0] / 12),
+ axis=0), (sp[0], sp[1], sp[2]))
+
+ weights = np.repeat(np.cos(np.deg2rad(lat))[np.newaxis, :], sp[0], axis=0)
+ tsanom_globemean = globemean_3D(tsanom, weights)
+ tsanom_re = np.repeat(np.repeat(tsanom_globemean[:, np.newaxis], sp[1], axis=1)[..., np.newaxis], sp[2], axis=2)
+
+ tsanom_re_timemean = np.nanmean(tsanom_re, axis=0)
+ tsanom_re_std = np.nanstd(tsanom_re, axis=0)
+ fluxanom_timemean = np.nanmean(fluxanom, axis=0)
+
+ n = np.sum(~np.isnan(tsanom_re), axis=0)
+ cov = np.nansum((tsanom_re - tsanom_re_timemean) * \
+ (fluxanom - fluxanom_timemean), axis=0) / n
+ slopes = cov / (tsanom_re_std ** 2)
+
+ slopes = xr.DataArray(slopes, coords=[lat, lon], dims=['lat', 'lon'], name=fbname)
+
+ return slopes
+
+
+kname = 'CloudSat'
+regime = 'TOA'
+
+# Read in reanalysis data and kernel-derived radiative anomalies and forcing,
+# which were computed using a slightly modified version of the MDTF "Forcing and Feedback"
+# module code. Then, regress against global-mean surface temperature, when applicable,
+# to compute radiative feedbacks using the "feedback_regress" function detailed above.
+# For Instantaneous Radiative Forcing, we just regress against time (trend)
+
+# Surface temperature
+
+ts_GISS_anoms = xr.open_dataset(
+ '/discover/nobackup/projects/geos5rad/rjkramer/GISStemp/gistemp1200_GHCNv4_ERSSTv5_MDTF.nc')
+ts_GISS_anoms = ts_GISS_anoms.assign_coords(
+ time=pd.date_range('1880-01', periods=len(ts_GISS_anoms.time.values), freq='M'))
+ts_GISS_anoms = ts_GISS_anoms.sel(time=slice('2003-01', '2014-12'))
+ts_GISS_anoms_climo = ts_GISS_anoms_climo = ts_GISS_anoms.sel(time=slice('2003-01', '2014-12'))
+ts_GISS_anoms_climo_climatology = ts_GISS_anoms_climo.groupby('time.month').mean('time')
+ts_GISS_anoms_anoms = ts_GISS_anoms.groupby('time.month') - ts_GISS_anoms_climo_climatology
+ts_pert = ts_GISS_anoms_anoms.tempanomaly.values
+ts_climo = ts_GISS_anoms_anoms.tempanomaly.values
+
+# Planck radiative anomalies
+inputfile = ('/gpfsm/dnb32/rjkramer/MERRAfb/results/' + regime + '_Fluxanom_planck_Results_K' + kname +
+             '_MERRAfb_wCERES_retry.nc')
+nc_pl_era = xr.open_dataset(inputfile)
+# Attach an explicit monthly time axis, then restrict to the 2003-2014 window.
+nc_pl_era = nc_pl_era.assign_coords(time=pd.date_range('2003-01', periods=len(nc_pl_era.time.values), freq='M')).sel(
+    time=slice('2003-01', '2014-12'))
+inputfile = None
+
+# Planck feedback: sum surface + troposphere + stratosphere components, regress on dTs.
+varhold = nc_pl_era.fluxanom_pl_sfc_tot.values + nc_pl_era.fluxanom_pl_trop_tot.values + nc_pl_era.fluxanom_pl_strat_tot.values
+fb_pl_era = feedback_regress(varhold, ts_pert, ts_climo, nc_pl_era.latitude.values, nc_pl_era.longitude.values,
+                             'Planck')
+varhold = None
+
+# Lapse Rate radiative anomalies
+inputfile = ('/gpfsm/dnb32/rjkramer/MERRAfb/results/' + regime + '_Fluxanom_lapserate_Results_K' + kname +
+             '_MERRAfb_wCERES_retry.nc')
+nc_lr_era = xr.open_dataset(inputfile)
+nc_lr_era = nc_lr_era.assign_coords(time=pd.date_range('2003-01', periods=len(nc_lr_era.time.values), freq='M')).sel(
+    time=slice('2003-01', '2014-12'))
+inputfile = None
+
+# Lapse-rate feedback: troposphere + stratosphere components.
+varhold = nc_lr_era.fluxanom_lr_trop_tot.values + nc_lr_era.fluxanom_lr_strat_tot.values
+fb_lr_era = feedback_regress(varhold, ts_pert, ts_climo, nc_lr_era.latitude.values, nc_lr_era.longitude.values,
+                             'LapseRate')
+varhold = None
+
+# Water vapor radiative anomalies
+inputfile = ('/gpfsm/dnb32/rjkramer/MERRAfb/results/' + regime + '_Fluxanom_watervapor_Results_K' + kname +
+             '_MERRAfb_wCERES_retry.nc')
+nc_wv_era = xr.open_dataset(inputfile)
+nc_wv_era = nc_wv_era.assign_coords(time=pd.date_range('2003-01', periods=len(nc_wv_era.time.values),
+                                                       freq='M')).sel(time=slice('2003-01', '2014-12'))
+inputfile = None
+
+# LW and SW water-vapor feedbacks, regressed separately.
+varhold = nc_wv_era.fluxanom_lw_q_trop_tot.values + nc_wv_era.fluxanom_lw_q_strat_tot.values
+fb_lw_q_era = feedback_regress(varhold, ts_pert, ts_climo, nc_wv_era.latitude.values, nc_wv_era.longitude.values,
+                               'LW_WaterVapor')
+varhold = None
+varhold = nc_wv_era.fluxanom_sw_q_trop_tot.values + nc_wv_era.fluxanom_sw_q_strat_tot.values
+fb_sw_q_era = feedback_regress(varhold, ts_pert, ts_climo, nc_wv_era.latitude.values, nc_wv_era.longitude.values,
+                               'SW_WaterVapor')
+varhold = None
+
+# Cloud radiative anomalies
+inputfile = ('/gpfsm/dnb32/rjkramer/MERRAfb/results/' + regime + '_Fluxanom_clouds_Results_K' + kname +
+             '_MERRAfb_wCERES_retry.nc')
+nc_c_era = xr.open_dataset(inputfile)
+nc_c_era = nc_c_era.assign_coords(time=pd.date_range('2003-01', periods=len(nc_c_era.time.values), freq='M')).sel(
+    time=slice('2003-01', '2014-12'))
+inputfile = None
+
+# LW and SW cloud feedbacks, regressed separately on dTs.
+varhold = nc_c_era.fluxanom_lw_c.values
+fb_lw_c_era = feedback_regress(varhold, ts_pert, ts_climo, nc_c_era.latitude.values, nc_c_era.longitude.values,
+                               'LW_Cloud')
+varhold = None
+varhold = nc_c_era.fluxanom_sw_c.values
+fb_sw_c_era = feedback_regress(varhold, ts_pert, ts_climo, nc_c_era.latitude.values, nc_c_era.longitude.values,
+                               'SW_Cloud')
+varhold = None
+
+# Surface Albedo radiative anomalies
+inputfile = ('/gpfsm/dnb32/rjkramer/MERRAfb/results/' + regime + '_Fluxanom_albedo_Results_K' + kname +
+             '_MERRAfb_wCERES_retry.nc')
+nc_a_era = xr.open_dataset(inputfile)
+nc_a_era = nc_a_era.assign_coords(time=pd.date_range('2003-01', periods=len(nc_a_era.time.values), freq='M')).sel(
+    time=slice('2003-01', '2014-12'))
+inputfile = None
+
+varhold = nc_a_era.fluxanom_a_sfc_tot.values
+fb_a_era = feedback_regress(varhold, ts_pert, ts_climo, nc_a_era.latitude.values, nc_a_era.longitude.values,
+                            'SfcAlbedo')
+varhold = None
+
+# Total TOA radiative imbalance
+inputfile = ('/gpfsm/dnb32/rjkramer/MERRAfb/results/' + regime + '_Fluxanom_netrad_Results_K' + kname +
+             '_MERRAfb_wCERES_retry.nc')
+nc_netrad_era = xr.open_dataset(inputfile)
+nc_netrad_era = nc_netrad_era.assign_coords(
+    time=pd.date_range('2003-01', periods=len(nc_netrad_era.time.values), freq='M')).sel(
+    time=slice('2003-01', '2014-12'))
+inputfile = None
+
+# Net LW and SW TOA radiation regressed on dTs.
+varhold = nc_netrad_era.netrad_lw_tot.values
+fb_lw_netrad_era = feedback_regress(varhold, ts_pert, ts_climo, nc_netrad_era.latitude.values,
+                                    nc_netrad_era.longitude.values, 'LW_Rad')
+varhold = None
+varhold = nc_netrad_era.netrad_sw_tot.values
+fb_sw_netrad_era = feedback_regress(varhold, ts_pert, ts_climo, nc_netrad_era.latitude.values,
+                                    nc_netrad_era.longitude.values, 'SW_Rad')
+varhold = None
+
+# Instantaneous Radiative Forcing (IRF)
+inputfile = ('/gpfsm/dnb32/rjkramer/MERRAfb/results/' + regime + '_Fluxanom_IRF_Results_K' + kname +
+             '_MERRAfb_wCERES_retry.nc')
+nc_IRF_era = xr.open_dataset(inputfile)
+nc_IRF_era = nc_IRF_era.assign_coords(time=pd.date_range('2003-01', periods=len(nc_IRF_era.time.values),
+                                                         freq='M')).sel(time=slice('2003-01', '2014-12'))
+inputfile = None
+
+varhold = nc_IRF_era.IRF_lw_tot.values
+# For IRF, regress against a time index (trend) instead of dTs: xtime is the
+# month index broadcast to (time, lat, lon) and the "climatology" is all zeros.
+shp = ts_pert.shape
+xtime = np.repeat(np.repeat(np.arange(shp[0])[..., np.newaxis], shp[1], axis=1)[..., np.newaxis], shp[2], axis=2)
+fb_lw_IRF_era = feedback_regress(varhold, xtime, xtime * 0, nc_IRF_era.latitude.values, nc_IRF_era.longitude.values,
+                                 'LW_IRF')
+varhold = None
+varhold = nc_IRF_era.IRF_sw_tot.values
+fb_sw_IRF_era = feedback_regress(varhold, xtime, xtime * 0, nc_IRF_era.latitude.values, nc_IRF_era.longitude.values,
+                                 'SW_IRF')
+varhold = None
+
+# Save 2D Radiative feedbacks (W/m2/K) and IRF trends (W/m2/month) to a netcdf for use in the MDTF Module
+ds = fb_pl_era.to_dataset()
+ds['LapseRate'] = fb_lr_era
+ds['LW_WaterVapor'] = fb_lw_q_era
+ds['SW_WaterVapor'] = fb_sw_q_era
+ds['SfcAlbedo'] = fb_a_era
+ds['LW_Cloud'] = fb_lw_c_era
+ds['SW_Cloud'] = fb_sw_c_era
+ds['LW_IRF'] = fb_lw_IRF_era  # W/m2/month for now. converted to W/m2/yr later in POD
+ds['SW_IRF'] = fb_sw_IRF_era  # W/m2/month for now. converted to W/m2/yr later in POD
+ds['LW_Rad'] = fb_lw_netrad_era
+ds['SW_Rad'] = fb_sw_netrad_era
+
+ds.to_netcdf('forcing_feedback_obs_MERRA2.nc')
+
+# Print cosine-weighted global means as a sanity check on the results.
+weights = np.cos(np.deg2rad(nc_IRF_era.latitude.values))
+print('Rad')
+print(globemean_2D(fb_lw_netrad_era + fb_sw_netrad_era, weights))
+print('CldFB')
+print(globemean_2D(fb_lw_c_era + fb_sw_c_era, weights))
+print('LR FB')
+print(globemean_2D(fb_lr_era, weights))
+print('IRF')
+print(globemean_2D(fb_lw_IRF_era + fb_sw_IRF_era, weights))
diff --git a/diagnostics/forcing_feedback/settings.jsonc b/diagnostics/forcing_feedback/settings.jsonc
new file mode 100644
index 000000000..0deac150f
--- /dev/null
+++ b/diagnostics/forcing_feedback/settings.jsonc
@@ -0,0 +1,111 @@
+// Settings for the "Radiative Forcing and Feedback Diagnostics" POD.
+// JSONC: comments are permitted in MDTF settings files.
+{
+  "settings" : {
+    // Entry-point script for the POD.
+    "driver" : "forcing_feedback.py",
+    "long_name" : "Radiative Forcing and Feedback Diagnostics",
+    "realm" : "atmos",
+    "runtime_requirements": {
+      "python3": ["os", "numpy", "xarray", "pandas", "netCDF4", "scipy", "matplotlib", "cartopy"]
+    },
+    // NOTE(review): presumably LW/SW cloud-masking adjustment constants read by
+    // the POD scripts via the environment -- confirm usage in the driver.
+    "pod_env_vars" : {
+      "LW_CLOUDMASK": 1.24,
+      "SW_CLOUDMASK": 2.43
+    }
+  },
+  // Data request: monthly means, minimum 5 years of data.
+  "data": {
+    "format": "any_netcdf_classic",
+    "rename_dimensions": false,
+    "rename_variables": false,
+    "multi_file_ok": false,
+    "frequency": "mon",
+    "min_frequency": "mon",
+    "max_frequency": "mon",
+    "min_duration": "5yr",
+    "max_duration": "any"
+  },
+  "dimensions": {
+    "lat": {"standard_name": "latitude"},
+    "lon": {"standard_name": "longitude"},
+    "plev": {
+      "standard_name": "air_pressure",
+      "units": "Pa",
+      "positive": "down",
+      "axis": "Z"
+    },
+    "time": {"standard_name": "time"}
+  },
+  // Model variables required by the POD (all monthly means).
+  "varlist" : {
+    "ts": {
+      "standard_name": "surface_temperature",
+      "units": "K",
+      "dimensions" : ["time", "lat", "lon"],
+      "freq": "mon"
+    },
+    "ta": {
+      "standard_name": "air_temperature",
+      "units": "K",
+      "dimensions" : ["time", "plev", "lat", "lon"],
+      "freq": "mon"
+    },
+    "hus": {
+      "standard_name": "specific_humidity",
+      "units": "1",
+      "dimensions" : ["time", "plev", "lat", "lon"],
+      "freq": "mon"
+    },
+    "rsus": {
+      "standard_name": "surface_upwelling_shortwave_flux_in_air",
+      "units": "W m-2",
+      "dimensions" : ["time", "lat", "lon"],
+      "freq": "mon"
+    },
+    "rsuscs": {
+      "standard_name": "surface_upwelling_shortwave_flux_in_air_assuming_clear_sky",
+      "units": "W m-2",
+      "dimensions" : ["time", "lat", "lon"],
+      "freq": "mon"
+    },
+    "rsds": {
+      "standard_name": "surface_downwelling_shortwave_flux_in_air",
+      "units": "W m-2",
+      "dimensions" : ["time", "lat", "lon"],
+      "freq": "mon"
+    },
+    "rsdscs": {
+      "standard_name": "surface_downwelling_shortwave_flux_in_air_assuming_clear_sky",
+      "units": "W m-2",
+      "dimensions" : ["time", "lat", "lon"],
+      "freq": "mon"
+    },
+    "rsdt": {
+      "standard_name": "toa_incoming_shortwave_flux",
+      "units": "W m-2",
+      "dimensions" : ["time", "lat", "lon"],
+      "freq": "mon"
+    },
+    "rsut": {
+      "standard_name": "toa_outgoing_shortwave_flux",
+      "units": "W m-2",
+      "dimensions" : ["time", "lat", "lon"],
+      "freq": "mon"
+    },
+    "rsutcs": {
+      "standard_name": "toa_outgoing_shortwave_flux_assuming_clear_sky",
+      "units": "W m-2",
+      "dimensions" : ["time", "lat", "lon"],
+      "freq": "mon"
+    },
+    "rlut": {
+      "standard_name": "toa_outgoing_longwave_flux",
+      "units": "W m-2",
+      "dimensions" : ["time", "lat", "lon"],
+      "freq": "mon"
+    },
+    "rlutcs": {
+      "standard_name": "toa_outgoing_longwave_flux_assuming_clear_sky",
+      "units": "W m-2",
+      "dimensions" : ["time", "lat", "lon"],
+      "freq": "mon"
+    }
+  }
+}
From c21ef23d9655e5d1dfbfcbdf2138e0b93133baec Mon Sep 17 00:00:00 2001
From: Jess <20195932+wrongkindofdoctor@users.noreply.github.com>
Date: Thu, 21 Mar 2024 16:50:44 -0400
Subject: [PATCH 7/7] Add jtmims to CODEOWNERS doc
---
CODEOWNERS | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/CODEOWNERS b/CODEOWNERS
index d4f159285..d5f5e0e80 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -1 +1 @@
-* @jkrasting @wrongkindofdoctor @aradhakrishnanGFDL
+* @jkrasting @wrongkindofdoctor @aradhakrishnanGFDL @jtmims