diff --git a/.gitignore b/.gitignore
index 4b998f4dcb..ca701132a7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -105,6 +105,13 @@ unit_test_build
/tools/mkmapdata/regrid.o*
/tools/mkmapdata/map*.nc
+# run_neon output directories
+/tools/site_and_regional/listing.csv
+/tools/site_and_regional/????/
+/tools/site_and_regional/????.ad/
+/tools/site_and_regional/????.postad/
+/tools/site_and_regional/????.transient/
+
# build output
*.o
*.mod
diff --git a/Externals.cfg b/Externals.cfg
index 491405a33b..2df854055e 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -23,7 +23,7 @@ required = True
local_path = components/mosart
protocol = git
repo_url = https://github.com/ESCOMP/MOSART
-tag = mosart1_0_45
+tag = mosart1_0_47
required = True
[mizuRoute]
diff --git a/bld/CLMBuildNamelist.pm b/bld/CLMBuildNamelist.pm
index d9eeb1e5a0..c2a32da4ac 100755
--- a/bld/CLMBuildNamelist.pm
+++ b/bld/CLMBuildNamelist.pm
@@ -4560,6 +4560,8 @@ sub check_use_case_name {
} else {
$log->fatal_error($diestring);
}
+ } elsif ( $use_case =~ /^([0-9]+|PI)-PD_*($desc)_transient$/ ) {
+ # valid name
} elsif ( $use_case =~ /^([0-9]+)_*($desc)_control$/ ) {
# valid name
} elsif ( $use_case =~ /^($desc)_pd$/ ) {
diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml
index 2686d62b9a..c859fc13fc 100644
--- a/bld/namelist_files/namelist_defaults_ctsm.xml
+++ b/bld/namelist_files/namelist_defaults_ctsm.xml
@@ -25,6 +25,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
379.0
388.8
397.5
+408.83
284.7
284.7
@@ -1449,6 +1450,9 @@ lnd/clm2/surfdata_map/release-clm5.0.30/surfdata_ne0np4.CONUS.ne30x8_hist_78pfts
2101
2015
+2018
+2018
+
2010
2010
@@ -1601,6 +1605,9 @@ lnd/clm2/surfdata_map/release-clm5.0.30/surfdata_ne0np4.CONUS.ne30x8_hist_78pfts
2100
2015
+2018
+2018
+
2010
2010
@@ -1619,6 +1626,9 @@ lnd/clm2/surfdata_map/release-clm5.0.30/surfdata_ne0np4.CONUS.ne30x8_hist_78pfts
2000
2000
+2018
+2018
+
2010
2010
@@ -1706,6 +1716,12 @@ lnd/clm2/surfdata_map/release-clm5.0.30/surfdata_ne0np4.CONUS.ne30x8_hist_78pfts
2106
2015
+2018
+2018
+
+2010
+2010
+
2000
2000
diff --git a/bld/namelist_files/namelist_definition_ctsm.xml b/bld/namelist_files/namelist_definition_ctsm.xml
index a08795dd1f..bc1dd034aa 100644
--- a/bld/namelist_files/namelist_definition_ctsm.xml
+++ b/bld/namelist_files/namelist_definition_ctsm.xml
@@ -2057,7 +2057,7 @@ If 1, turn on the MEGAN model for BVOC's (Biogenic Volitile Organic Compounds)
+"PtVg,1000,850,1100,1350,1600,1850,1855,1865,1875,1885,1895,1905,1915,1925,1935,1945,1955,1965,1975,1979,1980,1982,1985,1995,2000,2005,2010,2013,2015,2018,2025,2035,2045,2055,2065,2075,2085,2095,2105">
Year to simulate and to provide datasets for (such as surface datasets, initial conditions, aerosol-deposition, Nitrogen deposition rates etc.)
A sim_year of 1000 corresponds to data used for testing only, NOT corresponding to any real datasets.
A sim_year greater than 2015 corresponds to ssp_rcp scenario data
diff --git a/bld/namelist_files/use_cases/2018-PD_transient.xml b/bld/namelist_files/use_cases/2018-PD_transient.xml
new file mode 100644
index 0000000000..d838efbd00
--- /dev/null
+++ b/bld/namelist_files/use_cases/2018-PD_transient.xml
@@ -0,0 +1,29 @@
+
+
+
+
+Simulate transient land-use, and aerosol deposition changes from 2018 to current day with a mix of historical data, and future scenario data
+
+
+
+2018
+
+1850-2100
+
+
+SSP3-7.0
+
+2018
+2022
+2018
+
+2018
+2022
+2018
+
+2018
+2022
+2018
+
+
diff --git a/bld/namelist_files/use_cases/2018_control.xml b/bld/namelist_files/use_cases/2018_control.xml
new file mode 100644
index 0000000000..e5e572d749
--- /dev/null
+++ b/bld/namelist_files/use_cases/2018_control.xml
@@ -0,0 +1,14 @@
+
+
+
+
+Conditions to simulate 2018 land-use
+
+2018
+
+constant
+
+
+SSP3-7.0
+
+
diff --git a/bld/namelist_files/use_cases/README b/bld/namelist_files/use_cases/README
index 4ccaf00bdc..e55fd9285b 100644
--- a/bld/namelist_files/use_cases/README
+++ b/bld/namelist_files/use_cases/README
@@ -17,6 +17,10 @@ Transient cases:
20thC$desc_transient (means nominal 1850-2000 although some datasets are 1850-2005)
+ or
+
+ yyyy-PD_$desc_transient (means nominal year yyyy through present day (PD), with the year for PD advancing)
+
Control cases:
yyyy$desc_control
@@ -30,6 +34,7 @@ Where
yyyy = Simulation year (such as 1850 or 2000).
yyyy-yyyy = Range of simulation years to run over (i.e.. 1850-2000).
+yyyy-PD = Range of simulation years to run over until present day (i.e., 2018-2024).
$ssp_rcp = Shared Socieconomic Pathway (SSP) Representative concentration pathway (RCP) description string
for future scenarios:
SSP#-#.# (for example: SSP5-8.5, SSP1-2.6, SSP4-6.0
diff --git a/bld/unit_testers/build-namelist_test.pl b/bld/unit_testers/build-namelist_test.pl
index f6c8d75937..784d3b5e0c 100755
--- a/bld/unit_testers/build-namelist_test.pl
+++ b/bld/unit_testers/build-namelist_test.pl
@@ -393,7 +393,7 @@ sub cat_and_create_namelistinfile {
#
# Now run the site
#
- my $options = "-res CLM_USRDAT -clm_usr_name NEON -no-megan -bgc bgc -sim_year 2000 -infile $namelistfile";
+ my $options = "-res CLM_USRDAT -clm_usr_name NEON -no-megan -bgc bgc -sim_year 2018 -infile $namelistfile";
eval{ system( "$bldnml -envxml_dir . $options > $tempfile 2>&1 " ); };
is( $@, '', "options: $options" );
$cfiles->checkfilesexist( "$options", $mode );
diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml
index d3c2eda0a7..555a0ce9b5 100644
--- a/cime_config/config_component.xml
+++ b/cime_config/config_component.xml
@@ -290,10 +290,11 @@
char
+
ABBY,BLAN,CPER,DEJU,GRSM,HEAL,KONA,LENO,NIWO,ONAQ,PUUM,SERC,SRER,TALL,TREE,WOOD,
BARR,BONA,DCFS,DELA,GUAN,JERC,KONZ,MLBS,NOGP,ORNL,RMNP,SJER,STEI,TEAK,UKFS,WREF,
- BART,CLBJ,DSNY,HARV,JORN,LAJA,MOAB,OAES,OSBS,SCBI,SOAP,STER,TOOL,UNDE,YELL
+ BART,CLBJ,DSNY,HARV,JORN,LAJA,MOAB,OAES,OSBS,SCBI,SOAP,STER,TOOL,UNDE,YELL,
run_component_ctsm
diff --git a/cime_config/usermods_dirs/NEON/BARR/shell_commands b/cime_config/usermods_dirs/NEON/BARR/shell_commands
index 55037c6b37..713331c0ee 100644
--- a/cime_config/usermods_dirs/NEON/BARR/shell_commands
+++ b/cime_config/usermods_dirs/NEON/BARR/shell_commands
@@ -1,7 +1,11 @@
+#!/bin/bash
+
./xmlchange NEONSITE=BARR
./xmlchange PTS_LON=203.349781
./xmlchange PTS_LAT=71.281711
-./xmlchange RUN_STARTDATE=2019-01-01
-./xmlchange DATM_YR_ALIGN=2019,DATM_YR_START=2019,DATM_YR_END=2022
-./xmlchange STOP_N=39
-
+./xmlchange DATM_YR_ALIGN=2019,DATM_YR_START=2019
+# Different default start date and number of months to run for transient case
+if [[ $compset =~ ^HIST ]]; then
+ ./xmlchange RUN_STARTDATE=2019-01-01
+ ./xmlchange STOP_N=39
+fi
diff --git a/cime_config/usermods_dirs/NEON/CPER/shell_commands b/cime_config/usermods_dirs/NEON/CPER/shell_commands
index 169b358a40..b6ccbcdf57 100644
--- a/cime_config/usermods_dirs/NEON/CPER/shell_commands
+++ b/cime_config/usermods_dirs/NEON/CPER/shell_commands
@@ -1,3 +1,6 @@
./xmlchange NEONSITE=CPER
./xmlchange PTS_LON=255.25545
./xmlchange PTS_LAT=40.81297
+if [[ $compset =~ ^HIST ]]; then
+ ./xmlchange STOP_N=50
+fi
diff --git a/cime_config/usermods_dirs/NEON/GUAN/shell_commands b/cime_config/usermods_dirs/NEON/GUAN/shell_commands
index ee2eca82d9..4214a35e3c 100644
--- a/cime_config/usermods_dirs/NEON/GUAN/shell_commands
+++ b/cime_config/usermods_dirs/NEON/GUAN/shell_commands
@@ -1,6 +1,11 @@
+#!/bin/bash
+
./xmlchange NEONSITE=GUAN
./xmlchange PTS_LON=293.13112
./xmlchange PTS_LAT=17.96882
-./xmlchange RUN_STARTDATE=2019-01-01
./xmlchange DATM_YR_ALIGN=2019,DATM_YR_START=2019
-./xmlchange STOP_N=39
+# Different default start date and number of months to run for transient case
+if [[ $compset =~ ^HIST ]]; then
+ ./xmlchange RUN_STARTDATE=2019-01-01
+ ./xmlchange STOP_N=39
+fi
diff --git a/cime_config/usermods_dirs/NEON/KONA/shell_commands b/cime_config/usermods_dirs/NEON/KONA/shell_commands
index c00e220e77..66f274dd36 100644
--- a/cime_config/usermods_dirs/NEON/KONA/shell_commands
+++ b/cime_config/usermods_dirs/NEON/KONA/shell_commands
@@ -1,3 +1,7 @@
./xmlchange NEONSITE=KONA
./xmlchange PTS_LON=263.38956
./xmlchange PTS_LAT=39.10828
+# Setup to run with prognostic crops for this site
+# If you want to explicitly run in SP mode or add other
+# options you'll need to add that after this...
+./xmlchange CLM_BLDNML_OPTS="--bgc bgc --crop"
diff --git a/cime_config/usermods_dirs/NEON/LAJA/shell_commands b/cime_config/usermods_dirs/NEON/LAJA/shell_commands
index 522818a697..a7bda447e6 100644
--- a/cime_config/usermods_dirs/NEON/LAJA/shell_commands
+++ b/cime_config/usermods_dirs/NEON/LAJA/shell_commands
@@ -1,8 +1,12 @@
+#!/bin/bash
./xmlchange NEONSITE=LAJA
./xmlchange PTS_LON=292.92392
./xmlchange PTS_LAT=18.02184
-./xmlchange RUN_STARTDATE=2019-01-01
./xmlchange DATM_YR_ALIGN=2019,DATM_YR_START=2019
-./xmlchange STOP_N=39
+# Different default start date and number of months to run for transient case
+if [[ $compset =~ ^HIST ]]; then
+ ./xmlchange RUN_STARTDATE=2019-01-01
+ ./xmlchange STOP_N=39
+fi
diff --git a/cime_config/usermods_dirs/NEON/LENO/shell_commands b/cime_config/usermods_dirs/NEON/LENO/shell_commands
index 89583ed158..c5ae590186 100644
--- a/cime_config/usermods_dirs/NEON/LENO/shell_commands
+++ b/cime_config/usermods_dirs/NEON/LENO/shell_commands
@@ -1,7 +1,12 @@
+#!/bin/bash
+
./xmlchange NEONSITE=LENO
./xmlchange PTS_LON=271.83897
./xmlchange PTS_LAT=31.8531
-./xmlchange RUN_STARTDATE=2021-01-01
./xmlchange DATM_YR_ALIGN=2021,DATM_YR_START=2021
-./xmlchange STOP_N=15
+# Different default start date and number of months to run for transient case
+if [[ $compset =~ ^HIST ]]; then
+ ./xmlchange RUN_STARTDATE=2021-01-01
+ ./xmlchange STOP_N=15
+fi
diff --git a/cime_config/usermods_dirs/NEON/MLBS/shell_commands b/cime_config/usermods_dirs/NEON/MLBS/shell_commands
index 9f70ecd662..d5de0f64eb 100644
--- a/cime_config/usermods_dirs/NEON/MLBS/shell_commands
+++ b/cime_config/usermods_dirs/NEON/MLBS/shell_commands
@@ -1,6 +1,10 @@
+#!/bin/bash
./xmlchange NEONSITE=MLBS
./xmlchange PTS_LON=279.47575
./xmlchange PTS_LAT=37.37783
-./xmlchange STOP_N=24
./xmlchange DATM_YR_END=2019
-
+# Different default number of months to run for transient case
+if [[ $compset =~ ^HIST ]]; then
+ ./xmlchange DATM_YR_END=2020
+ ./xmlchange STOP_N=24
+fi
diff --git a/cime_config/usermods_dirs/NEON/MOAB/shell_commands b/cime_config/usermods_dirs/NEON/MOAB/shell_commands
index d91513a92c..96d0bcbe68 100644
--- a/cime_config/usermods_dirs/NEON/MOAB/shell_commands
+++ b/cime_config/usermods_dirs/NEON/MOAB/shell_commands
@@ -1,8 +1,10 @@
+#!/bin/bash
./xmlchange NEONSITE=MOAB
./xmlchange PTS_LON=250.61118
./xmlchange PTS_LAT=38.25136
-./xmlchange RUN_STARTDATE=2018-01-01
-./xmlchange DATM_YR_ALIGN=2018,DATM_YR_START=2018,DATM_YR_END=2020
-./xmlchange STOP_N=36
-
-
+./xmlchange DATM_YR_END=2020
+# Different default number of months to run for transient case
+if [[ $compset =~ ^HIST ]]; then
+ ./xmlchange DATM_YR_END=2021
+ ./xmlchange STOP_N=36
+fi
diff --git a/cime_config/usermods_dirs/NEON/ONAQ/shell_commands b/cime_config/usermods_dirs/NEON/ONAQ/shell_commands
index f2e1640725..43dab69998 100644
--- a/cime_config/usermods_dirs/NEON/ONAQ/shell_commands
+++ b/cime_config/usermods_dirs/NEON/ONAQ/shell_commands
@@ -1,8 +1,10 @@
+#!/bin/bash
./xmlchange NEONSITE=ONAQ
./xmlchange PTS_LON=276.49815
./xmlchange PTS_LAT=35.68839
-./xmlchange RUN_STARTDATE=2018-01-01
-./xmlchange DATM_YR_ALIGN=2018,DATM_YR_START=2018,DATM_YR_END=2019
-./xmlchange STOP_N=24
-
-
+./xmlchange DATM_YR_END=2019
+# Different default number of months to run for transient case
+if [[ $compset =~ ^HIST ]]; then
+ ./xmlchange DATM_YR_END=2020
+ ./xmlchange STOP_N=24
+fi
diff --git a/cime_config/usermods_dirs/NEON/SJER/shell_commands b/cime_config/usermods_dirs/NEON/SJER/shell_commands
index 9d3ee15a81..6e05d23792 100644
--- a/cime_config/usermods_dirs/NEON/SJER/shell_commands
+++ b/cime_config/usermods_dirs/NEON/SJER/shell_commands
@@ -1,8 +1,11 @@
+#!/bin/bash
./xmlchange NEONSITE=SJER
./xmlchange PTS_LON=240.267
./xmlchange PTS_LAT=37.107117
-./xmlchange RUN_STARTDATE=2019-01-01
./xmlchange DATM_YR_ALIGN=2019,DATM_YR_START=2019
-./xmlchange STOP_N=39
+# Different default start date and number of months to run for transient case
+if [[ $compset =~ ^HIST ]]; then
+ ./xmlchange RUN_STARTDATE=2019-01-01
+ ./xmlchange STOP_N=39
+fi
-#./xmlchange RUN_STARTDATE=2018-09-01
diff --git a/cime_config/usermods_dirs/NEON/STER/shell_commands b/cime_config/usermods_dirs/NEON/STER/shell_commands
index 2c1699fc9c..38b173c309 100644
--- a/cime_config/usermods_dirs/NEON/STER/shell_commands
+++ b/cime_config/usermods_dirs/NEON/STER/shell_commands
@@ -1,3 +1,7 @@
./xmlchange NEONSITE=STER
./xmlchange PTS_LON=256.96992
./xmlchange PTS_LAT=40.45984
+# Setup to run with prognostic crops for this site
+# If you want to explicitly run in SP mode or add other
+# options you'll need to add that after this...
+./xmlchange CLM_BLDNML_OPTS="--bgc bgc --crop"
diff --git a/cime_config/usermods_dirs/NEON/TEAK/shell_commands b/cime_config/usermods_dirs/NEON/TEAK/shell_commands
index 5309888a12..9058eda022 100644
--- a/cime_config/usermods_dirs/NEON/TEAK/shell_commands
+++ b/cime_config/usermods_dirs/NEON/TEAK/shell_commands
@@ -1,7 +1,10 @@
+#!/bin/bash
./xmlchange NEONSITE=TEAK
./xmlchange PTS_LON=240.99424199999999
./xmlchange PTS_LAT=37.006472
-./xmlchange RUN_STARTDATE=2019-01-01
-./xmlchange DATM_YR_ALIGN=2019,DATM_YR_START=2019
-./xmlchange STOP_N=39
-
+./xmlchange DATM_YR_ALIGN=2019,DATM_YR_START=2019
+# Different default start date and number of months to run for transient case
+if [[ $compset =~ ^HIST ]]; then
+ ./xmlchange RUN_STARTDATE=2019-01-01
+ ./xmlchange STOP_N=39
+fi
diff --git a/cime_config/usermods_dirs/NEON/TOOL/shell_commands b/cime_config/usermods_dirs/NEON/TOOL/shell_commands
index fc2551390b..f1f2e1771a 100644
--- a/cime_config/usermods_dirs/NEON/TOOL/shell_commands
+++ b/cime_config/usermods_dirs/NEON/TOOL/shell_commands
@@ -1,8 +1,10 @@
+#!/bin/bash
./xmlchange NEONSITE=TOOL
./xmlchange PTS_LON=210.629872
./xmlchange PTS_LAT=68.66045
-./xmlchange RUN_STARTDATE=2020-01-01
./xmlchange DATM_YR_ALIGN=2020,DATM_YR_START=2020
-./xmlchange STOP_N=27
-
-
+# Different default start date and number of months to run for transient case
+if [[ $compset =~ ^HIST ]]; then
+ ./xmlchange RUN_STARTDATE=2020-01-01
+ ./xmlchange STOP_N=27
+fi
diff --git a/cime_config/usermods_dirs/NEON/WREF/shell_commands b/cime_config/usermods_dirs/NEON/WREF/shell_commands
index 77a0b750cd..807055ae6e 100644
--- a/cime_config/usermods_dirs/NEON/WREF/shell_commands
+++ b/cime_config/usermods_dirs/NEON/WREF/shell_commands
@@ -1,3 +1,10 @@
+#!/bin/bash
./xmlchange NEONSITE=WREF
./xmlchange PTS_LON=238.04162
./xmlchange PTS_LAT=45.81637
+./xmlchange DATM_YR_ALIGN=2019,DATM_YR_START=2019
+# Different default start date and number of months to run for transient case
+if [[ $compset =~ ^HIST ]]; then
+ ./xmlchange RUN_STARTDATE=2019-01-01
+ ./xmlchange STOP_N=39
+fi
diff --git a/cime_config/usermods_dirs/NEON/YELL/shell_commands b/cime_config/usermods_dirs/NEON/YELL/shell_commands
index c32b11ef7d..800d7d01ad 100644
--- a/cime_config/usermods_dirs/NEON/YELL/shell_commands
+++ b/cime_config/usermods_dirs/NEON/YELL/shell_commands
@@ -1,7 +1,10 @@
+#!/bin/bash
./xmlchange NEONSITE=YELL
./xmlchange PTS_LON=249.45803999999998
./xmlchange PTS_LAT=44.95597
-./xmlchange RUN_STARTDATE=2019-01-01
-./xmlchange DATM_YR_ALIGN=2019,DATM_YR_START=2019
-./xmlchange STOP_N=39
-# ./xmlchange RUN_STARTDATE=2018-08-01
+./xmlchange DATM_YR_ALIGN=2019,DATM_YR_START=2019
+# Different default start date and number of months to run for transient case
+if [[ $compset =~ ^HIST ]]; then
+ ./xmlchange RUN_STARTDATE=2019-01-01
+ ./xmlchange STOP_N=39
+fi
diff --git a/cime_config/usermods_dirs/NEON/defaults/shell_commands b/cime_config/usermods_dirs/NEON/defaults/shell_commands
index 53e445e06a..7095e1def7 100644
--- a/cime_config/usermods_dirs/NEON/defaults/shell_commands
+++ b/cime_config/usermods_dirs/NEON/defaults/shell_commands
@@ -1,16 +1,26 @@
+#!/bin/bash
./xmlchange CLM_USRDAT_NAME=NEON
-./xmlchange RUN_STARTDATE=2018-01-01
-./xmlchange CLM_NML_USE_CASE=1850-2100_SSP3-7.0_transient
./xmlchange CCSM_CO2_PPMV=408.83
+# Set data forcing data to future scenario so will have data from 2018 to present-day
./xmlchange DATM_PRESAERO=SSP3-7.0
./xmlchange DATM_PRESNDEP=SSP3-7.0
./xmlchange DATM_PRESO3=SSP3-7.0
+# Explicitly set the MPI library to mpi-serial so won't have the build/run complexity of a full MPI library
+./xmlchange MPILIB=mpi-serial
+# Set years to run forcing data over
./xmlchange DATM_YR_ALIGN=2018,DATM_YR_END=2021,DATM_YR_START=2018
+compset=`./xmlquery COMPSET --value`
+# For a transient case run the whole length and don't cycle
+if [[ $compset =~ ^HIST ]]; then
+ ./xmlchange DATM_YR_END=2022
+ ./xmlchange RUN_STARTDATE=2018-01-01
+ # Number of months that can be run for the full transient case
+ ./xmlchange STOP_OPTION="nmonths"
+ ./xmlchange STOP_N=51
+ ./xmlchange CLM_NML_USE_CASE="2018-PD_transient"
+else
+ ./xmlchange CLM_NML_USE_CASE="2018_control"
+fi
# Explicitly set PIO Type to NETCDF since this is a single processor case (should already be set this way)
./xmlchange PIO_TYPENAME=netcdf
-# BD:05/06/2022 - The PIO_REARRANGER_LND value - for global runs, PIO_REARRANGER_LND = 1 is ideal
-# and a value of 2 results in slow I/O. For point runs like these, a value of 1 results in a crash (PIO bug, probably),
-# so we explicitly set a value of 2.
-./xmlchange PIO_REARRANGER_LND=2
-
diff --git a/cime_config/usermods_dirs/NEON/defaults/user_nl_clm b/cime_config/usermods_dirs/NEON/defaults/user_nl_clm
index 332060dd99..29e50431ce 100644
--- a/cime_config/usermods_dirs/NEON/defaults/user_nl_clm
+++ b/cime_config/usermods_dirs/NEON/defaults/user_nl_clm
@@ -19,19 +19,10 @@
!----------------------------------------------------------------------------------
flanduse_timeseries = ' ' ! This isn't needed for a non transient case, but will be once we start using transient compsets
-fsurdat = "$DIN_LOC_ROOT/lnd/clm2/surfdata_map/NEON/surfdata_hist_78pfts_CMIP6_simyr2000_${NEONSITE}_c211102.nc"
-model_year_align_urbantv = 2018
-stream_year_first_urbantv = 2018
-stream_year_last_urbantv = 2021
-stream_year_first_ndep = 2018
-model_year_align_ndep = 2018
-stream_year_last_ndep = 2021
-model_year_align_popdens = 2018
-stream_year_first_popdens = 2018
-stream_year_last_popdens = 2021
+fsurdat = "$DIN_LOC_ROOT/lnd/clm2/surfdata_map/NEON/surfdata_1x1_NEON_${NEONSITE}_hist_78pfts_CMIP6_simyr2000_c221111.nc"
stream_fldfilename_lightng = '$DIN_LOC_ROOT/atm/datm7/NASA_LIS/clmforc.Li_2016_climo1995-2013.360x720.lnfm_Total_NEONarea_c210625.nc'
-!stream_fldfilename_ndep = '$DIN_LOC_ROOT/lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP3-7.0-WACCM_2018-2030_monthly_c210826.nc'
+stream_meshfile_lightng = '$DIN_LOC_ROOT/atm/datm7/NASA_LIS/ESMF_MESH.Li_2016.360x720.NEONarea_cdf5_c221104.nc'
! h1 output stream
hist_fincl2 = 'AR','ELAI','FCEV','FCTR','FGEV','FIRA','FSA','FSH','GPP','H2OSOI',
diff --git a/cime_config/usermods_dirs/NEON/defaults/user_nl_datm_streams b/cime_config/usermods_dirs/NEON/defaults/user_nl_datm_streams
index 6244eed2fa..36f1e72b3a 100644
--- a/cime_config/usermods_dirs/NEON/defaults/user_nl_datm_streams
+++ b/cime_config/usermods_dirs/NEON/defaults/user_nl_datm_streams
@@ -1,16 +1,39 @@
+!------------------------------------------------------------------------
+! This file is used to modify datm.streams.xml generated in $RUNDIR
+! Entries should have the form
+! <streamname>:<stream_var_name> = <new stream_value>
+! The following are accepted values for an assumed streamname of foo
+! foo:meshfile = character string
+! foo:datafiles = comma separated string of full pathnames (e.g. file1,file2,file3...)
+! foo:datavars = comma separated string of field pairs (e.g. foo foobar,foo2 foobar2...)
+! foo:taxmode = one of [cycle, extend, limit]
+! foo:tintalgo = one of [lower,upper,nearest,linear,coszen]
+! foo:readmode = single (only supported mode right now)
+! foo:mapalgo = one of [bilinear,redist,nn,consf,consd,none]
+! foo:dtlimit = real (1.5 is default)
+! foo:year_first = integer
+! foo:year_last = integer
+! foo:year_align = integer
+! foo:vectors = one of [none,u:v]
+! foo:lev_dimname = one of [null,name of level dimension name]
+! foo:offset = integer
+! As an example:
+! foo:year_first = 1950
+! would change the stream year_first stream_entry to 1950 for the foo stream block
+!------------------------------------------------------------------------
presaero.SSP3-7.0:datafiles = $DIN_LOC_ROOT/atm/cam/chem/trop_mozart_aero/aero/aerodep_clm_SSP370_b.e21.BWSSP370cmip6.f09_g17.CMIP6-SSP3-7.0-WACCM.001_2018-2030_monthly_0.9x1.25_c210826.nc
presaero.SSP3-7.0:year_first=2018
-presaero.SSP3-7.0:year_last=2030
+presaero.SSP3-7.0:year_last=2022
presaero.SSP3-7.0:year_align=2018
presaero.SSP3-7.0:dtlimit=30
presndep.SSP3-7.0:datafiles = $DIN_LOC_ROOT/lnd/clm2/ndepdata/fndep_clm_f09_g17.CMIP6-SSP3-7.0-WACCM_2018-2030_monthly_c210826.nc
presndep.SSP3-7.0:year_first=2018
-presndep.SSP3-7.0:year_last=2030
+presndep.SSP3-7.0:year_last=2022
presndep.SSP3-7.0:year_align=2018
presndep.SSP3-7.0:dtlimit=30
preso3.SSP3-7.0:year_first=2018
-preso3.SSP3-7.0:year_last=2030
+preso3.SSP3-7.0:year_last=2022
preso3.SSP3-7.0:year_align=2018
preso3.SSP3-7.0:dtlimit=30
diff --git a/doc/ChangeLog b/doc/ChangeLog
index 68aea97f7e..5f16bf5634 100644
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -1,4 +1,144 @@
===============================================================
+Tag name: ctsm5.1.dev114
+Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326)/@wwieder/@olyson/@ka7eh
+Date: Sat Nov 19 18:11:15 MST 2022
+One-line Summary: Some NEON updates fixing AG sites, update MOSART, small fixes
+
+Purpose and description of changes
+----------------------------------
+
+Minor changes to python scripts and usermod_dirs for NEON cases. Also update the lightning mesh file so that it goes with the
+smaller lightning file. Have NEON use new use-cases for 2018 and 2018-PD conditions for CLM. Have NEON
+Agricultural sites run with prognostic crop. Simple fix for warning about NaN's in import/export data from/to coupler.
+
+Get NEON tests working on izumi, add --inputdata-dir to subset_data and modify_singlept_site_neon.py so they aren't tied
+to only running on cheyenne.
+
+Also update MOSART with fixes for the direct_to_outlet option.
+
+Add error checking in PartitionWoodFluxes. Fix value of albgrd_col in SurfaceAlbedoType.F90.
+Previously, the wrong value (albgri_col) was being set in InitHistory.
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed or introduced
+------------------------
+
+CTSM issues fixed (include CTSM Issue #):
+ Fixes #1871 -- Error in NEON surface datasets for AG sites
+ Fixes #1876 -- NEON data in container
+ Fixes #1889 -- NEON AG sites are running without prognostic crop
+ Fixes #1363 -- 2018_control and 2018-PD_transient use-cases for NEON
+ Fixes #1896 -- Improve misleading error message in check_for_nans
+ Fixes #1263 -- Fix partitionWood fluxes
+ Fixes #1788 -- Fix albgrd_col
+ Fixes #1901 -- Fix NEONSITE YELL
+
+ Some on #1910 -- add pandas version check to modify_singlept_site_neon.py so will abort cleanly if version not updated
+
+ Known bugs found since the previous tag (include issue #):
+ #1910 -- modify_singlept_site_neon.py has trouble on izumi
+
+Notes of particular relevance for users
+---------------------------------------
+
+Caveats for users (e.g., need to interpolate initial conditions):
+ NEON users: use neon_gcs_upload now. Filenames for NEON surface
+ datasets are changed. Start and end of simulations is different
+ for some sites, and managed by the user-mod-directories. The NEON
+ user-mod assumes transient cases will run with a transient compset
+ and the settings are slightly different for transient vs control
+ including pointing to 2018_control or 2018-PD_transient use-cases.
+
+Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables):
+ Add notes to python tools to run using conda environment setup in py_env_create
+
+Changes made to namelist defaults (e.g., changed parameter values):
+ New use cases: 2018_control and 2018-PD_transient
+
+Changes to the datasets (e.g., parameter, surface or initial files):
+ New updated NEON surface datasets
+
+Notes of particular relevance for developers:
+---------------------------------------------
+
+Caveats for developers (e.g., code that is duplicated that requires double maintenance):
+ Remove toolchain python scripts as this work was moved over to the ctsm5.2 development
+
+Changes to tests or testing:
+ Add a run_black target to the python directory Makefile to run black and not just do a black check
+ Add python modules needed for neon scripts to conda py_create_env conda environment
+
+Testing summary: regular, tools
+----------------
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ cheyenne - OK (141 NEON tests are different than baseline)
+
+ tools-tests (test/tools) (if tools have been changed):
+
+ cheyenne - OK
+ cheyenne (NEON) - PASS
+ izumi (NEON) -- OK (modify_singlept_site_neon.py test fails due to #1910)
+ izumi -- OK
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ cheyenne -- PASS
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ cheyenne ---- OK
+ izumi ------- OK
+
+ any other testing (give details below):
+ run_neon.py ran for all NEON sites ad, post-ad, and transient
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: No (other than NEON sites, and if direct_to_outlet turned on in MOSART)
+
+ Summarize any changes to answers, i.e.,
+ - what code configurations: NEON or if bypass_routing_option==direct_to_outlet in MOSART
+ - what platforms/compilers: all
+ - nature of change:
+ NEON AG sites are significantly different
+
+ NEON sites reran and reevaluated
+ MOSART direct_to_outlet option evaluated by @swensosc and @olyson
+
+Other details
+-------------
+
+List any externals directories updated (cime, rtm, mosart, cism, fates, etc.): mosart
+ mosart updated from mosart1_0_45 to mosart1_0_47 (asynchronous changes, and direct_to_outlet fixes)
+
+Pull Requests that document the changes (include PR ids):
+(https://github.com/ESCOMP/ctsm/pull)
+ #1872 -- NEON updates
+ #1814 -- Add error checking in partitionWoodFluxes
+ #1810 -- Fix albgrd_col value
+
+===============================================================
+===============================================================
Tag name: ctsm5.1.dev113
Originator(s): sacks (Bill Sacks), ekluzek (Erik Kluzek), jedwards (Jim Edwards)
Date: Fri Oct 28 11:00:26 MDT 2022
diff --git a/doc/ChangeSum b/doc/ChangeSum
index 3008f25f69..772545d00c 100644
--- a/doc/ChangeSum
+++ b/doc/ChangeSum
@@ -1,5 +1,6 @@
Tag Who Date Summary
============================================================================================================================
+ ctsm5.1.dev114 multiple 11/19/2022 Some NEON updates fixing AG sites, update MOSART, small fixes
ctsm5.1.dev113 multiple 10/28/2022 Fix some compsets; add only clauses for ESMF use statements
ctsm5.1.dev112 multiple 10/15/2022 Rework fates test definitions and add new fates tests
ctsm5.1.dev111 multiple 10/05/2022 Fixes for NEON cases
diff --git a/python/Makefile b/python/Makefile
index 4ea5fba85d..bf219f16b6 100644
--- a/python/Makefile
+++ b/python/Makefile
@@ -39,11 +39,17 @@ lint: FORCE
$(PYLINT) $(PYLINT_ARGS) $(PYLINT_SRC)
.PHONY: black
-# Run black on all of the python files here and undeneath.
+# Run the black check on all of the python files here and underneath.
# Use the black configure file to explicitly set a few things and specifiy the exact files.
black: FORCE
black --check --config pyproject.toml .
+.PHONY: run_black
+# Run black on all of the python files here and underneath.
+# Use the black configure file to explicitly set a few things and specify the exact files.
+run_black: FORCE
+ black --config pyproject.toml .
+
.PHONY: clean
clean: FORCE
find . -name '*.pyc' -exec rm {} \;
diff --git a/python/conda_env_ctsm_py.txt b/python/conda_env_ctsm_py.txt
index 8c110e8f3f..d757ae1782 100644
--- a/python/conda_env_ctsm_py.txt
+++ b/python/conda_env_ctsm_py.txt
@@ -11,6 +11,8 @@
# conda activate ctsm_py # Do this anytime you want to run a CTSM python script
#
python=3.7.9
+pandas
+tqdm
scipy
netcdf4
requests
diff --git a/python/conda_env_ctsm_py_cgd.txt b/python/conda_env_ctsm_py_cgd.txt
index 45025506a8..e7ee4af8ab 100644
--- a/python/conda_env_ctsm_py_cgd.txt
+++ b/python/conda_env_ctsm_py_cgd.txt
@@ -13,6 +13,8 @@
# conda activate ctsm_py # Do this anytime you want to run a CTSM python script
#
python=3.7.0 # The python version MUST match the python version available on CGD systems through modules exactly
+pandas
+tqdm
scipy
netcdf4
requests
diff --git a/python/conda_env_ctsm_py_latest.txt b/python/conda_env_ctsm_py_latest.txt
index a7a28c9fc3..2dc2ed518d 100644
--- a/python/conda_env_ctsm_py_latest.txt
+++ b/python/conda_env_ctsm_py_latest.txt
@@ -1,5 +1,7 @@
# This is a test python environment intended to represent the latest environment that can be built
python>=3.9.13,<3.10 # Moving to 3.10 runs into conflicts
+pandas>=1.5.1
+tqdm>=4.64.1
scipy
netcdf4
requests
diff --git a/python/ctsm/gen_mksurf_namelist.py b/python/ctsm/gen_mksurf_namelist.py
deleted file mode 100644
index 735ae0493f..0000000000
--- a/python/ctsm/gen_mksurf_namelist.py
+++ /dev/null
@@ -1,377 +0,0 @@
-# 2020-11-08 Negin Sobhani
-
-"""
-|------------------------------------------------------------------|
-|--------------------- Instructions -----------------------------|
-|------------------------------------------------------------------|
-This Python script is part of the simplified toolchain for creating
-the surface dataset for ctsm cases.
-This script should be used as the first step of the new toolchain.
-It will automatically create namelist (control file) that is
-needed for creating surface dataset and requisite intermediate files for
-running CTSM cases.
-For transient cases, it will also create a txt file that includes the
-landuse files for every year.
-
--------------------------------------------------------------------
-Instructions for running on Cheyenne/Casper:
-
-load the following into your local environment:
-
- module load python
- ncar_pylib
--------------------------------------------------------------------
-To see the available options:
- ./gen_mksurf_namelist.py --help
-
-To run the script:
- ./gen_mksurf_namelist.py
-
-To remove NPL(ncar_pylib) from your environment on Cheyenne/Casper:
- deactivate
--------------------------------------------------------------------
-"""
-
-# TODO (NS)
-
-# -[x] Add default values in the help page.
-# -[x] Add info for help page note for end_year -- by default is start_year
-# -[x] Possibly remove year --years and range options
-# Currently comment them out.
-
-# -[x] maybe a verbose option and removing debug
-# -[x] --debug mode is not working...
-
-# -[ ] add error check for hi-res and years if they are 1850 and 2005.
-# -[ ] hirespft data only for 2005? add error-check
-
-# -[x] different path for each range of years for transient cases.
-# default should be picked based on the year. 1850 - 2015 -->
-# /glade/p/cesm/cseg/inputdata/lnd/clm2/rawdata/
-# pftcftdynharv.0.25x0.25.LUH2.histsimyr1850-2015.c170629/
-# 850-1850 -->
-# pftcftdynharv.0.25x0.25.LUH2.histsimyr0850-1849.c171012
-
-
-# Import libraries
-from __future__ import print_function
-
-import os
-import sys
-import logging
-import argparse
-
-# -- import local classes for this script
-from ctsm.toolchain.ctsm_case import CtsmCase
-
-# -- import ctsm logging flags
-from ctsm.ctsm_logging import (
- setup_logging_pre_config,
- add_logging_args,
- process_logging_args,
-)
-
-logger = logging.getLogger(__name__)
-
-## valid options for resolution and SSP scenarios:
-VALID_OPTS = {
- "res": [
- "512x1024",
- "360x720cru",
- "128x256",
- "64x128",
- "48x96",
- "94x192",
- "0.23x0.31",
- "0.47x0.63",
- "0.9x1.25",
- "1.9x2.5",
- "2.5x3.33",
- "4x5",
- "10x15",
- "0.125nldas2",
- "5x5_amazon",
- "1x1_camdenNJ",
- "1x1_vancouverCAN",
- "1x1_mexicocityMEX",
- "1x1_asphaltjungleNJ",
- "1x1_brazil,1x1_urbanc_alpha",
- "1x1_numaIA,1x1_smallvilleIA",
- "0.1x0.1",
- "0.25x0.25",
- "0.5x0.5",
- "3x3min",
- "5x5min",
- "10x10min",
- "0.33x0.33",
- "0.125x0.125",
- "ne4np4,ne16np4",
- "ne30np4.pg2",
- "ne30np4.pg3",
- "ne30np4",
- "ne60np4",
- "ne120np4",
- ],
- "ssp_rcp": [
- "hist",
- "SSP1-2.6",
- "SSP3-7.0",
- "SSP5-3.4",
- "SSP2-4.5",
- "SSP1-1.9",
- "SSP4-3.4",
- "SSP4-6.0",
- "SSP5-8.5",
- ],
-}
-
-
-def get_parser():
- """
- Get parser object for this script.
- """
- parser = argparse.ArgumentParser(
- description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
- )
-
- parser.print_usage = parser.print_help
-
- parser.add_argument(
- "--sy",
- "--start-year",
- help="Simulation start year. [default: %(default)s] ",
- action="store",
- dest="start_year",
- required=False,
- type=start_year_type,
- default=2000,
- )
- parser.add_argument(
- "--ey",
- "--end-year",
- help="Simulation end year. [default: start_year] ",
- action="store",
- dest="end_year",
- required=False,
- type=int,
- )
- parser.add_argument(
- "--glc-nec",
- help="""
- Number of glacier elevation classes to use.
- [default: %(default)s]
- """,
- action="store",
- dest="glc_nec",
- type=glc_nec_type,
- default="10",
- )
- parser.add_argument(
- "--rundir",
- help="""
- Directory to run in.
- [default: %(default)s]
- """,
- action="store",
- dest="run_dir",
- required=False,
- default=os.getcwd(),
- )
- parser.add_argument(
- "--ssp-rcp",
- help="""
- Shared Socioeconomic Pathway and Representative
- Concentration Pathway Scenario name(s).
- [default: %(default)s]
- """,
- action="store",
- dest="ssp_rcp",
- required=False,
- choices=VALID_OPTS["ssp_rcp"],
- default="hist",
- )
-
- ##############################################
- # In mksurfdata.pl these options are -l --dinlc
- # But the group decided --raw_dir is more descriptive.
- # If everyone agrees, the commented out line should be removed.
- # parser.add_argument('-l','--dinlc', #--raw-dir or --rawdata-dir
-
- parser.add_argument(
- "--raw-dir",
- "--rawdata-dir",
- help="""
- /path/of/root/of/input/data',
- [default: %(default)s]
- """,
- action="store",
- dest="input_path",
- default="/glade/p/cesm/cseg/inputdata/lnd/clm2/rawdata/",
- )
- parser.add_argument(
- "--vic",
- help="""
- Flag for adding the fields required for the VIC model.
- """,
- action="store_true",
- dest="vic_flag",
- default=False,
- )
- parser.add_argument(
- "--glc",
- help="""
- Flag for adding the optional 3D glacier fields for verification of the glacier model.
- """,
- action="store_true",
- dest="glc_flag",
- default=False,
- )
- parser.add_argument(
- "--hirespft",
- help="""
- If you want to use the high-resolution pft dataset rather
- than the default lower resolution dataset.
- (Low resolution is at quarter-degree, high resolution at 3-minute)
- [Note: hires only available for 1850 and 2005.]
- """,
- action="store_true",
- dest="hres_flag",
- default=False,
- )
- parser.add_argument(
- "--nocrop",
- help="""
- Create datasets with the extensive list of prognostic crop types.
- """,
- action="store_false",
- dest="crop_flag",
- default=True,
- )
- parser.add_argument(
- "-f",
- "--fast",
- help="Toggle fast mode which does not user the large mapping file",
- action="store_true",
- dest="fast_flag",
- default=False,
- )
- parser.add_argument(
- "-r",
- "--res",
- help="""
- Resolution is the supported resolution(s) to use for files.
- [default: %(default)s]
- """,
- action="store",
- dest="res",
- choices=VALID_OPTS["res"],
- required=False,
- default="4x5",
- )
- return parser
-
-
-# -- types for this parser
-
-
-def glc_nec_type(glc):
- """
- Function for defining acceptable glc_nec input.
-
- Args:
- x (str) : glc_nec value from command line args.
-
- Raises:
- Error if value of glc_nec is not in the range
- of 1-99.
-
- Returns:
- x (int) : Acceptable glc_nec value.
- """
- glc = int(glc)
- if (glc <= 0) or (glc >= 100):
- raise argparse.ArgumentTypeError("ERROR: glc_nec must be between 1 and 99.")
- return glc.__str__()
-
-
-def start_year_type(year):
- """
- Function for defining acceptable start_year input.
-
- Args:
- year (str) : start_year string from command line args.
-
- Raises:
- Error if value of start_year is not in the range
- of 850-2105.
-
- Returns:
- year (int) : Acceptable start_year value.
- """
- year = int(year)
- if (year < 850) or (year > 2105):
- raise argparse.ArgumentTypeError(
- "ERROR: Simulation start year should be between 850 and 2105."
- )
- return year
-
-
-def main():
- """
- Main function for gen_mksurf_namelist.
- """
- # -- add logging flags from ctsm_logging
- setup_logging_pre_config()
- parser = get_parser()
- add_logging_args(parser)
-
- args = parser.parse_args()
- process_logging_args(args)
-
- res = args.res
- glc_nec = args.glc_nec
- input_path = args.input_path
- ssp_rcp = args.ssp_rcp
- crop_flag = args.crop_flag
- vic_flag = args.vic_flag
- glc_flag = args.glc_flag
- hres_flag = args.hres_flag
-
- start_year = args.start_year
- end_year = args.end_year
-
- # -- determine end_year if not given as an argument:
- if not end_year:
- end_year = start_year
-
- # -- check if the input path exist
- if not os.path.exists(input_path):
- sys.exit(
- "ERROR: \n"
- + "\t raw_dir does not exist on this machine. \n"
- + "\t Please point to the correct raw_dir using --raw-dir"
- + "or --rawdata-dir flags."
- )
-
- ctsm_case = CtsmCase(
- res,
- glc_nec,
- ssp_rcp,
- crop_flag,
- input_path,
- vic_flag,
- glc_flag,
- start_year,
- end_year,
- hres_flag,
- )
-
- logger.info("--------------------------")
- logger.info(" ctsm case : %s", ctsm_case)
- logger.info("--------------------------")
-
- ctsm_case.create_namelist_file()
-
-
-if __name__ == "__main__":
- main()
diff --git a/python/ctsm/lilac_build_ctsm.py b/python/ctsm/lilac_build_ctsm.py
index 20231c0df9..b189cb56ea 100644
--- a/python/ctsm/lilac_build_ctsm.py
+++ b/python/ctsm/lilac_build_ctsm.py
@@ -718,8 +718,13 @@ def _create_case(
else:
machine_args = ["--machine", machine]
+ cmd = os.path.join(cime_path, "scripts", "create_newcase")
+ if not os.path.exists(cmd):
+ abort(
+ "The create_newcase command doesn't exist as expected <{}> does not exist)".format(cmd)
+ )
create_newcase_cmd = [
- os.path.join(cime_path, "scripts", "create_newcase"),
+ cmd,
"--output-root",
build_dir,
"--case",
@@ -741,7 +746,12 @@ def _create_case(
create_newcase_cmd.extend(machine_args)
if inputdata_path:
create_newcase_cmd.extend(["--input-dir", inputdata_path])
- run_cmd_output_on_error(create_newcase_cmd, errmsg="Problem creating CTSM case directory")
+ if not os.path.isdir(inputdata_path):
+ abort("inputdata_path directory (<{}> does not exist)".format(inputdata_path))
+ run_cmd_output_on_error(
+ create_newcase_cmd,
+ errmsg="Problem running create_newcase to create the CTSM case directory",
+ )
subprocess.check_call([xmlchange, "LILAC_MODE=on"], cwd=case_dir)
if build_debug:
diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py
index 1dc6a522cc..8d5d81e014 100644
--- a/python/ctsm/site_and_regional/regional_case.py
+++ b/python/ctsm/site_and_regional/regional_case.py
@@ -154,7 +154,7 @@ def create_surfdata_at_reg(self, indir, file, user_mods_dir):
# specify files
fsurf_in = os.path.join(indir, file)
- fsurf_out = add_tag_to_filename(fsurf_in, self.tag)
+ fsurf_out = add_tag_to_filename(fsurf_in, self.tag, replace_res=True)
logger.info("fsurf_in: %s", fsurf_in)
logger.info("fsurf_out: %s", os.path.join(self.out_dir, fsurf_out))
@@ -194,7 +194,7 @@ def create_landuse_at_reg(self, indir, file, user_mods_dir):
# specify files
fluse_in = os.path.join(indir, file)
- fluse_out = add_tag_to_filename(fluse_in, self.tag)
+ fluse_out = add_tag_to_filename(fluse_in, self.tag, replace_res=True)
logger.info("fluse_in: %s", fluse_in)
logger.info("fluse_out: %s", os.path.join(self.out_dir, fluse_out))
diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py
index 31ab158706..05b918d316 100644
--- a/python/ctsm/site_and_regional/single_point_case.py
+++ b/python/ctsm/site_and_regional/single_point_case.py
@@ -342,7 +342,7 @@ def create_landuse_at_point(self, indir, file, user_mods_dir):
# specify files
fluse_in = os.path.join(indir, file)
- fluse_out = add_tag_to_filename(fluse_in, self.tag)
+ fluse_out = add_tag_to_filename(fluse_in, self.tag, replace_res=True)
logger.info("fluse_in: %s", fluse_in)
logger.info("fluse_out: %s", os.path.join(self.out_dir, fluse_out))
@@ -464,7 +464,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir):
# specify file
fsurf_in = os.path.join(indir, file)
- fsurf_out = add_tag_to_filename(fsurf_in, self.tag)
+ fsurf_out = add_tag_to_filename(fsurf_in, self.tag, replace_res=True)
logger.info("fsurf_in: %s", fsurf_in)
logger.info("fsurf_out: %s", os.path.join(self.out_dir, fsurf_out))
diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py
index 7a33d9c2fa..34a9e583d0 100644
--- a/python/ctsm/subset_data.py
+++ b/python/ctsm/subset_data.py
@@ -2,10 +2,10 @@
|------------------------------------------------------------------|
|--------------------- Instructions -----------------------------|
|------------------------------------------------------------------|
-Instructions for running on Cheyenne/Casper:
-load the following into your local environment
- module load python
- ncar_pylib
+Instructions for running using conda python environments:
+
+../../py_env_create
+conda activate ctsm_py
-------------------------------------------------------------------
To see the available options for single point or regional cases:
./subset_data.py --help
@@ -67,6 +67,7 @@
from ctsm.site_and_regional.regional_case import RegionalCase
from ctsm.args_utils import plon_type, plat_type
from ctsm.path_utils import path_to_ctsm_root
+from ctsm.utils import abort
# -- import ctsm logging flags
from ctsm.ctsm_logging import (
@@ -314,6 +315,14 @@ def get_parser():
action="store_true",
dest="overwrite",
)
+ subparser.add_argument(
+ "--inputdata-dir",
+ help="Top level path to the CESM inputdata directory.",
+ action="store",
+ dest="inputdatadir",
+ type=str,
+ default="defaults.cfg",
+ )
add_logging_args(subparser)
# -- print help for both subparsers
@@ -381,10 +390,19 @@ def setup_files(args, defaults, cesmroot):
if args.create_user_mods:
setup_user_mods(args.user_mods_dir, cesmroot)
+ if args.inputdatadir == "defaults.cfg":
+ clmforcingindir = defaults.get("main", "clmforcingindir")
+ else:
+ clmforcingindir = args.inputdatadir
+
+ if not os.path.isdir(clmforcingindir):
+ logger.info("clmforcingindir does not exist: %s", clmforcingindir)
+ abort("inputdata directory does not exist")
+
# DATM data
datm_type = "datm_gswp3"
dir_output_datm = "datmdata"
- dir_input_datm = defaults.get(datm_type, "dir")
+ dir_input_datm = os.path.join(clmforcingindir, defaults.get(datm_type, "dir"))
if args.create_datm:
if not os.path.isdir(os.path.join(args.out_dir, dir_output_datm)):
os.mkdir(os.path.join(args.out_dir, dir_output_datm))
@@ -398,14 +416,14 @@ def setup_files(args, defaults, cesmroot):
fluse_in = defaults.get("landuse", "landuse_" + num_pft + "pft")
file_dict = {
- "main_dir": defaults.get("main", "clmforcingindir"),
+ "main_dir": clmforcingindir,
"fdomain_in": defaults.get("domain", "file"),
"fsurf_dir": os.path.join(
- defaults.get("main", "clmforcingindir"),
+ clmforcingindir,
os.path.join(defaults.get("surfdat", "dir")),
),
"fluse_dir": os.path.join(
- defaults.get("main", "clmforcingindir"),
+ clmforcingindir,
os.path.join(defaults.get("landuse", "dir")),
),
"fsurf_in": fsurf_in,
diff --git a/python/ctsm/test/test_sys_lilac_build_ctsm.py b/python/ctsm/test/test_sys_lilac_build_ctsm.py
index f1c5e22f8f..d773749bf7 100755
--- a/python/ctsm/test/test_sys_lilac_build_ctsm.py
+++ b/python/ctsm/test/test_sys_lilac_build_ctsm.py
@@ -27,6 +27,7 @@ class TestSysBuildCtsm(unittest.TestCase):
def setUp(self):
self._tempdir = tempfile.mkdtemp()
+ self.assertTrue(os.path.isdir(self._tempdir))
# Hack around a check in CIME: As of https://github.com/ESMCI/cime/pull/4228, If
# NCAR_HOST is in the environment, CIME checks if the machine you're running on is
@@ -68,6 +69,7 @@ def test_buildSetup_userDefinedMachine_minimalInfo(self):
gmake_j=8,
no_pnetcdf=True,
)
+ self.assertTrue(os.path.isdir(build_dir))
# the critical piece of this test is that the above command doesn't generate any
# errors; however we also do some assertions below
@@ -87,6 +89,7 @@ def test_buildSetup_userDefinedMachine_allInfo(self):
build_dir = os.path.join(self._tempdir, "ctsm_build")
inputdata_path = os.path.realpath(os.path.join(self._tempdir, "my_inputdata"))
os.makedirs(inputdata_path)
+ self.assertTrue(os.path.isdir(inputdata_path))
build_ctsm(
cime_path=_CIME_PATH,
build_dir=build_dir,
@@ -107,6 +110,7 @@ def test_buildSetup_userDefinedMachine_allInfo(self):
build_with_openmp=True,
inputdata_path=os.path.join(self._tempdir, "my_inputdata"),
)
+ self.assertTrue(os.path.isdir(build_dir))
# the critical piece of this test is that the above command doesn't generate any
# errors; however we also do some assertions below
diff --git a/python/ctsm/test/test_unit_subset_data.py b/python/ctsm/test/test_unit_subset_data.py
new file mode 100755
index 0000000000..aa4c412bbb
--- /dev/null
+++ b/python/ctsm/test/test_unit_subset_data.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python3
+"""
+Unit tests for subset_data
+
+You can run this by:
+ python -m unittest test_unit_subset_data.py
+"""
+
+import unittest
+import configparser
+import os
+import sys
+
+# -- add python/ctsm to path (needed if we want to run the test stand-alone)
+_CTSM_PYTHON = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir)
+sys.path.insert(1, _CTSM_PYTHON)
+
+# pylint: disable=wrong-import-position
+from ctsm import unit_testing
+from ctsm.subset_data import get_parser, setup_files
+from ctsm.path_utils import path_to_ctsm_root
+
+# pylint: disable=invalid-name
+
+
+class TestSubsetData(unittest.TestCase):
+ """
+ Basic class for testing SubsetData class in subset_data.py.
+ """
+
+ def setUp(self):
+ sys.argv = ["subset_data", "point"]
+ DEFAULTS_FILE = "default_data.cfg"
+ parser = get_parser()
+ self.args = parser.parse_args()
+ self.cesmroot = path_to_ctsm_root()
+ self.defaults = configparser.ConfigParser()
+ self.defaults.read(os.path.join(self.cesmroot, "tools/site_and_regional", DEFAULTS_FILE))
+
+ def test_inputdata_setup_files_basic(self):
+ """
+ Test
+ """
+ setup_files(self.args, self.defaults, self.cesmroot)
+
+ def test_inputdata_setup_files_inputdata_dne(self):
+ """
+ Test that inputdata directory does not exist
+ """
+ self.defaults.set("main", "clmforcingindir", "/zztop")
+ with self.assertRaisesRegex(SystemExit, "inputdata directory does not exist"):
+ setup_files(self.args, self.defaults, self.cesmroot)
+
+ def test_inputdata_setup_files_bad_inputdata_arg(self):
+ """
+ Test that inputdata directory provided on command line does not exist if it's bad
+ """
+ self.args.inputdatadir = "/zztop"
+ with self.assertRaisesRegex(SystemExit, "inputdata directory does not exist"):
+ setup_files(self.args, self.defaults, self.cesmroot)
+
+
+if __name__ == "__main__":
+ unit_testing.setup_for_tests()
+ unittest.main()
diff --git a/python/ctsm/test/test_unit_utils_add_tag.py b/python/ctsm/test/test_unit_utils_add_tag.py
new file mode 100755
index 0000000000..bef69f6154
--- /dev/null
+++ b/python/ctsm/test/test_unit_utils_add_tag.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python3
+
+"""Unit tests for add_tag_to_filename
+"""
+
+import unittest
+
+from unittest.mock import patch
+from datetime import date
+from ctsm import unit_testing
+
+from ctsm import utils
+
+# Allow names that pylint doesn't like, because otherwise I find it hard
+# to make readable unit test names
+# pylint: disable=invalid-name
+
+
+class TestUtilsAddTag(unittest.TestCase):
+ """Tests of utils: add_tag_to_filename"""
+
+ @staticmethod
+ def _fake_today():
+ """Set the fake date to Halloween"""
+ return date(year=2022, month=10, day=31)
+
+ def testSimple(self):
+ """Simple test of surface dataset name"""
+
+ fsurf_in = "surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c221105.nc"
+ with patch("ctsm.utils.date") as mock_date:
+ mock_date.today.side_effect = self._fake_today
+
+ fsurf_out = utils.add_tag_to_filename(fsurf_in, "tag")
+ fsurf_out2 = utils.add_tag_to_filename(fsurf_in, "tag", replace_res=True)
+
+ expect_fsurf = "surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_tag_c221031.nc"
+ self.assertEqual(expect_fsurf, fsurf_out, "Expect filenames to be as expected")
+ expect_fsurf2 = "surfdata_tag_hist_16pfts_Irrig_CMIP6_simyr2000_c221031.nc"
+ self.assertEqual(expect_fsurf2, fsurf_out2, "Expect filenames to be as expected")
+
+ def testSimpleLanduse(self):
+ """Simple test of landuse dataset name"""
+
+ landuse_in = "landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_c190214.nc"
+ with patch("ctsm.utils.date") as mock_date:
+ mock_date.today.side_effect = self._fake_today
+
+ landuse_out = utils.add_tag_to_filename(landuse_in, "tag")
+ landuse_out2 = utils.add_tag_to_filename(landuse_in, "tag", replace_res=True)
+
+ expect_landuse = (
+ "landuse.timeseries_0.9x1.25_hist_78pfts_CMIP6_simyr1850-2015_tag_c221031.nc"
+ )
+ self.assertEqual(expect_landuse, landuse_out, "Expect filenames to be as expected")
+ expect_landuse2 = "landuse.timeseries_tag_hist_78pfts_CMIP6_simyr1850-2015_c221031.nc"
+ self.assertEqual(expect_landuse2, landuse_out2, "Expect filenames to be as expected")
+
+ def testSimpleDatmDomain(self):
+ """Simple test of datm domain dataset name"""
+
+ file_in = "domain.lnd.360x720_gswp3.0v1.c170606.nc"
+ with patch("ctsm.utils.date") as mock_date:
+ mock_date.today.side_effect = self._fake_today
+
+ file_out = utils.add_tag_to_filename(file_in, "tag")
+
+ expect_filename = "domain.lnd.360x720_gswp3.0v1_tag_c221031.nc"
+ self.assertEqual(expect_filename, file_out, "Expect filenames to be as expected")
+
+ def testSimpleDomain(self):
+ """Simple test of domain dataset name"""
+
+ file_in = "domain.lnd.fv0.9x1.25_gx1v7.151020.nc"
+ with patch("ctsm.utils.date") as mock_date:
+ mock_date.today.side_effect = self._fake_today
+
+ file_out = utils.add_tag_to_filename(file_in, "tag")
+
+ expect_filename = "domain.lnd.fv0.9x1.25_gx1v7_tag_c221031.nc"
+ self.assertEqual(expect_filename, file_out, "Expect filenames to be as expected")
+
+ def testSurfReplaceListDomain(self):
+ """Simple test of list of surface dataset name with replace_res option"""
+
+ files_in = [
+ "surfdata_48x96_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc",
+ "surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc",
+ "surfdata_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc",
+ "surfdata_1.9x2.5_hist_16pfts_Irrig_CMIP6_simyr2000_c190304.nc",
+ "surfdata_1.9x2.5_hist_16pfts_Irrig_CMIP6_simyr2000_c190304.nc",
+ "surfdata_4x5_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc",
+ "surfdata_10x15_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc",
+ "surfdata_10x15_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc",
+ "surfdata_0.125nldas2_hist_16pfts_Irrig_CMIP6_simyr2005_c190412.nc",
+ "surfdata_64x128_hist_16pfts_Irrig_CMIP6_simyr2000_c190214.nc",
+ "surfdata_0.9x1.25_hist_78pfts_CMIP6_simyr2000_c190214.nc",
+ "surfdata_1.9x2.5_hist_78pfts_CMIP6_simyr2000_c190304.nc",
+ "surfdata_0.125x0.125_hist_78pfts_CMIP6_simyr2005_c190624.nc",
+ "surfdata_10x15_hist_78pfts_CMIP6_simyr2000_c190214.nc",
+ "surfdata_4x5_hist_78pfts_CMIP6_simyr2000_c190214.nc",
+ "surfdata_1.9x2.5_hist_16pfts_Irrig_CMIP6_simyr1850_c190304.nc",
+ "surfdata_10x15_hist_16pfts_Irrig_CMIP6_simyr1850_c190214.nc",
+ "surfdata_4x5_hist_16pfts_Irrig_CMIP6_simyr1850_c190214.nc",
+ "surfdata_48x96_hist_78pfts_CMIP6_simyr1850_c190214.nc",
+ "surfdata_0.9x1.25_hist_78pfts_CMIP6_simyr1850_c190214.nc",
+ "surfdata_1.9x2.5_hist_78pfts_CMIP6_simyr1850_c190304.nc",
+ "surfdata_10x15_hist_78pfts_CMIP6_simyr1850_c190214.nc",
+ "surfdata_4x5_hist_78pfts_CMIP6_simyr1850_c190214.nc",
+ "surfdata_ne0np4.ARCTICGRIS.ne30x8_hist_78pfts_CMIP6_simyr2000_c200426.nc",
+ "surfdata_C96_hist_78pfts_CMIP6_simyr1850_c200317.nc",
+ "surfdata_C96_hist_78pfts_CMIP6_simyr1850_c20221108.nc",
+ "surfdata_0.9x1.25_hist_16pfts_nourb_CMIP6_simyrPtVg_c181114.nc",
+ ]
+ expect_filenames = [
+ "surfdata_tag_hist_16pfts_Irrig_CMIP6_simyr2000_c221031.nc",
+ "surfdata_tag_hist_16pfts_Irrig_CMIP6_simyr2000_c221031.nc",
+ "surfdata_tag_hist_16pfts_Irrig_CMIP6_simyr2000_c221031.nc",
+ "surfdata_tag_hist_16pfts_Irrig_CMIP6_simyr2000_c221031.nc",
+ "surfdata_tag_hist_16pfts_Irrig_CMIP6_simyr2000_c221031.nc",
+ "surfdata_tag_hist_16pfts_Irrig_CMIP6_simyr2000_c221031.nc",
+ "surfdata_tag_hist_16pfts_Irrig_CMIP6_simyr2000_c221031.nc",
+ "surfdata_tag_hist_16pfts_Irrig_CMIP6_simyr2000_c221031.nc",
+ "surfdata_tag_hist_16pfts_Irrig_CMIP6_simyr2005_c221031.nc",
+ "surfdata_tag_hist_16pfts_Irrig_CMIP6_simyr2000_c221031.nc",
+ "surfdata_tag_hist_78pfts_CMIP6_simyr2000_c221031.nc",
+ "surfdata_tag_hist_78pfts_CMIP6_simyr2000_c221031.nc",
+ "surfdata_tag_hist_78pfts_CMIP6_simyr2005_c221031.nc",
+ "surfdata_tag_hist_78pfts_CMIP6_simyr2000_c221031.nc",
+ "surfdata_tag_hist_78pfts_CMIP6_simyr2000_c221031.nc",
+ "surfdata_tag_hist_16pfts_Irrig_CMIP6_simyr1850_c221031.nc",
+ "surfdata_tag_hist_16pfts_Irrig_CMIP6_simyr1850_c221031.nc",
+ "surfdata_tag_hist_16pfts_Irrig_CMIP6_simyr1850_c221031.nc",
+ "surfdata_tag_hist_78pfts_CMIP6_simyr1850_c221031.nc",
+ "surfdata_tag_hist_78pfts_CMIP6_simyr1850_c221031.nc",
+ "surfdata_tag_hist_78pfts_CMIP6_simyr1850_c221031.nc",
+ "surfdata_tag_hist_78pfts_CMIP6_simyr1850_c221031.nc",
+ "surfdata_tag_hist_78pfts_CMIP6_simyr1850_c221031.nc",
+ "surfdata_tag_hist_78pfts_CMIP6_simyr2000_c221031.nc",
+ "surfdata_tag_hist_78pfts_CMIP6_simyr1850_c221031.nc",
+ "surfdata_tag_hist_78pfts_CMIP6_simyr1850_c221031.nc",
+ "surfdata_tag_hist_16pfts_nourb_CMIP6_simyrPtVg_c221031.nc",
+ ]
+ self.assertEqual(
+ len(files_in), len(expect_filenames), "length of arrays does not match as expected"
+ )
+ for i, file_in in enumerate(files_in):
+
+ with patch("ctsm.utils.date") as mock_date:
+ mock_date.today.side_effect = self._fake_today
+
+ file_out = utils.add_tag_to_filename(file_in, "tag", replace_res=True)
+
+ self.assertEqual(expect_filenames[i], file_out, "Expect filenames to be as expected")
+
+
+if __name__ == "__main__":
+ unit_testing.setup_for_tests()
+ unittest.main()
diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py
index 2851a3b619..42444e32c5 100644
--- a/python/ctsm/utils.py
+++ b/python/ctsm/utils.py
@@ -4,6 +4,7 @@
import os
import sys
import string
+import re
import pdb
from datetime import date
@@ -43,21 +44,29 @@ def fill_template_file(path_to_template, path_to_final, substitutions):
final_file.write(final_file_contents)
-def add_tag_to_filename(filename, tag):
+def add_tag_to_filename(filename, tag, replace_res=False):
"""
Add a tag and replace timetag of a filename
Expects file to end with [._]cYYMMDD.nc or [._]YYMMDD.nc
+ or with 4-digit years YYYYMMDD.
Add the tag to just before that ending part
and change the ending part to the current time tag.
+ if replace_res is True, then replace the resolution
+ part of the filename. Expects the file to start with
+ [a-z.]_ and then the resolution.
+
Parameters
----------
filename (str) : file name
tag (str) : string of a tag to be added to the end of filename
+ (or to replace the resolution part of the filename)
Raises
------
Error: When it cannot find . and _ in the filename.
+ Error: When it's asked to replace the resolution and
+ can't figure out where that is in the filename.
Returns
------
@@ -69,11 +78,27 @@ def add_tag_to_filename(filename, tag):
if basename[cend] == "c":
cend = cend - 1
if (basename[cend] != ".") and (basename[cend] != "_"):
- err_msg = "Trouble figuring out where to add tag to filename: " + filename
- abort(err_msg)
+ # Check if date string at end includes a 4 digit year
+ cend = -12
+ if basename[cend] == "c":
+ cend = cend - 1
+ if (basename[cend] != ".") and (basename[cend] != "_"):
+ err_msg = "Trouble figuring out where to add tag to filename: " + filename
+ abort(err_msg)
today = date.today()
today_string = today.strftime("%y%m%d")
- fname_out = basename[:cend] + "_" + tag + "_c" + today_string + ".nc"
+ if not replace_res:
+ fname_out = basename[:cend] + "_" + tag + "_c" + today_string + ".nc"
+ else:
+ match = re.fullmatch(r"([a-z.]+)_([Cfvnenp0-9x.crunldasA-Z]+)_(.+?)", basename[:cend])
+ if match is not None:
+ fname_out = (
+ match.group(1) + "_" + tag + "_" + match.group(3) + "_c" + today_string + ".nc"
+ )
+ else:
+ abort(
+ "Trouble figuring out where to replace the resolution in the filename: " + filename
+ )
return fname_out
diff --git a/src/biogeochem/CNProductsMod.F90 b/src/biogeochem/CNProductsMod.F90
index 9744b04aed..b6e5d6dad3 100644
--- a/src/biogeochem/CNProductsMod.F90
+++ b/src/biogeochem/CNProductsMod.F90
@@ -615,19 +615,19 @@ subroutine PartitionWoodFluxes(this, bounds, &
if (pprod_tot > 0) then
pprod10_frac = pprod10 / pprod_tot
pprod100_frac = pprod100 / pprod_tot
- else
- ! Avoid divide by 0
- pprod10_frac = 0._r8
- pprod100_frac = 0._r8
+ ! Note that the patch-level fluxes are expressed per unit gridcell area. So, to go
+ ! from patch-level fluxes to gridcell-level fluxes, we simply add up the various
+ ! patch contributions, without having to multiply by any area weightings.
+ this%dwt_prod10_gain_grc(g) = this%dwt_prod10_gain_grc(g) + &
+ dwt_wood_product_gain_patch(p) * pprod10_frac
+ this%dwt_prod100_gain_grc(g) = this%dwt_prod100_gain_grc(g) + &
+ dwt_wood_product_gain_patch(p) * pprod100_frac
+ else if (dwt_wood_product_gain_patch(p) > 0) then
+ call endrun(&
+ msg='ERROR: dwt_wood_product_gain_patch(p) > 0' // &
+ errMsg(sourcefile, __LINE__))
end if
- ! Note that the patch-level fluxes are expressed per unit gridcell area. So, to go
- ! from patch-level fluxes to gridcell-level fluxes, we simply add up the various
- ! patch contributions, without having to multiply by any area weightings.
- this%dwt_prod10_gain_grc(g) = this%dwt_prod10_gain_grc(g) + &
- dwt_wood_product_gain_patch(p) * pprod10_frac
- this%dwt_prod100_gain_grc(g) = this%dwt_prod100_gain_grc(g) + &
- dwt_wood_product_gain_patch(p) * pprod100_frac
end do
end subroutine PartitionWoodFluxes
diff --git a/src/biogeophys/SurfaceAlbedoType.F90 b/src/biogeophys/SurfaceAlbedoType.F90
index cf6b0a518a..dba2938d98 100644
--- a/src/biogeophys/SurfaceAlbedoType.F90
+++ b/src/biogeophys/SurfaceAlbedoType.F90
@@ -188,7 +188,7 @@ subroutine InitHistory(this, bounds)
avgflag='A', long_name='cosine of solar zenith angle', &
ptr_col=this%coszen_col, default='inactive')
- this%albgri_col(begc:endc,:) = spval
+ this%albgrd_col(begc:endc,:) = spval
call hist_addfld2d (fname='ALBGRD', units='proportion', type2d='numrad', &
avgflag='A', long_name='ground albedo (direct)', &
ptr_col=this%albgrd_col, default='inactive')
diff --git a/src/cpl/lilac/lnd_import_export.F90 b/src/cpl/lilac/lnd_import_export.F90
index 2da277dcee..281666c3e7 100644
--- a/src/cpl/lilac/lnd_import_export.F90
+++ b/src/cpl/lilac/lnd_import_export.F90
@@ -571,7 +571,7 @@ subroutine state_getimport(state, fb, fldname, bounds, output, ungridded_index,
end if
! Check for nans
- call check_for_nans(output, trim(fldname), bounds%begg)
+ call check_for_nans(output, trim(fldname), bounds%begg, "output")
end subroutine state_getimport
@@ -657,7 +657,7 @@ subroutine state_setexport(state, fb, fldname, bounds, input, minus, ungridded_i
end if
! check for nans
- call check_for_nans(input, trim(fldname), bounds%begg)
+ call check_for_nans(input, trim(fldname), bounds%begg, "input")
end subroutine state_setexport
diff --git a/src/cpl/mct/lnd_import_export.F90 b/src/cpl/mct/lnd_import_export.F90
index 2c84d2e471..3f7e67af68 100644
--- a/src/cpl/mct/lnd_import_export.F90
+++ b/src/cpl/mct/lnd_import_export.F90
@@ -136,7 +136,7 @@ subroutine lnd_import( bounds, x2l, glc_present, atm2lnd_inst, glc2lnd_inst, wat
! Check for nans from coupler
!--------------------------
- call check_for_nans(x2l(:,i), fname, begg)
+ call check_for_nans(x2l(:,i), fname, begg, "x2l")
end do
@@ -344,7 +344,7 @@ subroutine lnd_export( bounds, waterlnd2atmbulk_inst, lnd2atm_inst, lnd2glc_inst
! Check for nans to coupler
!--------------------------
- call check_for_nans(l2x(:,i), fname, begg)
+ call check_for_nans(l2x(:,i), fname, begg, "l2x")
end do
diff --git a/src/cpl/nuopc/lnd_import_export.F90 b/src/cpl/nuopc/lnd_import_export.F90
index 0e7a5e2eef..340009ccb3 100644
--- a/src/cpl/nuopc/lnd_import_export.F90
+++ b/src/cpl/nuopc/lnd_import_export.F90
@@ -1080,7 +1080,7 @@ subroutine state_getimport_1d(state, fldname, ctsmdata, rc)
do g = 1,size(ctsmdata)
ctsmdata(g) = fldptr1d(g)
end do
- call check_for_nans(ctsmdata, trim(fldname), 1)
+ call check_for_nans(ctsmdata, trim(fldname), 1, "import_1D")
end subroutine state_getimport_1d
@@ -1114,7 +1114,7 @@ subroutine state_getimport_2d(state, fldname, ctsmdata, rc)
do g = 1,size(ctsmdata,dim=1)
ctsmdata(g,n) = fldptr2d(n,g)
end do
- call check_for_nans(ctsmdata(:,n), trim(fldname)//trim(cnum), 1)
+ call check_for_nans(ctsmdata(:,n), trim(fldname)//trim(cnum), 1, "import_2D")
end do
end subroutine state_getimport_2d
@@ -1167,7 +1167,7 @@ subroutine state_setexport_1d(state, fldname, ctsmdata, init_spval, minus, rc)
fldptr1d(g) = ctsmdata(g)
end do
end if
- call check_for_nans(ctsmdata, trim(fldname), 1)
+ call check_for_nans(ctsmdata, trim(fldname), 1, "export_1D")
end subroutine state_setexport_1d
@@ -1222,7 +1222,7 @@ subroutine state_setexport_2d(state, fldname, ctsmdata, init_spval, minus, rc)
fldptr2d(n,g) = ctsmdata(g,n)
end do
end if
- call check_for_nans(ctsmdata(:,n), trim(fldname)//trim(cnum), 1)
+ call check_for_nans(ctsmdata(:,n), trim(fldname)//trim(cnum), 1, "export_2D")
end do
end subroutine state_setexport_2d
diff --git a/src/cpl/utils/lnd_import_export_utils.F90 b/src/cpl/utils/lnd_import_export_utils.F90
index 032cb19b6f..4b7941da5b 100644
--- a/src/cpl/utils/lnd_import_export_utils.F90
+++ b/src/cpl/utils/lnd_import_export_utils.F90
@@ -140,12 +140,13 @@ end subroutine check_for_errors
!=============================================================================
- subroutine check_for_nans(array, fname, begg)
+ subroutine check_for_nans(array, fname, begg, direction)
! input/output variables
real(r8) , intent(in) :: array(:)
character(len=*) , intent(in) :: fname
integer , intent(in) :: begg
+ character(len=*) , intent(in) :: direction
! local variables
integer :: i
@@ -161,7 +162,7 @@ subroutine check_for_nans(array, fname, begg)
write(iulog,*) "NaN found in field ", trim(fname), ' at gridcell index ',begg+i-1
end if
end do
- call shr_sys_abort(' ERROR: One or more of the output from CLM to the coupler are NaN ' )
+ call shr_sys_abort(' ERROR: One or more of the CTSM cap '//direction//' fields are NaN ' )
end if
end subroutine check_for_nans
diff --git a/test/tools/nl_files/modify_data_YELL b/test/tools/nl_files/modify_data_YELL
index e76322cdeb..159c92ae63 100644
--- a/test/tools/nl_files/modify_data_YELL
+++ b/test/tools/nl_files/modify_data_YELL
@@ -1 +1 @@
---neon_site YELL --surf_dir CSMDATA/lnd/clm2/surfdata_map/NEON --out_dir EXEDIR
+--neon_site YELL --surf_dir CSMDATA/lnd/clm2/surfdata_map/NEON --out_dir EXEDIR --inputdata-dir CSMDATA
diff --git a/test/tools/nl_files/run_neon_OSBS b/test/tools/nl_files/run_neon_OSBS
index c49fb77783..0c274b13ad 100644
--- a/test/tools/nl_files/run_neon_OSBS
+++ b/test/tools/nl_files/run_neon_OSBS
@@ -1 +1 @@
---verbose --run-type ad --setup-only
+--verbose --run-type ad --setup-only --neon-site OSBS
diff --git a/test/tools/nl_files/subset_data_KONA b/test/tools/nl_files/subset_data_KONA
index cb743f2b45..c3be007869 100644
--- a/test/tools/nl_files/subset_data_KONA
+++ b/test/tools/nl_files/subset_data_KONA
@@ -1 +1 @@
-point --lon 263.38956 --lat 39.1082 --site KONA --dompft 17 19 23 45 --pctpft 28 12 32 28 --crop --create-domain --create-surface --outdir EXEDIR/KONA_user-mod_and_data --user-mods-dir EXEDIR/KONA_user-mod_and_data --verbose
+point --lon 263.38956 --lat 39.1082 --site KONA --dompft 17 19 23 45 --pctpft 28 12 32 28 --crop --create-domain --create-surface --outdir EXEDIR/KONA_user-mod_and_data --user-mods-dir EXEDIR/KONA_user-mod_and_data --verbose --inputdata-dir CSMDATA
diff --git a/test/tools/nl_files/subset_data_US-UMB b/test/tools/nl_files/subset_data_US-UMB
index 499b5f53fd..935b0dc99d 100644
--- a/test/tools/nl_files/subset_data_US-UMB
+++ b/test/tools/nl_files/subset_data_US-UMB
@@ -1 +1 @@
-point --lon 275.28626 --lat 45.5598 --site 1x1_US-UMB --dompft 7 --cap-saturation --uniform-snowpack --create-surface --outdir EXEDIR/US-UMB_user-mod_and_data --user-mods-dir EXEDIR/US-UMB_user-mod_and_data --verbose
+point --lon 275.28626 --lat 45.5598 --site 1x1_US-UMB --dompft 7 --cap-saturation --uniform-snowpack --create-surface --outdir EXEDIR/US-UMB_user-mod_and_data --user-mods-dir EXEDIR/US-UMB_user-mod_and_data --verbose --inputdata-dir CSMDATA
diff --git a/test/tools/nl_files/subset_data_YELL b/test/tools/nl_files/subset_data_YELL
index 5e142713df..8295830c25 100644
--- a/test/tools/nl_files/subset_data_YELL
+++ b/test/tools/nl_files/subset_data_YELL
@@ -1 +1 @@
-point --lon 250.45804 --lat 44.95597 --site YELL --dompft 1 --crop --create-domain --create-surface --outdir EXEDIR/YELL_user-mod_and_data --user-mods-dir EXEDIR/YELL_user-mod_and_data --verbose
+point --lon 250.45804 --lat 44.95597 --site YELL --dompft 1 --crop --create-domain --create-surface --outdir EXEDIR/YELL_user-mod_and_data --user-mods-dir EXEDIR/YELL_user-mod_and_data --verbose --inputdata-dir CSMDATA
diff --git a/test/tools/nl_files/subset_data_f09_US_pt b/test/tools/nl_files/subset_data_f09_US_pt
index 4acdfeabd4..bf6d5e2861 100644
--- a/test/tools/nl_files/subset_data_f09_US_pt
+++ b/test/tools/nl_files/subset_data_f09_US_pt
@@ -1 +1 @@
-point --lon 257.5 --lat 43.822 --site 1x1_ --include-nonveg --crop --create-landuse --create-datm --create-user-mods --datm-syr 2000 --datm-eyr 2000 --create-surface --outdir EXEDIR/f09_US_pt_user-mod_and_data --user-mods-dir EXEDIR/f09_US_pt_user-mod_and_data --verbose
+point --lon 257.5 --lat 43.822 --site 1x1_ --include-nonveg --crop --create-landuse --create-datm --create-user-mods --datm-syr 2000 --datm-eyr 2000 --create-surface --outdir EXEDIR/f09_US_pt_user-mod_and_data --user-mods-dir EXEDIR/f09_US_pt_user-mod_and_data --verbose --inputdata-dir CSMDATA
diff --git a/test/tools/nl_files/subset_data_region1 b/test/tools/nl_files/subset_data_region1
index c1c5607239..fce83f0e2e 100644
--- a/test/tools/nl_files/subset_data_region1
+++ b/test/tools/nl_files/subset_data_region1
@@ -1 +1 @@
-region --lat1 -40 --lat2 15 --lon1 275 --lon2 330 --create-domain --create-surface --create-landuse --verbose --overwrite --reg test1
+region --lat1 -40 --lat2 15 --lon1 275 --lon2 330 --create-domain --create-surface --create-landuse --verbose --overwrite --reg test1 --inputdata-dir CSMDATA
diff --git a/test/tools/test_driver.sh b/test/tools/test_driver.sh
index 1b3c141d79..ce501f980a 100755
--- a/test/tools/test_driver.sh
+++ b/test/tools/test_driver.sh
@@ -54,8 +54,6 @@ module load nco
module load ncl
module load conda
-$CESMDATAROOT/py_env_create
-conda activate ctsm_py
##omp threads
@@ -119,8 +117,6 @@ module load openmpi
module load nco
module load conda
module load ncl
-$CESMDATAROOT/py_env_create
-conda activate ctsm_py
##omp threads
@@ -220,8 +216,6 @@ module load compiler/intel
module load tool/nco
module load tool/netcdf
module load lang/python
-$CESMDATAROOT/py_env_create
-conda activate ctsm_py
export NETCDF_DIR=\$NETCDF_PATH
export INC_NETCDF=\${NETCDF_PATH}/include
@@ -303,8 +297,6 @@ module load compiler/intel
module load tool/nco
module load tool/netcdf
module load lang/python
-$CESMDATAROOT/py_env_create
-conda activate ctsm_py
export NETCDF_DIR=\$NETCDF_PATH
export INC_NETCDF=\${NETCDF_PATH}/include
@@ -380,6 +372,10 @@ else
fi
fi
+# Setup conda environment
+\$CLM_ROOT/py_env_create
+conda activate ctsm_py
+
##output files
clm_log=\${initdir}/td.\${JOBID}.log
if [ -f \$clm_log ]; then
diff --git a/tools/site_and_regional/default_data.cfg b/tools/site_and_regional/default_data.cfg
index f689c99044..7e841dca54 100644
--- a/tools/site_and_regional/default_data.cfg
+++ b/tools/site_and_regional/default_data.cfg
@@ -2,7 +2,7 @@
clmforcingindir = /glade/p/cesmdata/inputdata
[datm_gswp3]
-dir = /glade/p/cgd/tss/CTSM_datm_forcing_data/atm_forcing.datm7.GSWP3.0.5d.v1.c170516
+dir = atm/datm7/atm_forcing.datm7.GSWP3.0.5d.v1.c170516
domain = domain.lnd.360x720_gswp3.0v1.c170606.nc
solardir = Solar
precdir = Precip
diff --git a/tools/site_and_regional/modify_singlept_site_neon.py b/tools/site_and_regional/modify_singlept_site_neon.py
index 3610b0bb5c..2798f463ec 100755
--- a/tools/site_and_regional/modify_singlept_site_neon.py
+++ b/tools/site_and_regional/modify_singlept_site_neon.py
@@ -16,14 +16,11 @@
- Modify surface dataset with downloaded data.
-------------------------------------------------------------------
-Instructions for running on Cheyenne/Casper:
+Instructions for running using conda python environments:
-load the following into your local environment
- module load python
- ncar_pylib
+../../py_env_create
+conda activate ctsm_py
-To remove NPL from your environment on Cheyenne/Casper:
- deactivate
-------------------------------------------------------------------
To see the available options:
./modify_singlept_site_neon.py --help
@@ -50,6 +47,7 @@
import numpy as np
import pandas as pd
import xarray as xr
+from packaging import version
from datetime import date
from getpass import getuser
@@ -152,6 +150,18 @@ def get_parser():
required=False,
default="/glade/scratch/" + myname + "/single_point_neon_updated/",
)
+ parser.add_argument(
+ "--inputdata-dir",
+ help="""
+ Directory where the CESM inputdata is located.
+ [default: %(default)s]
+ """,
+ action="store",
+ dest="inputdatadir",
+ type=str,
+ required=False,
+ default="/glade/p/cesmdata/cseg/inputdata"
+ )
parser.add_argument(
"-d",
"--debug",
@@ -250,8 +260,7 @@ def find_surffile(surf_dir, site_name):
surf_file (str): name of the surface dataset file
"""
- # sf_name = "surfdata_hist_16pfts_Irrig_CMIP6_simyr2000_"+site_name+"*.nc"
- sf_name = "surfdata_*hist_78pfts_CMIP6_simyr2000_" + site_name + "*.nc"
+ sf_name = "surfdata_1x1_NEON_"+site_name+"*hist_78pfts_CMIP6_simyr2000_*.nc"
print (os.path.join(surf_dir , sf_name))
surf_file = sorted(glob.glob(os.path.join(surf_dir , sf_name)))
@@ -265,15 +274,15 @@ def find_surffile(surf_dir, site_name):
surf_file = surf_file[0]
else:
sys.exit(
- "Surface data for this site " + site_name + "was not found:" + surf_file,
- ".",
- "\n",
- "Please run ./subset_data.py for this site.",
+ "Surface data for this site " + str(site_name) + " was not found:" + str(surf_dir) + str(sf_name) +
+ "." +
+ "\n" +
+ "Please run ./subset_data.py for this site."
)
return surf_file
-def find_soil_structure(surf_file):
+def find_soil_structure(args, surf_file):
"""
Function for finding surface dataset soil
strucutre using surface data metadata.
@@ -302,7 +311,7 @@ def find_soil_structure(surf_file):
print("------------")
# print (f1.attrs["Soil_texture_raw_data_file_name"])
- clm_input_dir = "/glade/p/cesmdata/cseg/inputdata/lnd/clm2/rawdata/"
+ clm_input_dir = os.path.join( args.inputdatadir, "lnd/clm2/rawdata/" )
surf_soildepth_file = os.path.join(
clm_input_dir, f1.attrs["Soil_texture_raw_data_file_name"]
)
@@ -510,6 +519,12 @@ def main():
if args.debug:
logging.basicConfig(level=logging.DEBUG)
+ # Check if pandas is a recent enough version
+ pdvers = pd.__version__
+ if version.parse(pdvers) < version.parse("1.1.0"):
+ sys.exit("The pandas version in your python environment is too old, update to a newer version of pandas (>=1.1.0): version=%s" % pdvers )
+
+
file_time = check_neon_time()
# -- specify site from which to extract data
@@ -538,7 +553,7 @@ def main():
f1 = xr.open_dataset(surf_file)
# -- Find surface dataset soil depth information
- soil_bot, soil_top = find_soil_structure(surf_file)
+ soil_bot, soil_top = find_soil_structure(args, surf_file)
# -- Find surface dataset soil levels
# TODO: how? NS uses metadata on file to find
@@ -682,7 +697,7 @@ def main():
print("Updating PCT_NAT_PFT")
#print (f2.PCT_NAT_PFT)
print(f2.PCT_NAT_PFT.values[0])
- f2.PCT_NAT_PFT.values[0] = [[100.0]]
+ #f2.PCT_NAT_PFT.values[0] = [[100.0]]
print(f2.PCT_NAT_PFT[0].values)
out_dir = args.out_dir
diff --git a/tools/site_and_regional/neon_s3_upload b/tools/site_and_regional/neon_gcs_upload
similarity index 80%
rename from tools/site_and_regional/neon_s3_upload
rename to tools/site_and_regional/neon_gcs_upload
index 447886e936..40afef8e74 100755
--- a/tools/site_and_regional/neon_s3_upload
+++ b/tools/site_and_regional/neon_gcs_upload
@@ -9,8 +9,7 @@ import os, sys
_CTSM_PYTHON = os.path.abspath(os.path.join(os.path.dirname(__file__), "..","..",'python'))
sys.path.insert(1, _CTSM_PYTHON)
-import boto3
-from botocore.exceptions import ClientError
+from google.cloud import storage
import glob
import datetime
from ctsm import add_cime_to_path
@@ -60,7 +59,7 @@ def get_parser(args, description, valid_neon_sites):
dest="file_date",
required = False,
type = datetime.date.fromisoformat,
- default = datetime.datetime.strptime("0268-01-01",'%Y-%m-%d'))
+ default = datetime.datetime.strptime("0318-01-01",'%Y-%m-%d'))
parser.add_argument('--upload-finidat',
@@ -97,28 +96,24 @@ def get_parser(args, description, valid_neon_sites):
return neon_sites, args.output_root, args.file_date, args.upload_finidat, args.upload_history
-def upload_file(file_name, bucket, object_name=None):
- """Upload a file to an S3 bucket
+def upload_blob(bucket_name, source_file_name, destination_blob_name):
+ """Uploads a file to the bucket."""
+ # The ID of your GCS bucket
+ # bucket_name = "your-bucket-name"
+ # The path to your file to upload
+ # source_file_name = "local/path/to/file"
+ # The ID of your GCS object
+ # destination_blob_name = "storage-object-name"
- :param file_name: File to upload
- :param bucket: Bucket to upload to
- :param object_name: S3 object name. If not specified then file_name is used
- :return: True if file was uploaded, else False
- """
+ storage_client = storage.Client()
+ bucket = storage_client.bucket(bucket_name)
+ blob = bucket.blob(destination_blob_name)
- # If S3 object_name was not specified, use file_name
- if object_name is None:
- object_name = os.path.basename(file_name)
+ blob.upload_from_filename(source_file_name)
- # Upload the file
- s3_client = boto3.client('s3')
- try:
- logger.info("Uploading file {} to {}".format(file_name, object_name))
- response = s3_client.upload_file(file_name, bucket, object_name)
- except ClientError as e:
- logger.error(e)
- return False
- return True
+ print(
+ f"File {source_file_name} uploaded to {destination_blob_name}."
+ )
def main(description):
"""
@@ -126,10 +121,10 @@ def main(description):
from there,
"""
- if not os.path.isfile(os.path.join(os.getenv("HOME"),".aws","credentials")):
- raise FileNotFoundError("User account must have valid aws credentials to run this script.")
-
cesmroot = path_to_ctsm_root()
+ os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = os.path.join(os.environ["HOME"],'.uploadGCSkey.json')
+ #os.path.join(os.environ["HOME"],"gcwriter")
+
# Get the list of supported neon sites from usermods
valid_neon_sites = glob.glob(os.path.join(cesmroot,"cime_config","usermods_dirs","NEON","[!d]*"))
valid_neon_sites = [v.split('/')[-1] for v in valid_neon_sites]
@@ -149,8 +144,8 @@ def main(description):
logger.warning("Could not find file {}".format(finidat_file))
continue
newfile = basefile.replace(".postad.",".{}.".format(filedatestamp))
+ upload_blob("neon-ncar-artifacts", finidat_file, os.path.join("NEON","lnd","ctsm","initdata",newfile) )
- upload_file(finidat_file, 'neon-ncar-transfer', os.path.join("NEON","lnd","ctsm","initdata",newfile))
if upload_history:
logger.info("Upload history for {}".format(site))
case_path = os.path.join(output_root, site+".transient")
@@ -161,7 +156,7 @@ def main(description):
archive_dir = os.path.join(case.get_value("DOUT_S_ROOT"),"lnd","hist")
for histfile in glob.iglob(archive_dir + "/*.h1.*"):
newfile = os.path.basename(histfile)
- upload_file(histfile, 'neon-ncar-transfer', os.path.join("NEON","archive",site,"lnd","hist",newfile))
+ upload_blob("neon-ncar-artifacts", histfile, os.path.join("NEON","archive",site,"lnd","hist",newfile))
diff --git a/tools/site_and_regional/neon_sites_dompft.csv b/tools/site_and_regional/neon_sites_dompft.csv
index ad5d14e53d..e67d67375b 100644
--- a/tools/site_and_regional/neon_sites_dompft.csv
+++ b/tools/site_and_regional/neon_sites_dompft.csv
@@ -1,48 +1,48 @@
,Site,Domain,Lat,Lon,pft,start_year,end_year
-1,BART,1,44.06516,-71.28834,7,2018,2021
-2,HARV,1,42.53562,-72.17562,7,2018,2021
-3,BLAN,2,39.06044,-78.07115,7,2018,2021
-4,SCBI,2,38.89209,-78.13764,7,2018,2021
-5,SERC,2,38.89124,-76.55884,7,2018,2021
-6,DSNY,3,28.12919,-81.43394,14,2018,2021
-7,JERC,3,31.19608,-84.46647,1,2018,2021
-8,OSBS,3,29.68819,-81.99345,1,2018,2021
-9,GUAN,4,17.96882,-66.86888,6,2019,2021
-10,LAJA,4,18.02184,-67.07608,14,2019,2021
-11,STEI,5,45.5076,-89.5888,7,2018,2021
-12,TREE,5,45.49266,-89.58748,7,2018,2021
-13,UNDE,5,46.14103,-89.3221,7,2018,2021
-14,KONA,6,39.10828,-96.61044,18,2018,2021
-15,KONZ,6,39.1007,-96.56227,14,2018,2021
-16,UKFS,6,39.04168,-95.20495,7,2018,2021
-17,GRSM,7,35.68839,-83.50185,7,2018,2021
-18,MLBS,7,37.37783,-80.52425,7,2018,2019
-19,ORNL,7,35.57525,-84.16581,7,2018,2021
-20,DELA,8,32.54092,-87.80341,7,2018,2021
-21,LENO,8,31.8531,-88.16103,7,2021,2021
-22,TALL,8,32.95106,-87.3941,1,2018,2021
-23,DCFS,9,47.15919,-99.11251,13,2018,2021
-24,NOGP,9,46.76846,-100.91832,13,2018,2021
-25,WOOD,9,47.12833,-99.23907,13,2018,2021
-26,CPER,10,40.81297,-104.74455,14,2018,2021
-27,RMNP,10,40.27707,-105.54524,1,2018,2021
-28,STER,10,40.45984,-103.03008,18,2018,2021
-29,CLBJ,11,33.40143,-97.56725,7,2018,2021
-30,OAES,11,35.41062,-99.06044,14,2018,2021
-31,YELL,12,44.95597,-110.54196,1,2019,2021
-32,MOAB,13,38.25136,-109.38882,14,2018,2020
-33,NIWO,13,40.05236,-105.58324,12,2018,2021
-34,JORN,14,32.59052,-106.84377,14,2018,2021
-35,SRER,14,31.91068,-110.83549,9,2018,2021
-36,ONAQ,15,35.68839,-83.50185,9,2018,2019
+1,BART, 1, 44.06516, -71.28834,7,2018,2021
+2,HARV, 1, 42.53562, -72.17562,7,2018,2021
+3,BLAN, 2, 39.06044, -78.07115,7,2018,2021
+4,SCBI, 2, 38.89209, -78.13764,7,2018,2021
+5,SERC, 2, 38.89124, -76.55884,7,2018,2021
+6,DSNY, 3, 28.12919, -81.43394,14,2018,2021
+7,JERC, 3, 31.19608, -84.46647,1,2018,2021
+8,OSBS, 3, 29.68819, -81.99345,1,2018,2021
+9,GUAN, 4, 17.96882, -66.86888,6,2019,2021
+10,LAJA, 4, 18.02184, -67.07608,14,2019,2021
+11,STEI, 5, 45.50760, -89.58880,7,2018,2021
+12,TREE, 5, 45.49266, -89.58748,7,2018,2021
+13,UNDE, 5, 46.14103, -89.32210,7,2018,2021
+14,KONA, 6, 39.10828, -96.61044,19,2018,2021
+15,KONZ, 6, 39.10070, -96.56227,14,2018,2021
+16,UKFS, 6, 39.04168, -95.20495,7,2018,2021
+17,GRSM, 7, 35.68839, -83.50185,7,2018,2021
+18,MLBS, 7, 37.37783, -80.52425,7,2018,2019
+19,ORNL, 7, 35.57525, -84.16581,7,2018,2021
+20,DELA, 8, 32.54092, -87.80341,7,2018,2021
+21,LENO, 8, 31.85310, -88.16103,7,2021,2021
+22,TALL, 8, 32.95106, -87.39410,1,2018,2021
+23,DCFS, 9, 47.15919, -99.11251,13,2018,2021
+24,NOGP, 9, 46.76846, -100.91832,13,2018,2021
+25,WOOD, 9, 47.12833, -99.23907,13,2018,2021
+26,CPER,10, 40.81297, -104.74455,14,2018,2021
+27,RMNP,10, 40.27707, -105.54524,1,2018,2021
+28,STER,10, 40.45984, -103.03008,19,2018,2021
+29,CLBJ,11, 33.40143, -97.56725,7,2018,2021
+30,OAES,11, 35.41062, -99.06044,14,2018,2021
+31,YELL,12, 44.95597, -110.54196,1,2019,2021
+32,MOAB,13, 38.25136, -109.38882,14,2018,2020
+33,NIWO,13, 40.05236, -105.58324,12,2018,2021
+34,JORN,14, 32.59052, -106.84377,14,2018,2021
+35,SRER,14, 31.91068, -110.83549,9,2018,2021
+36,ONAQ,15, 35.68839, -83.50185,9,2018,2019
37,ABBY,16,45.762378,-122.329672,1,2018,2021
-38,WREF,16,45.81637,-121.95838,1,2019,2021
-39,SJER,17,37.107117,-119.733,13,2019,2021
-40,SOAP,17,37.03269,-119.2621,1,2018,2021
+38,WREF,16, 45.81637, -121.95838,1,2019,2021
+39,SJER,17,37.107117, -119.73300,13,2019,2021
+40,SOAP,17,37.032690, -119.26210,1,2018,2021
41,TEAK,17,37.006472,-119.005758,1,2019,2021
-42,TOOL,17,68.66045,-149.370128,1,2020,2021
+42,TOOL,17, 68.66045,-149.370128,1,2020,2021
43,BARR,18,71.281711,-156.650219,12,2019,2021
-44,BONA,19,65.15333,-147.50194,2,2018,2021
-45,DEJU,19,63.87983,-145.74765,2,2018,2021
-46,HEAL,19,63.8798,-149.21539,12,2018,2021
-47,PUUM,20,19.55309,-155.31731,4,2020,2020
+44,BONA,19, 65.15333, -147.50194,2,2018,2021
+45,DEJU,19, 63.87983, -145.74765,2,2018,2021
+46,HEAL,19, 63.87980, -149.21539,12,2018,2021
+47,PUUM,20, 19.55309, -155.31731,4,2020,2020
diff --git a/tools/site_and_regional/neon_surf_wrapper.py b/tools/site_and_regional/neon_surf_wrapper.py
index df58d3ab36..e02ee2bd53 100755
--- a/tools/site_and_regional/neon_surf_wrapper.py
+++ b/tools/site_and_regional/neon_surf_wrapper.py
@@ -11,10 +11,10 @@
based on the downloaded neon data.
(i.e. modify_singlept_site_neon.py)
-Instructions for running on Cheyenne/Casper:
-load the following into your local environment
- module load python
- ncar_pylib
+Instructions for running using conda python environments:
+
+../../py_env_create
+conda activate ctsm_py
"""
# TODO
@@ -96,9 +96,10 @@ def main():
lon = row['Lon']
site = row['Site']
pft = row['pft']
+ clmsite = "1x1_NEON_"+site
print ("Now processing site :", site)
- command = ['./subset_data','point','--lat',str(lat),'--lon',str(lon),'--site',site,'--dompft',str(pft),'--crop',
- '--create-surface','--uniform-snowpack','--cap-saturation','--verbose']
+ command = ['./subset_data','point','--lat',str(lat),'--lon',str(lon),'--site',clmsite,'--dompft',str(pft),'--crop',
+ '--create-surface','--uniform-snowpack','--cap-saturation','--verbose','--overwrite']
execute(command)
command = ['./modify_singlept_site_neon.py','--neon_site',site, '--surf_dir',
diff --git a/tools/site_and_regional/run_neon.py b/tools/site_and_regional/run_neon.py
index e0aae36274..50f0640d03 100755
--- a/tools/site_and_regional/run_neon.py
+++ b/tools/site_and_regional/run_neon.py
@@ -26,14 +26,10 @@
4) Build and submit the case.
-------------------------------------------------------------------
-Instructions for running on Cheyenne/Casper:
-
-load the following into your local environment
- module load python
- ncar_pylib
-
-To remove NPL from your environment on Cheyenne/Casper:
- deactivate
+Instructions for running using conda python environments:
+
+../../py_env_create
+conda activate ctsm_py
-------------------------------------------------------------------
To see the available options:
@@ -83,7 +79,7 @@
import CIME.build as build
from standard_script_setup import *
from CIME.case import Case
-from CIME.utils import safe_copy, expect, symlink_force
+from CIME.utils import safe_copy, expect, symlink_force, run_cmd_no_fail
from argparse import RawTextHelpFormatter
from CIME.locked_files import lock_file, unlock_file
@@ -213,6 +209,7 @@ def get_parser(args, description, valid_neon_sites):
help="""
Start date for running CTSM simulation in ISO format.
[default: %(default)s]
+ (currently non-functional)
""",
action="store",
dest="start_date",
@@ -280,6 +277,8 @@ def get_parser(args, description, valid_neon_sites):
# The transient run length is set by cdeps atm buildnml to the last date of the available tower data
# this value is not used
run_length = "4Y"
+ else:
+ run_length = args.run_length
run_length = parse_isoduration(run_length)
base_case_root = None
@@ -419,6 +418,14 @@ def build_base_case(
print("---- base case setup ------")
case.case_setup()
else:
+ # For existing case check that the compset name is correct
+ existingcompname = case.get_value("COMPSET")
+ match = re.search("^HIST", existingcompname, flags=re.IGNORECASE)
+ if re.search("^HIST", compset, flags=re.IGNORECASE) is None:
+ expect( match == None, "Existing base case is a historical type and should not be -- rerun with the --overwrite option" )
+ else:
+ expect( match != None, "Existing base case should be a historical type and is not -- rerun with the --overwrite option" )
+ # reset the case
case.case_setup(reset=True)
case_path = case.get_value("CASEROOT")
@@ -479,6 +486,13 @@ def run_case(
elif rerun:
with Case(case_root, read_only=False) as case:
rundir = case.get_value("RUNDIR")
+ # For existing case check that the compset name is correct
+ existingcompname = case.get_value("COMPSET")
+ match = re.search("^HIST", existingcompname, flags=re.IGNORECASE)
+ if re.search("^HIST", compset, flags=re.IGNORECASE) is None:
+ expect( match == None, "Existing base case is a historical type and should not be -- rerun with the --overwrite option" )
+ else:
+ expect( match != None, "Existing base case should be a historical type and is not -- rerun with the --overwrite option" )
if os.path.isfile(os.path.join(rundir, "ESMF_Profile.summary")):
print(
"Case {} appears to be complete, not rerunning.".format(
@@ -507,15 +521,21 @@ def run_case(
# read_only = False should not be required here
with Case(base_case_root, read_only=False) as basecase:
print("---- cloning the base case in {}".format(case_root))
+ #
+ # EBK: 11/05/2022 -- Note keeping the user_mods_dirs argument is important. Although
+ # it causes some of the user_nl_* files to have duplicated inputs. It also ensures
+ # that the shell_commands file is copied, as well as taking care of the DATM inputs.
+ # See https://github.com/ESCOMP/CTSM/pull/1872#pullrequestreview-1169407493
+ #
basecase.create_clone(
case_root, keepexe=True, user_mods_dirs=user_mods_dirs
)
with Case(case_root, read_only=False) as case:
- # in order to avoid the complication of leap years we always set the run_length in units of days.
- case.set_value("STOP_OPTION", "ndays")
- case.set_value("STOP_N", run_length)
- case.set_value("REST_OPTION", "end")
+ if run_type != "transient":
+ # in order to avoid the complication of leap years we always set the run_length in units of days.
+ case.set_value("STOP_OPTION", "ndays")
+ case.set_value("REST_OPTION", "end")
case.set_value("CONTINUE_RUN", False)
case.set_value("NEONVERSION", version)
if run_type == "ad":
@@ -524,6 +544,8 @@ def run_case(
case.set_value("RUN_REFDATE", "0018-01-01")
case.set_value("RUN_STARTDATE", "0018-01-01")
case.set_value("RESUBMIT", 1)
+ case.set_value("STOP_N", run_length)
+
else:
case.set_value("CLM_FORCE_COLDSTART", "off")
case.set_value("CLM_ACCELERATED_SPINUP", "off")
@@ -531,38 +553,19 @@ def run_case(
if run_type == "postad":
self.set_ref_case(case)
+ case.set_value("STOP_N", run_length)
+ # For transient cases STOP will be set in the user_mod_directory
if run_type == "transient":
if self.finidat:
case.set_value("RUN_TYPE", "startup")
else:
if not self.set_ref_case(case):
return
- case.set_value("STOP_OPTION", "nmonths")
- case.set_value("STOP_N", self.diff_month())
- case.set_value("DATM_YR_ALIGN", self.start_year)
- case.set_value("DATM_YR_START", self.start_year)
- case.set_value("DATM_YR_END", self.end_year)
case.set_value("CALENDAR", "GREGORIAN")
case.set_value("RESUBMIT", 0)
- else:
- # for the spinup we want the start and end on year boundaries
- if self.start_month == 1:
- case.set_value("DATM_YR_ALIGN", self.start_year)
- case.set_value("DATM_YR_START", self.start_year)
- elif self.start_year + 1 <= self.end_year:
- case.set_value("DATM_YR_ALIGN", self.start_year + 1)
- case.set_value("DATM_YR_START", self.start_year + 1)
- if self.end_month == 12:
- case.set_value("DATM_YR_END", self.end_year)
- else:
- case.set_value("DATM_YR_END", self.end_year - 1)
+ case.set_value("STOP_OPTION", "nmonths")
- # Let's no be so clevar with start / end dates
- #case.set_value("DATM_YR_ALIGN", int(args.start_date[0:4]))
- #case.set_value("DATM_YR_START", int(args.start_date[0:4]))
- #case.set_value("DATM_YR_END", int(args.end_date[0:4]))
-
if not rundir:
rundir = case.get_value("RUNDIR")
@@ -620,11 +623,7 @@ def set_ref_case(self, case):
case.set_value("RUN_REFDATE", refdate)
if case_root.endswith(".postad"):
case.set_value("RUN_STARTDATE", refdate)
- else:
- case.set_value(
- "RUN_STARTDATE",
- "{yr:04d}-{mo:02d}-01".format(yr=self.start_year, mo=self.start_month),
- )
+ # NOTE: if start options are set, RUN_STARTDATE should be modified here
return True
def modify_user_nl(self, case_root, run_type, rundir):
@@ -784,7 +783,10 @@ def main(description):
# -- so no need to define a base_case for every site.
res = "CLM_USRDAT"
- compset = "I1PtClm51Bgc"
+ if run_type == "transient":
+ compset = "IHist1PtClm51Bgc"
+ else:
+ compset = "I1PtClm51Bgc"
# -- Looping over neon sites
diff --git a/tools/site_and_regional/subset_data b/tools/site_and_regional/subset_data
index bb582b21f8..162dcf1d4e 100755
--- a/tools/site_and_regional/subset_data
+++ b/tools/site_and_regional/subset_data
@@ -18,13 +18,10 @@ To see all available options for single-point/regional subsetting:
./subset_data --help
----------------------------------------------------------------
-Instructions for running on Cheyenne/Casper:
- load the following into your local environment
- module load python
- ncar_pylib
+Instructions for running using conda python environments:
-To remove from your environment on Cheyenne/Casper:
- deactivate
+../../py_env_create
+conda activate ctsm_py
"""
import os
diff --git a/tools/toolchain/gen_mksurf_namelist b/tools/toolchain/gen_mksurf_namelist
deleted file mode 100755
index ecd225dd19..0000000000
--- a/tools/toolchain/gen_mksurf_namelist
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python
-
-# 2020-11-08 Negin Sobhani
-
-"""
-|------------------------------------------------------------------|
-|--------------------- Instructions -----------------------------|
-|------------------------------------------------------------------|
-This is a just top-level skeleton script that calls
-gen_mksurf_namelist.py.
-The original code (./gen_mksurf_namelist.py) is located under
-python/ctsm folder.
-
-This Python script is part of the simplified toolchain for creating
-the surface dataset for ctsm cases.
-This script should be used as the first step of the new toolchain.
-It will automatically create namelist (control file) that is
-needed for creating surface dataset and requisite intermediate files for
-running CTSM cases.
-For transient cases, it will also create a txt file that includes the
-landuse files for every year.
-
--------------------------------------------------------------------
-Instructions for running on Cheyenne/Casper:
-
-load the following into your local environment:
-
- module load python
- ncar_pylib
--------------------------------------------------------------------
-To see the available options:
- ./gen_mksurf_namelist.py --help
-
-To run the script:
- ./gen_mksurf_namelist.py
-
-To remove NPL(ncar_pylib) from your environment on Cheyenne/Casper:
- deactivate
--------------------------------------------------------------------
-"""
-
-#TODO (NS)
-
-# -[x] Add default values in the help page.
-# -[x] Add info for help page note for end_year -- by default is start_year
-# -[ ] Possibly remove year --years and range options
-# Currently comment them out.
-
-# -[ ] maybe a verbose option and removing debug
-# -[x] --debug mode is not working...
-
-# -[ ] add error check for hi-res and years if they are 1850 and 2005.
-
-# -[x] different path for each range of years for transient cases.
-# default should be picked based on the year. 1850 - 2015 -->
-# /glade/p/cesm/cseg/inputdata/lnd/clm2/rawdata/pftcftdynharv.0.25x0.25.LUH2.histsimyr1850-2015.c170629/
-# 850-1850 -->
-# pftcftdynharv.0.25x0.25.LUH2.histsimyr0850-1849.c171012
-
-# -[ ] hirespft data only for 2005?
-
-# -- Import libraries
-import os
-import sys
-
-# -- add python/ctsm to path
-_CTSM_PYTHON = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, "python"
- )
-sys.path.insert(1, _CTSM_PYTHON)
-
-from ctsm.gen_mksurf_namelist import main
-
-if __name__ == "__main__":
- main()
-
-