diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index e14ce11599..f2a9d2da8d 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -11,11 +11,16 @@ b88e1cd1b28e3609684c79a2ec0e88f26cfc362b b771971e3299c4fa56534b93421f7a2b9c7282fd 9de88bb57ea9855da408cbec1dc8acb9079eda47 8bc4688e52ea23ef688e283698f70a44388373eb +4ee49e3e516ca7dee5df378f65664f93a7db4415 +0207bc98dd5c75cd69a0e788bc53e41093712f5c +e4d38681df23ccca0ae29581a45f8362574e0630 0a5a9e803b56ec1bbd6232eff1c99dbbeef25eb7 810cb346f05ac1aabfff931ab1a2b7b584add241 5933b0018f8e29413e30dda9b906370d147bad45 # Ran SystemTests and python/ctsm through black python formatter 5364ad66eaceb55dde2d3d598fe4ce37ac83a93c 8056ae649c1b37f5e10aaaac79005d6e3a8b2380 +0bc3f00115d86d026a977918661c93779b3b19f9 540b256d1f3382f4619d7b0877c32d54ce5c40b6 8a168bb0895f4f2421608dd2589398e13a6663e6 +6fccf682eaf718615407d9bacdd3903b8786a03d diff --git a/Externals.cfg b/Externals.cfg index 4a41895631..539995247b 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -8,7 +8,7 @@ required = True local_path = components/cism protocol = git repo_url = https://github.com/ESCOMP/CISM-wrapper -tag = cismwrap_2_1_95 +tag = cismwrap_2_1_96 externals = Externals_CISM.cfg required = True @@ -44,18 +44,18 @@ required = True local_path = cime protocol = git repo_url = https://github.com/ESMCI/cime -tag = cime6.0.125 +tag = cime6.0.175 required = True [cmeps] -tag = cmeps0.14.21 +tag = cmeps0.14.43 protocol = git repo_url = https://github.com/ESCOMP/CMEPS.git local_path = components/cmeps required = True [cdeps] -tag = cdeps1.0.13 +tag = cdeps1.0.24 protocol = git repo_url = https://github.com/ESCOMP/CDEPS.git local_path = components/cdeps @@ -63,7 +63,7 @@ externals = Externals_CDEPS.cfg required = True [cpl7] -tag = cpl77.0.5 +tag = cpl77.0.7 protocol = git repo_url = https://github.com/ESCOMP/CESM_CPL7andDataComps local_path = components/cpl7 @@ -84,7 +84,7 @@ local_path = libraries/mct required = True [parallelio] -tag = pio2_5_10 
+tag = pio2_6_2 protocol = git repo_url = https://github.com/NCAR/ParallelIO local_path = libraries/parallelio diff --git a/Externals_CLM.cfg b/Externals_CLM.cfg index cab31b6b7f..5f8e8d2441 100644 --- a/Externals_CLM.cfg +++ b/Externals_CLM.cfg @@ -2,7 +2,7 @@ local_path = src/fates protocol = git repo_url = https://github.com/NGEET/fates -tag = sci.1.68.2_api.30.0.0 +tag = sci.1.68.2_api.31.0.0 required = True [externals_description] diff --git a/bld/CLMBuildNamelist.pm b/bld/CLMBuildNamelist.pm index 65cef8b77e..17d64cd59f 100755 --- a/bld/CLMBuildNamelist.pm +++ b/bld/CLMBuildNamelist.pm @@ -613,7 +613,7 @@ sub process_namelist_user_input { process_namelist_commandline_infile($opts, $definition, $nl, $envxml_ref); # Apply the commandline options and make sure the user didn't change it above - process_namelist_commandline_options($opts, $nl_flags, $definition, $defaults, $nl, $physv); + process_namelist_commandline_options($opts, $nl_flags, $definition, $defaults, $nl, $envxml_ref, $physv); # The last two process command line arguments for usr_name and use_case # They require that process_namelist_commandline_options was called before this @@ -634,10 +634,10 @@ sub process_namelist_commandline_options { # Obtain default values for the following build-namelist input arguments # : res, mask, ssp_rcp, sim_year, sim_year_range, and clm_accelerated_spinup. 
- my ($opts, $nl_flags, $definition, $defaults, $nl, $physv) = @_; + my ($opts, $nl_flags, $definition, $defaults, $nl, $envxml_ref, $physv) = @_; setup_cmdl_chk_res($opts, $defaults); - setup_cmdl_resolution($opts, $nl_flags, $definition, $defaults); + setup_cmdl_resolution($opts, $nl_flags, $definition, $defaults, $envxml_ref); setup_cmdl_mask($opts, $nl_flags, $definition, $defaults, $nl); setup_cmdl_configuration_and_structure($opts, $nl_flags, $definition, $defaults, $nl); setup_cmdl_bgc($opts, $nl_flags, $definition, $defaults, $nl); @@ -668,7 +668,7 @@ sub setup_cmdl_chk_res { } sub setup_cmdl_resolution { - my ($opts, $nl_flags, $definition, $defaults) = @_; + my ($opts, $nl_flags, $definition, $defaults, $envxml_ref) = @_; my $var = "res"; my $val; @@ -686,16 +686,30 @@ sub setup_cmdl_resolution { $val = "e_string( $nl_flags->{'res'} ); if ( ! $definition->is_valid_value( $var, $val ) ) { my @valid_values = $definition->get_valid_values( $var ); - if ( ! defined($opts->{'clm_usr_name'}) || $nl_flags->{'res'} ne $opts->{'clm_usr_name'} ) { + if ( $nl_flags->{'res'} ne "CLM_USRDAT" ) { $log->fatal_error("$var has a value ($val) that is NOT valid. Valid values are: @valid_values"); } } } + if ( $nl_flags->{'res'} eq "CLM_USRDAT" ) { + if ( ! defined($opts->{'clm_usr_name'}) ) { + $log->fatal_error("Resolution is CLM_USRDAT, but --clm_usr_name option is NOT set, and it is required for CLM_USRDAT resolutions"); + } + } + # # For NEON sites - if ($nl_flags->{'res'} =~ /NEON/) { - $nl_flags->{'neon'} = ".true." - } else { - $nl_flags->{'neon'} = ".false." + # + $nl_flags->{'neon'} = ".false."; + $nl_flags->{'neonsite'} = ""; + if ( $nl_flags->{'res'} eq "CLM_USRDAT" ) { + if ( $opts->{'clm_usr_name'} eq "NEON" ) { + $nl_flags->{'neon'} = ".true."; + $nl_flags->{'neonsite'} = $envxml_ref->{'NEONSITE'}; + $log->verbose_message( "This is a NEON site with NEONSITE = " . $nl_flags->{'neonsite'} ); + } + } + if ( ! 
&value_is_true( $nl_flags->{'neon'} ) ) { + $log->verbose_message( "This is NOT a NEON site" ); } } @@ -1572,7 +1586,7 @@ sub process_namelist_inline_logic { setup_logic_grainproduct($opts, $nl_flags, $definition, $defaults, $nl, $physv); setup_logic_soilstate($opts, $nl_flags, $definition, $defaults, $nl); setup_logic_demand($opts, $nl_flags, $definition, $defaults, $nl); - setup_logic_surface_dataset($opts, $nl_flags, $definition, $defaults, $nl); + setup_logic_surface_dataset($opts, $nl_flags, $definition, $defaults, $nl, $envxml_ref); setup_logic_dynamic_subgrid($opts, $nl_flags, $definition, $defaults, $nl); if ( remove_leading_and_trailing_quotes($nl_flags->{'clm_start_type'}) ne "branch" ) { setup_logic_initial_conditions($opts, $nl_flags, $definition, $defaults, $nl, $physv); @@ -2031,6 +2045,13 @@ sub setup_logic_snicar_methods { sub setup_logic_snow { my ($opts, $nl_flags, $definition, $defaults, $nl) = @_; + add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'snow_thermal_cond_method' ); + + my $var = $nl->get_value('snow_thermal_cond_method'); + if ( $var ne "'Jordan1991'" && $var ne "'Sturm1997'" ) { + $log->fatal_error("$var is incorrect entry for the namelist variable snow_thermal_cond_method; expected Jordan1991 or Sturm1997"); + } + my $numrad_snw = $nl->get_value('snicar_numrad_snw'); add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'fsnowoptics', 'snicar_numrad_snw' => $numrad_snw); @@ -2290,6 +2311,7 @@ sub setup_logic_demand { $settings{'use_lch4'} = $nl_flags->{'use_lch4'}; $settings{'use_nitrif_denitrif'} = $nl_flags->{'use_nitrif_denitrif'}; $settings{'use_crop'} = $nl_flags->{'use_crop'}; + $settings{'neon'} = $nl_flags->{'neon'}; my $demand = $nl->get_value('clm_demand'); if (defined($demand)) { @@ -2342,7 +2364,7 @@ sub setup_logic_surface_dataset { # consistent with it # MUST BE AFTER: setup_logic_demand which is where flanduse_timeseries is set # - my ($opts, $nl_flags, 
$definition, $defaults, $nl) = @_; + my ($opts, $nl_flags, $definition, $defaults, $nl, $xmlvar_ref) = @_; $nl_flags->{'flanduse_timeseries'} = "null"; my $flanduse_timeseries = $nl->get_value('flanduse_timeseries'); @@ -2367,26 +2389,42 @@ sub setup_logic_surface_dataset { if ( ! &value_is_true($nl_flags->{'use_fates'}) ) { add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var, 'hgrid'=>$nl_flags->{'res'}, 'ssp_rcp'=>$nl_flags->{'ssp_rcp'}, + 'neon'=>$nl_flags->{'neon'}, 'neonsite'=>$nl_flags->{'neonsite'}, 'sim_year'=>$nl_flags->{'sim_year'}, 'irrigate'=>".true.", 'use_vichydro'=>$nl_flags->{'use_vichydro'}, - 'use_crop'=>".true.", 'glc_nec'=>$nl_flags->{'glc_nec'}, 'nofail'=>1); + 'use_crop'=>".true.", 'glc_nec'=>$nl_flags->{'glc_nec'}, 'use_fates'=>$nl_flags->{'use_fates'}, 'nofail'=>1); } # If didn't find the crop version check for the exact match - if ( ! defined($nl->get_value($var) ) ) { + my $fsurdat = $nl->get_value($var); + if ( ! defined($fsurdat) ) { if ( ! &value_is_true($nl_flags->{'use_fates'}) ) { $log->verbose_message( "Crop version of $var NOT found, searching for an exact match" ); } add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var, 'hgrid'=>$nl_flags->{'res'}, 'ssp_rcp'=>$nl_flags->{'ssp_rcp'}, 'use_vichydro'=>$nl_flags->{'use_vichydro'}, - 'sim_year'=>$nl_flags->{'sim_year'}, 'irrigate'=>$nl_flags->{'irrigate'}, + 'sim_year'=>$nl_flags->{'sim_year'}, 'irrigate'=>$nl_flags->{'irrigate'}, 'use_fates'=>$nl_flags->{'use_fates'}, + 'neon'=>$nl_flags->{'neon'}, 'neonsite'=>$nl_flags->{'neonsite'}, 'use_crop'=>$nl_flags->{'use_crop'}, 'glc_nec'=>$nl_flags->{'glc_nec'}, 'nofail'=>1 ); - if ( ! defined($nl->get_value($var) ) ) { + if ( ! 
defined($fsurdat) ) { $log->verbose_message( "Exact match of $var NOT found, searching for version with irrigate true" ); } add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, $var, 'hgrid'=>$nl_flags->{'res'}, 'ssp_rcp'=>$nl_flags->{'ssp_rcp'}, 'use_vichydro'=>$nl_flags->{'use_vichydro'}, - 'sim_year'=>$nl_flags->{'sim_year'}, 'irrigate'=>".true.", + 'sim_year'=>$nl_flags->{'sim_year'}, 'irrigate'=>".true.", 'use_fates'=>$nl_flags->{'use_fates'}, + 'neon'=>$nl_flags->{'neon'}, 'neonsite'=>$nl_flags->{'neonsite'}, 'use_crop'=>$nl_flags->{'use_crop'}, 'glc_nec'=>$nl_flags->{'glc_nec'} ); } + # + # Expand the XML variables for NEON cases so that NEONSITE will be used + # + if ( &value_is_true($nl_flags->{'neon'}) ) { + my $fsurdat = $nl->get_value($var); + my $newval = SetupTools::expand_xml_var( $fsurdat, $xmlvar_ref ); + if ( $newval ne $fsurdat ) { + my $group = $definition->get_group_name($var); + $nl->set_variable_value($group, $var, $newval); + $log->verbose_message( "This is a NEON site and the fsurdat file selected is: $newval" ); + } + } } #------------------------------------------------------------------------------- @@ -4174,7 +4212,6 @@ sub setup_logic_snowpack { add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'snow_overburden_compaction_method'); add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'lotmp_snowdensity_method'); add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'upplim_destruct_metamorph'); - add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'fresh_snw_rds_max'); add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'reset_snow'); add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'reset_snow_glc'); add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'reset_snow_glc_ela'); diff --git 
a/bld/env_run.xml b/bld/env_run.xml index 8bf59d0911..f3b7467168 100644 --- a/bld/env_run.xml +++ b/bld/env_run.xml @@ -9,5 +9,6 @@ Sample env_run.xml file that allows build-namelist to be run for testing in this --> + diff --git a/bld/namelist_files/createMapEntry.pl b/bld/namelist_files/createMapEntry.pl index 561683bb05..f9009ba86f 100755 --- a/bld/namelist_files/createMapEntry.pl +++ b/bld/namelist_files/createMapEntry.pl @@ -23,7 +23,7 @@ my $scriptName; ($scriptName = $0) =~ s!(.*)/!!; # get name of script my $cwd = getcwd(); - my $CSMDATA = "/glade/p/cesm/cseg/inputdata"; + my $CSMDATA = "/glade/campaign/cesm/cesmdata/cseg/inputdata"; if ($#ARGV != 0 ) { usage(); diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml index 2e57391df7..3997066a0b 100644 --- a/bld/namelist_files/namelist_defaults_ctsm.xml +++ b/bld/namelist_files/namelist_defaults_ctsm.xml @@ -431,10 +431,6 @@ attributes from the config_cache.xml file (with keys converted to upper-case). 175.d00 175.d00 -54.526d00 -204.526d00 -204.526d00 - 0.08d00 .false. @@ -445,6 +441,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). 1.e9 SwensonLawrence2012 +Jordan1991 -lnd/clm2/paramdata/ctsm51_params.c211112.nc -lnd/clm2/paramdata/clm50_params.c211112.nc -lnd/clm2/paramdata/clm45_params.c211112.nc +lnd/clm2/paramdata/ctsm51_params.c231117.nc +lnd/clm2/paramdata/clm50_params.c231117.nc +lnd/clm2/paramdata/clm45_params.c231117.nc @@ -680,6 +677,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). set up. If more finidat files are added you may need to add more of these. Or one specific file will be chosen over another. --> + hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.true. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. @@ -724,6 +722,11 @@ attributes from the config_cache.xml file (with keys converted to upper-case). >hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. 
use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false. + +hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false. + + hgrid=1.9x2.5 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. @@ -822,6 +825,12 @@ attributes from the config_cache.xml file (with keys converted to upper-case). >hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. + +hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.true. glc_nec=10 do_transient_pfts=.false. + + hgrid=ne0np4.ARCTICGRIS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. -p + hgrid=ne0np4CONUS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. - +lnd/clm2/initdata_map/clmi.B1850Clm50BgcCrop.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200729.nc + + lnd/clm2/surfdata_map/surfdata_0.9x1.25_hist_16pfts_nourb_CMIP6_simyrPtVg_c181114.nc + + +lnd/clm2/surfdata_map/NEON/16PFT_mixed/surfdata_1x1_NEON_${NEONSITE}_hist_16pfts_Irrig_CMIP6_simyr2000_c230120.nc + +lnd/clm2/surfdata_map/NEON/surfdata_1x1_NEON_${NEONSITE}_hist_78pfts_CMIP6_simyr2000_c230601.nc + + + + + lnd/clm2/surfdata_map/landuse.timeseries_0.9x1.25_hist_16pfts_Irrig_CMIP6_simyr1850-2015_c170824.nc lnd/clm2/surfdata_map/release-clm5.0.18/landuse.timeseries_48x96_hist_16pfts_Irrig_CMIP6_simyr1850-2015_c190214.nc -lnd/clm2/surfdata_map/landuse.timeseries_ne30np4_hist_16pfts_Irrig_CMIP6_simyr1850-2015_c170824.nc +lnd/clm2/surfdata_map/release-clm5.0.30/landuse.timeseries_ne30np4_SSP5-8.5_78pfts_CMIP6_simyr1850-2100_c200426.nc +lnd/clm2/surfdata_map/release-clm5.0.30/landuse.timeseries_ne30np4.pg3_SSP5-8.5_78pfts_CMIP6_simyr1850-2100_c200426.nc 
lnd/clm2/surfdata_map/landuse.timeseries_ne0np4.ARCTIC.ne30x4_hist_78pfts_CMIP6_simyr1850-2015_c191023.nc +>lnd/clm2/surfdata_map/release-clm5.0.30/landuse.timeseries_ne0np4.ARCTIC.ne30x4_SSP5-8.5_78pfts_CMIP6_simyr1850-2100_c200426.nc diff --git a/bld/namelist_files/namelist_defaults_overall.xml b/bld/namelist_files/namelist_defaults_overall.xml index c4ccac6467..96db00478a 100644 --- a/bld/namelist_files/namelist_defaults_overall.xml +++ b/bld/namelist_files/namelist_defaults_overall.xml @@ -44,8 +44,10 @@ determine default values for namelists. -flanduse_timeseries -flanduse_timeseries +null +null +flanduse_timeseries +flanduse_timeseries diff --git a/bld/namelist_files/namelist_definition_ctsm.xml b/bld/namelist_files/namelist_definition_ctsm.xml index bca1beca6a..97310ebe80 100644 --- a/bld/namelist_files/namelist_definition_ctsm.xml +++ b/bld/namelist_files/namelist_definition_ctsm.xml @@ -2156,7 +2156,7 @@ Land mask description + valid_values="clm4_5_CRUv7,clm4_5_GSWP3v1,clm4_5_cam6.0,clm5_0_cam6.0,clm5_0_CRUv7,clm5_0_GSWP3v1,clm5_1_GSWP3v1,clm5_1_cam6.0"> General configuration of model version and atmospheric forcing to tune the model to run under. This sets the model to run with constants and initial conditions that were set to run well under the configuration of model version and atmospheric forcing. To run well constants would need to be changed @@ -2783,11 +2783,6 @@ Snow compaction overburden exponential factor (1/K) Not used for snow_overburden_compaction_method=Vionnet2012 - -maximum warm (at freezing) fresh snow effective radius [microns] - - If set to .true., then reset the snow pack over non-glacier columns to a small value. 
@@ -2845,6 +2840,11 @@ NiuYang2007: Niu and Yang 2007 SwensonLawrence2012: Swenson and Lawrence 2012 + +Parameterization to use for snow thermal conductivity + + diff --git a/bld/namelist_files/use_cases/2018-PD_transient.xml b/bld/namelist_files/use_cases/2018-PD_transient.xml index d838efbd00..96f14207ad 100644 --- a/bld/namelist_files/use_cases/2018-PD_transient.xml +++ b/bld/namelist_files/use_cases/2018-PD_transient.xml @@ -1,8 +1,12 @@ + + -Simulate transient land-use, and aerosol deposition changes from 2018 to current day with a mix of historical data, and future scenario data +Simulate transient Nitrogen-deposition, aerosol deposition, urban, and fire related (pop-density, lightning) changes from 2018 to current day with a mix of historical data, and future scenario data +Simulate transient Nitrogen-deposition, aerosol deposition, urban, and fire related (pop-density, lightning) changes from 2018 to current day with a mix of historical data, and future scenario data +Simulate transient urban and aerosol deposition changes from 2018 to current day with a mix of historical data, and future scenario data diff --git a/bld/namelist_files/use_cases/2018_control.xml b/bld/namelist_files/use_cases/2018_control.xml index e5e572d749..28554074c4 100644 --- a/bld/namelist_files/use_cases/2018_control.xml +++ b/bld/namelist_files/use_cases/2018_control.xml @@ -1,5 +1,8 @@ + + + Conditions to simulate 2018 land-use diff --git a/bld/unit_testers/build-namelist_test.pl b/bld/unit_testers/build-namelist_test.pl index da4201d68f..4d03124116 100755 --- a/bld/unit_testers/build-namelist_test.pl +++ b/bld/unit_testers/build-namelist_test.pl @@ -42,7 +42,7 @@ sub make_env_run { my %settings = @_; # Set default settings - my %env_vars = ( DIN_LOC_ROOT=>"MYDINLOCROOT", GLC_TWO_WAY_COUPLING=>"FALSE" ); + my %env_vars = ( DIN_LOC_ROOT=>"MYDINLOCROOT", GLC_TWO_WAY_COUPLING=>"FALSE", NEONSITE=>"" ); # Set any settings that came in from function call foreach my $item ( keys(%settings) 
) { $env_vars{$item} = $settings{$item}; @@ -139,7 +139,7 @@ sub cat_and_create_namelistinfile { $inputdata_rootdir = $ENV{'CSMDATA'}; } else { # use yellowstone location as default - $inputdata_rootdir="/glade/p/cesm/cseg/inputdata"; + $inputdata_rootdir="/glade/campaign/cesm/cesmdata/cseg/inputdata"; print("WARNING: -csmdata nor CSMDATA are set, using default yellowstone location: $inputdata_rootdir\n"); } @@ -381,7 +381,7 @@ sub cat_and_create_namelistinfile { "JORN", "LAJA", "MOAB", "OAES", "OSBS", "SCBI", "SOAP", "STER", "TOOL", "UNDE", "YELL" ) { - &make_env_run(); + &make_env_run( NEONSITE=>"$site" ); # # Concatonate default usermods and specific sitetogether expanding env variables while doing that # @@ -400,7 +400,7 @@ sub cat_and_create_namelistinfile { # # Now run the site # - my $options = "-res CLM_USRDAT -clm_usr_name NEON -no-megan -bgc bgc -sim_year 2018 -infile $namelistfile"; + my $options = "--res CLM_USRDAT --clm_usr_name NEON --no-megan --bgc bgc --use_case 2018_control --infile $namelistfile"; eval{ system( "$bldnml -envxml_dir . 
$options > $tempfile 2>&1 " ); }; is( $@, '', "options: $options" ); $cfiles->checkfilesexist( "$options", $mode ); diff --git a/bld/unit_testers/xFail/expectedClmTestFails.xml b/bld/unit_testers/xFail/expectedClmTestFails.xml index 12c954d38b..c7cbfee488 100644 --- a/bld/unit_testers/xFail/expectedClmTestFails.xml +++ b/bld/unit_testers/xFail/expectedClmTestFails.xml @@ -34,19 +34,6 @@ - - - - Doesn't check for valid values - - - - - - - - diff --git a/cime_config/buildnml b/cime_config/buildnml index e239f0ec58..84e1581406 100755 --- a/cime_config/buildnml +++ b/cime_config/buildnml @@ -136,7 +136,6 @@ def buildnml(case, caseroot, compname): clmusr = "" if lnd_grid == "CLM_USRDAT": clm_usrdat_name = case.get_value("CLM_USRDAT_NAME") - lnd_grid = clm_usrdat_name clmusr = " -clm_usr_name %s " % clm_usrdat_name # Write warning about initial condition data if "NEON" in clm_usrdat_name and clm_force_coldstart == "off": diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml index ebe7cd8254..a949a15a17 100644 --- a/cime_config/config_component.xml +++ b/cime_config/config_component.xml @@ -64,7 +64,7 @@ Tuning parameters and initial conditions should be optimized for what CLM model version and what meteorlogical forcing combination? 
UNSET - clm5_0_cam6.0,clm5_0_GSWP3v1,clm5_0_CRUv7,clm4_5_CRUv7,clm4_5_GSWP3v1,clm4_5_cam6.0,clm5_1_GSWP3v1 + clm5_0_cam6.0,clm5_0_GSWP3v1,clm5_0_CRUv7,clm4_5_CRUv7,clm4_5_GSWP3v1,clm4_5_cam6.0,clm5_1_GSWP3v1,clm5_1_cam6.0 clm4_5_CRUv7 clm4_5_CRUv7 @@ -77,6 +77,7 @@ clm5_0_cam6.0 clm5_0_cam6.0 clm5_1_GSWP3v1 + clm5_1_cam6.0 diff --git a/cime_config/config_pes.xml b/cime_config/config_pes.xml index d6f90167e1..7bccfd96df 100644 --- a/cime_config/config_pes.xml +++ b/cime_config/config_pes.xml @@ -113,6 +113,43 @@ + + + + none + + -1 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + @@ -261,6 +298,80 @@ + + + + none + + -1 + -12 + -12 + -12 + -12 + -12 + -12 + -12 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + + + + + Much lower core count f19 layout, mainly for testing + + -1 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + + + + @@ -303,34 +414,34 @@ none - -4 - -4 - -4 - -4 - -4 - -4 - -4 - -4 + -4 + -4 + -4 + -4 + -4 + -4 + -4 + -4 - 1 - 1 - 1 - 1 - 1 - 1 - 1 - 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 - 0 - 0 - 0 - 0 - 0 - 0 - 0 - 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 @@ -372,6 +483,80 @@ + + + + none + + -1 + -14 + -14 + -14 + -14 + -14 + -14 + -14 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + + + + + Much lower core count f09 layout, mainly for testing + + -1 + -5 + -5 + -5 + -5 + -5 + -5 + -5 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + @@ -557,6 +742,80 @@ + + + + none + + -1 + -5 + -5 + -5 + -5 + -5 + -5 + -5 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + + + + + none + + -1 + -20 + -20 + -20 + -20 + -20 + -20 + -20 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + @@ -1225,6 +1484,44 @@ + + + + none + + -1 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + 
-1 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + + + + @@ -1374,6 +1671,80 @@ + + + + none + + -1 + -14 + -14 + -14 + -14 + -14 + -14 + -14 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + + + + + Much lower core count nldas2 layout, mainly for testing + + -1 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + + + + @@ -1524,5 +1895,102 @@ + + + + Need at least 4 nodes to default to normal queue + + -1 + -3 + -3 + -3 + -3 + -3 + -3 + -3 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + + + + + none + + -1 + -7 + -7 + -7 + -7 + -7 + -7 + -7 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + 0 + 0 + 0 + 0 + 0 + 0 + 0 + + + + diff --git a/cime_config/testdefs/ExpectedTestFails.xml b/cime_config/testdefs/ExpectedTestFails.xml index b696fadcf2..f5053a7843 100644 --- a/cime_config/testdefs/ExpectedTestFails.xml +++ b/cime_config/testdefs/ExpectedTestFails.xml @@ -28,6 +28,14 @@ --> + + + + FAIL + #2268 + + + @@ -43,51 +51,51 @@ - + FAIL #1733 - + FAIL - #1733 + ESMCI/ccs_config_cesm#131 - - + + FAIL - #2236 + ESMCI/ccs_config_cesm#130 - - - - - + + + FAIL - #667 + ESMCI/ccs_config_cesm#130 - - + + FAIL - FATES#897 + ESMCI/ccs_config_cesm#130 - - - + + + FAIL - FATES#897 + ESMCI/ccs_config_cesm#130 - + + + FAIL FATES#701 @@ -101,16 +109,44 @@ - + + + FAIL + FATES#701 + + + + PEND #1045 + + + PEND + #1045 + + + + + + FAIL + ESMCI/ccs_config_cesm#130 + + + + + + FAIL + ESMCI/ccs_config_cesm#130 + + + - FAIL + PEND FATES#983 This job should time out on izumi, seems to be hanging on history output. 
@@ -123,5 +159,11 @@ + + + FAIL + FATES#1089 + + diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml index a4e1b3fa00..8d50096d95 100644 --- a/cime_config/testdefs/testlist_clm.xml +++ b/cime_config/testdefs/testlist_clm.xml @@ -3,6 +3,7 @@ + @@ -12,23 +13,18 @@ + + - - - - - - - - - + + @@ -38,6 +34,7 @@ + @@ -48,14 +45,82 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -65,6 +130,7 @@ + @@ -74,6 +140,7 @@ + @@ -90,6 +157,7 @@ + @@ -99,6 +167,7 @@ + @@ -108,6 +177,7 @@ + @@ -117,6 +187,7 @@ + @@ -126,6 +197,7 @@ + @@ -136,6 +208,8 @@ + + @@ -179,6 +253,7 @@ + @@ -194,6 +269,15 @@ + + + + + + + + + @@ -203,6 +287,15 @@ + + + + + + + + + @@ -211,6 +304,14 @@ + + + + + + + + @@ -219,9 +320,18 @@ + + + + + + + + + @@ -231,6 +341,8 @@ + + @@ -239,6 +351,7 @@ + @@ -247,6 +360,7 @@ + @@ -256,6 +370,7 @@ + @@ -264,6 +379,7 @@ + @@ -272,6 +388,7 @@ + @@ -281,6 +398,8 @@ + + @@ -295,9 +414,19 @@ + + + + + + + + + + @@ -307,6 +436,7 @@ + @@ -316,6 +446,7 @@ + @@ -339,9 +470,19 @@ + + + + + + + + + + @@ -351,6 +492,7 @@ + @@ -360,6 +502,7 @@ + @@ -369,6 +512,7 @@ + @@ -378,6 +522,7 @@ + @@ -387,6 +532,8 @@ + + @@ -395,6 +542,7 @@ + @@ -405,6 +553,8 @@ + + @@ -413,6 +563,7 @@ + @@ -421,6 +572,7 @@ + @@ -430,6 +582,8 @@ + + @@ -438,6 +592,7 @@ + @@ -447,6 +602,7 @@ + @@ -456,6 +612,7 @@ + @@ -479,9 +636,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -515,6 +700,17 @@ + + + + + + + + + + + @@ -523,6 +719,14 @@ + + + + + + + + @@ -532,7 +736,16 @@ - + + + + + + + + + + @@ -558,6 +771,14 @@ + + + + + + + + @@ -566,6 +787,14 @@ + + + + + + + + @@ -574,6 +803,14 @@ + + + + + + + + @@ -583,6 +820,15 @@ + + + + + + + + + @@ -592,6 +838,15 @@ + + + + + + + + + @@ -601,6 +856,15 @@ + + + + + + + + + @@ -610,10 +874,21 @@ + + + + + + + + + + + @@ -632,6 +907,7 @@ + @@ -641,6 +917,8 @@ + + @@ -649,6 +927,7 @@ + @@ -657,6 +936,7 @@ + @@ 
-673,6 +953,7 @@ + @@ -682,6 +963,7 @@ + @@ -721,9 +1003,19 @@ + + + + + + + + + + @@ -733,6 +1025,7 @@ + @@ -742,6 +1035,7 @@ + @@ -757,6 +1051,15 @@ + + + + + + + + + @@ -765,6 +1068,14 @@ + + + + + + + + @@ -774,6 +1085,15 @@ + + + + + + + + + @@ -783,9 +1103,19 @@ + + + + + + + + + + @@ -795,6 +1125,7 @@ + @@ -804,6 +1135,7 @@ + @@ -813,6 +1145,7 @@ + @@ -822,6 +1155,7 @@ + @@ -830,6 +1164,7 @@ + @@ -839,6 +1174,7 @@ + @@ -855,6 +1191,16 @@ + + + + + + + + + + @@ -865,6 +1211,16 @@ + + + + + + + + + + @@ -875,6 +1231,16 @@ + + + + + + + + + + @@ -884,6 +1250,15 @@ + + + + + + + + + @@ -892,6 +1267,14 @@ + + + + + + + + @@ -901,6 +1284,15 @@ + + + + + + + + + @@ -911,6 +1303,16 @@ + + + + + + + + + + @@ -919,6 +1321,14 @@ + + + + + + + + @@ -928,6 +1338,15 @@ + + + + + + + + + @@ -938,6 +1357,16 @@ + + + + + + + + + + @@ -948,9 +1377,20 @@ + + + + + + + + + + + @@ -965,9 +1405,19 @@ + + + + + + + + + + @@ -977,6 +1427,7 @@ + @@ -986,6 +1437,7 @@ + @@ -995,6 +1447,7 @@ + @@ -1010,12 +1463,25 @@ + + + + + + + + + + + + + @@ -1024,6 +1490,7 @@ + @@ -1033,6 +1500,7 @@ + @@ -1041,6 +1509,7 @@ + @@ -1050,6 +1519,8 @@ + + @@ -1067,6 +1538,7 @@ + @@ -1121,6 +1593,7 @@ + @@ -1142,12 +1615,30 @@ - + + + + + + + + + + + + + + + + + + + - + - + @@ -1163,35 +1654,45 @@ - + + + + + + + + + + + - + - + - - - + + - + - + + - + @@ -1217,6 +1718,7 @@ + @@ -1232,9 +1734,19 @@ + + + + + + + + + + @@ -1245,6 +1757,8 @@ + + @@ -1255,6 +1769,8 @@ + + @@ -1264,6 +1780,8 @@ + + @@ -1288,10 +1806,21 @@ + + + + + + + + + + + @@ -1302,6 +1831,7 @@ + @@ -1312,6 +1842,8 @@ + + @@ -1320,6 +1852,7 @@ + @@ -1328,6 +1861,7 @@ + @@ -1338,6 +1872,7 @@ + @@ -1347,6 +1882,7 @@ + @@ -1356,6 +1892,7 @@ + @@ -1365,6 +1902,7 @@ + @@ -1374,6 +1912,7 @@ + @@ -1383,6 +1922,7 @@ + @@ -1392,6 +1932,7 @@ + @@ -1401,6 +1942,7 @@ + @@ -1410,6 +1952,7 @@ + @@ -1418,10 +1961,13 @@ - + + + + @@ -1439,6 +1985,12 @@ + + + @@ -1467,6 +2019,16 @@ + + + + + + + + + + @@ -1481,6 +2043,10 @@ + + + + @@ -1491,6 +2057,9 @@ 
+ + + @@ -1499,6 +2068,7 @@ + @@ -1508,6 +2078,8 @@ + + @@ -1526,15 +2098,21 @@ + + - + + + @@ -1546,6 +2124,10 @@ + + @@ -1556,6 +2138,10 @@ + + @@ -1575,9 +2161,26 @@ + + + + + + + + + + + + + + @@ -1591,6 +2194,12 @@ + + + @@ -1601,6 +2210,10 @@ + + + + @@ -1611,6 +2224,7 @@ + @@ -1628,23 +2242,23 @@ - - - - - - + + + + + + - - - - - - + + + + + + @@ -1659,6 +2273,7 @@ + @@ -1667,9 +2282,11 @@ + + @@ -1684,6 +2301,8 @@ + + @@ -1693,6 +2312,7 @@ + @@ -1719,6 +2339,8 @@ + + @@ -1728,6 +2350,7 @@ + @@ -1737,6 +2360,7 @@ + @@ -1746,6 +2370,7 @@ + @@ -1765,6 +2390,7 @@ + @@ -1775,6 +2401,8 @@ + + @@ -1801,6 +2429,7 @@ + @@ -1810,6 +2439,7 @@ + @@ -1818,6 +2448,7 @@ + @@ -1827,6 +2458,8 @@ + + @@ -1835,6 +2468,7 @@ + @@ -1844,6 +2478,7 @@ + @@ -1852,6 +2487,7 @@ + @@ -1861,6 +2497,7 @@ + @@ -1871,6 +2508,7 @@ + @@ -1881,6 +2519,7 @@ + @@ -1900,6 +2539,8 @@ + + @@ -1909,6 +2550,7 @@ + @@ -1918,6 +2560,7 @@ + @@ -1927,6 +2570,7 @@ + @@ -1936,6 +2580,12 @@ + + + @@ -1953,6 +2603,10 @@ + + @@ -1963,6 +2617,8 @@ + + @@ -1973,15 +2629,26 @@ - + + + + + + + + + + + + @@ -1990,6 +2657,7 @@ + @@ -1998,6 +2666,7 @@ + @@ -2007,6 +2676,7 @@ + @@ -2017,6 +2687,7 @@ + @@ -2027,6 +2698,7 @@ + @@ -2037,13 +2709,14 @@ + - + @@ -2053,6 +2726,16 @@ + + + + + + + + + + @@ -2087,15 +2770,24 @@ + + + + + + + + + - - - - - - + + + + + + @@ -2109,33 +2801,40 @@ - + + + + + + + - - - - - - - + + + + + + + + + - - - - - - - - + + + + + + + @@ -2151,9 +2850,19 @@ + + + + + + + + + + @@ -2164,6 +2873,7 @@ + @@ -2174,6 +2884,7 @@ + @@ -2185,6 +2896,7 @@ + @@ -2196,6 +2908,8 @@ + + @@ -2206,6 +2920,7 @@ + @@ -2216,6 +2931,10 @@ + + @@ -2227,8 +2946,10 @@ + + @@ -2239,6 +2960,10 @@ + + @@ -2249,6 +2974,7 @@ + @@ -2260,6 +2986,7 @@ + @@ -2270,6 +2997,7 @@ + @@ -2280,6 +3008,7 @@ + @@ -2290,6 +3019,7 @@ + @@ -2300,6 +3030,7 @@ + @@ -2309,6 +3040,7 @@ + @@ -2318,6 +3050,7 @@ + @@ -2327,6 +3060,7 @@ + @@ -2335,6 +3069,7 @@ + @@ -2344,6 +3079,7 @@ + @@ -2353,6 +3089,7 @@ + @@ -2363,6 +3100,7 @@ + @@ 
-2372,6 +3110,7 @@ + @@ -2381,6 +3120,7 @@ + @@ -2398,9 +3138,20 @@ + + + + + + + + + + + @@ -2410,6 +3161,7 @@ + @@ -2420,6 +3172,7 @@ + @@ -2431,6 +3184,8 @@ + + @@ -2440,6 +3195,7 @@ + @@ -2451,6 +3207,9 @@ + + + @@ -2460,6 +3219,7 @@ + @@ -2469,97 +3229,83 @@ - - - - - - + + + + + + - - - - - - - - - - - + + + + + + + + - - - - - - + + + + + + - - - - - - - - + + + + + + + + - - - - - - - - - - - - + + + + + + + - - - - - - - - - - - - + + + + + + + + + - - - - - - - - + + + + + @@ -2572,6 +3318,15 @@ + + + + + + + + + @@ -2582,6 +3337,15 @@ + + + + + + + + + @@ -2592,47 +3356,64 @@ + + + + + + + + + - - - - - - + + + + + + + + + + + + + + - - - - - - + + + + + + - - - - - - + + + + + + - - - - - - + + + + + + diff --git a/cime_config/testdefs/testmods_dirs/clm/DA_multidrv/user_nl_clm_0001 b/cime_config/testdefs/testmods_dirs/clm/DA_multidrv/user_nl_clm_0001 index 719785e25d..17e5cd7da9 100644 --- a/cime_config/testdefs/testmods_dirs/clm/DA_multidrv/user_nl_clm_0001 +++ b/cime_config/testdefs/testmods_dirs/clm/DA_multidrv/user_nl_clm_0001 @@ -10,4 +10,4 @@ hist_type1d_pertape = ' ',' ',' ' use_init_interp = .true. - finidat = '/glade/p/cisl/dares/RDA_strawman/CESM_ensembles/CLM/CLM5BGC-Crop/ctsm_2001-01-01-00000/clm5_f09_spinup80.clm2_0001.r.2001-01-01-00000.nc' + finidat = '/glade/campaign/cisl/dares/glade-p-dares-Oct2023/RDA_strawman/CESM_ensembles/CLM/CLM5BGC-Crop/ctsm_2001-01-01-00000/clm5_f09_spinup80.clm2_0001.r.2001-01-01-00000.nc' diff --git a/cime_config/testdefs/testmods_dirs/clm/DA_multidrv/user_nl_clm_0002 b/cime_config/testdefs/testmods_dirs/clm/DA_multidrv/user_nl_clm_0002 index 37d5b2b24e..6ef6ce8df2 100644 --- a/cime_config/testdefs/testmods_dirs/clm/DA_multidrv/user_nl_clm_0002 +++ b/cime_config/testdefs/testmods_dirs/clm/DA_multidrv/user_nl_clm_0002 @@ -10,4 +10,4 @@ hist_type1d_pertape = ' ',' ',' ' use_init_interp = .true. 
- finidat = '/glade/p/cisl/dares/RDA_strawman/CESM_ensembles/CLM/CLM5BGC-Crop/ctsm_2001-01-01-00000/clm5_f09_spinup80.clm2_0002.r.2001-01-01-00000.nc' + finidat = '/glade/campaign/cisl/dares/glade-p-dares-Oct2023/RDA_strawman/CESM_ensembles/CLM/CLM5BGC-Crop/ctsm_2001-01-01-00000/clm5_f09_spinup80.clm2_0002.r.2001-01-01-00000.nc' diff --git a/cime_config/testdefs/testmods_dirs/clm/Meier2022_surf_rough/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/Meier2022_surf_rough/user_nl_clm deleted file mode 100644 index 01df79ecba..0000000000 --- a/cime_config/testdefs/testmods_dirs/clm/Meier2022_surf_rough/user_nl_clm +++ /dev/null @@ -1,4 +0,0 @@ -z0param_method = 'Meier2022' -use_z0m_snowmelt = .true. -paramfile = '$DIN_LOC_ROOT/lnd/clm2/paramdata/ctsm51_params.RMz0.c220304.nc' - diff --git a/cime_config/testdefs/testmods_dirs/clm/USUMB_mct/shell_commands b/cime_config/testdefs/testmods_dirs/clm/USUMB_mct/shell_commands index 446125abf9..e410197c3d 100755 --- a/cime_config/testdefs/testmods_dirs/clm/USUMB_mct/shell_commands +++ b/cime_config/testdefs/testmods_dirs/clm/USUMB_mct/shell_commands @@ -1,12 +1,12 @@ -# shell commands to execute xmlchange commands written by PTCLMmkdata: -# ./PTCLMmkdata --cesm_root ../../../.. 
-s US-UMB -d /glade/p/cesm/cseg/inputdata --mydatadir=/glade/p/cesm/cseg/inputdata/lnd/clm2/PTCLMmydatafiles.c171024 +#!/bin/bash +# shell commands to execute xmlchange commands written by PTCLMmkdata: which is now unsupported ./xmlchange CLM_USRDAT_NAME=1x1pt_US-UMB ./xmlchange DATM_CLMNCEP_YR_START=1999 ./xmlchange DATM_CLMNCEP_YR_END=2006 # Comment this out if NINST_LND is greater than 1 (see: http://bugs.cgd.ucar.edu/show_bug.cgi?id=2521) ./xmlchange MPILIB=mpi-serial -./xmlchange ATM_DOMAIN_PATH=/glade/p/cesm/cseg/inputdata/lnd/clm2/PTCLMmydatafiles.c171024/1x1pt_US-UMB -./xmlchange LND_DOMAIN_PATH=/glade/p/cesm/cseg/inputdata/lnd/clm2/PTCLMmydatafiles.c171024/1x1pt_US-UMB +./xmlchange ATM_DOMAIN_PATH='$DIN_LOC_ROOT/lnd/clm2/PTCLMmydatafiles.c171024/1x1pt_US-UMB' +./xmlchange LND_DOMAIN_PATH='$DIN_LOC_ROOT/lnd/clm2/PTCLMmydatafiles.c171024/1x1pt_US-UMB' ./xmlchange ATM_DOMAIN_FILE=domain.lnd.1x1pt_US-UMB_navy.171024.nc ./xmlchange LND_DOMAIN_FILE=domain.lnd.1x1pt_US-UMB_navy.171024.nc ./xmlchange --append CLM_BLDNML_OPTS='-mask navy -no-crop' @@ -15,5 +15,4 @@ ./xmlchange ATM_NCPL=24 ./xmlchange RUN_STARTDATE=1999-01-01 ./xmlchange DATM_CLMNCEP_YR_ALIGN=1999 -./xmlchange DIN_LOC_ROOT=/glade/p/cesm/cseg/inputdata -./xmlchange DIN_LOC_ROOT_CLMFORC=/glade/p/cesm/cseg/inputdata/lnd/clm2/PTCLMmydatafiles.c171024 +./xmlchange DIN_LOC_ROOT_CLMFORC='$DIN_LOC_ROOT/lnd/clm2/PTCLMmydatafiles.c171024' diff --git a/cime_config/testdefs/testmods_dirs/clm/USUMB_mct/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/USUMB_mct/user_nl_clm index 38ce400297..8bb7848d49 100644 --- a/cime_config/testdefs/testmods_dirs/clm/USUMB_mct/user_nl_clm +++ b/cime_config/testdefs/testmods_dirs/clm/USUMB_mct/user_nl_clm @@ -1,5 +1,4 @@ -! user_nl_clm namelist options written by PTCLMmkdata: -! ./PTCLMmkdata --cesm_root ../../../.. 
-s US-UMB -d /glade/p/cesm/cseg/inputdata --mydatadir=/glade/p/cesm/cseg/inputdata/lnd/clm2/PTCLMmydatafiles.c171024 - fsurdat = '/glade/p/cesm/cseg/inputdata/lnd/clm2/PTCLMmydatafiles.c171024/1x1pt_US-UMB/surfdata_1x1pt_US-UMB_16pfts_Irrig_CMIP6_simyr2000_c171024.nc' +! user_nl_clm namelist options written by PTCLMmkdata, which is no longer available + fsurdat = '$DIN_LOC_ROOT/lnd/clm2/PTCLMmydatafiles.c171024/1x1pt_US-UMB/surfdata_1x1pt_US-UMB_16pfts_Irrig_CMIP6_simyr2000_c171024.nc' hist_nhtfrq = 0 hist_mfilt = 1200 diff --git a/cime_config/testdefs/testmods_dirs/clm/USUMB_nuopc/shell_commands b/cime_config/testdefs/testmods_dirs/clm/USUMB_nuopc/shell_commands index 08a9014abe..43fe16a192 100755 --- a/cime_config/testdefs/testmods_dirs/clm/USUMB_nuopc/shell_commands +++ b/cime_config/testdefs/testmods_dirs/clm/USUMB_nuopc/shell_commands @@ -1,5 +1,4 @@ -# shell commands to execute xmlchange commands written by PTCLMmkdata: -# ./PTCLMmkdata --cesm_root ../../../.. -s US-UMB -d /glade/p/cesm/cseg/inputdata --mydatadir=/glade/p/cesm/cseg/inputdata/lnd/clm2/PTCLMmydatafiles.c171024 +# shell commands to execute xmlchange commands written by PTCLMmkdata: which is now unsupported ./xmlchange CLM_USRDAT_NAME=1x1pt_US-UMB ./xmlchange DATM_YR_START=1999 ./xmlchange DATM_YR_END=2006 @@ -11,7 +10,6 @@ ./xmlchange ATM_NCPL=24 ./xmlchange RUN_STARTDATE=1999-01-01 ./xmlchange DATM_YR_ALIGN=1999 -./xmlchange DIN_LOC_ROOT=/glade/p/cesm/cseg/inputdata -./xmlchange DIN_LOC_ROOT_CLMFORC=/glade/p/cesm/cseg/inputdata/lnd/clm2/PTCLMmydatafiles.c171024 +./xmlchange DIN_LOC_ROOT_CLMFORC='$DIN_LOC_ROOT/lnd/clm2/PTCLMmydatafiles.c171024' ./xmlchange PTS_LON=275.2862 ./xmlchange PTS_LAT=45.5598 diff --git a/cime_config/testdefs/testmods_dirs/clm/USUMB_nuopc/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/USUMB_nuopc/user_nl_clm index 38ce400297..8bb7848d49 100644 --- a/cime_config/testdefs/testmods_dirs/clm/USUMB_nuopc/user_nl_clm +++ 
b/cime_config/testdefs/testmods_dirs/clm/USUMB_nuopc/user_nl_clm @@ -1,5 +1,4 @@ -! user_nl_clm namelist options written by PTCLMmkdata: -! ./PTCLMmkdata --cesm_root ../../../.. -s US-UMB -d /glade/p/cesm/cseg/inputdata --mydatadir=/glade/p/cesm/cseg/inputdata/lnd/clm2/PTCLMmydatafiles.c171024 - fsurdat = '/glade/p/cesm/cseg/inputdata/lnd/clm2/PTCLMmydatafiles.c171024/1x1pt_US-UMB/surfdata_1x1pt_US-UMB_16pfts_Irrig_CMIP6_simyr2000_c171024.nc' +! user_nl_clm namelist options written by PTCLMmkdata, which is no longer available + fsurdat = '$DIN_LOC_ROOT/lnd/clm2/PTCLMmydatafiles.c171024/1x1pt_US-UMB/surfdata_1x1pt_US-UMB_16pfts_Irrig_CMIP6_simyr2000_c171024.nc' hist_nhtfrq = 0 hist_mfilt = 1200 diff --git a/cime_config/testdefs/testmods_dirs/clm/ciso_cwd_hr/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/ciso_cwd_hr/user_nl_clm index 7ae4a69aad..534cbb84b8 100644 --- a/cime_config/testdefs/testmods_dirs/clm/ciso_cwd_hr/user_nl_clm +++ b/cime_config/testdefs/testmods_dirs/clm/ciso_cwd_hr/user_nl_clm @@ -1,2 +1,2 @@ -paramfile = '/glade/p/cesm/cseg/inputdata/lnd/clm2/paramdata/ctsm51_ciso_cwd_hr_params.c211112.nc' +paramfile = '$DIN_LOC_ROOT/lnd/clm2/paramdata/ctsm51_ciso_cwd_hr_params.c231117.nc' hist_fincl1 = 'CWDC_HR','C13_CWDC_HR','C14_CWDC_HR','CWD_HR_L2','CWD_HR_L2_vr','CWD_HR_L3','CWD_HR_L3_vr' diff --git a/cime_config/testdefs/testmods_dirs/clm/Meier2022_surf_rough/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/clm51cam6LndTuningMode/include_user_mods similarity index 100% rename from cime_config/testdefs/testmods_dirs/clm/Meier2022_surf_rough/include_user_mods rename to cime_config/testdefs/testmods_dirs/clm/clm51cam6LndTuningMode/include_user_mods diff --git a/cime_config/testdefs/testmods_dirs/clm/clm51cam6LndTuningMode/shell_commands b/cime_config/testdefs/testmods_dirs/clm/clm51cam6LndTuningMode/shell_commands new file mode 100644 index 0000000000..cf39cca1c0 --- /dev/null +++ 
b/cime_config/testdefs/testmods_dirs/clm/clm51cam6LndTuningMode/shell_commands @@ -0,0 +1,5 @@ +#!/bin/bash + +./xmlchange LND_TUNING_MODE="clm5_1_cam6.0" +./xmlchange ROF_NCPL='$ATM_NCPL' + diff --git a/cime_config/testdefs/testmods_dirs/clm/clm51cam6LndTuningModeCiso/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/clm51cam6LndTuningModeCiso/include_user_mods new file mode 100644 index 0000000000..aa76c52034 --- /dev/null +++ b/cime_config/testdefs/testmods_dirs/clm/clm51cam6LndTuningModeCiso/include_user_mods @@ -0,0 +1 @@ +../clm51cam6LndTuningMode diff --git a/cime_config/testdefs/testmods_dirs/clm/clm51cam6LndTuningModeCiso/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/clm51cam6LndTuningModeCiso/user_nl_clm new file mode 100644 index 0000000000..e7627dea50 --- /dev/null +++ b/cime_config/testdefs/testmods_dirs/clm/clm51cam6LndTuningModeCiso/user_nl_clm @@ -0,0 +1,5 @@ +! Turn on Carbon isotopes +use_c13 = .true. +use_c14 = .true. +use_c13_timeseries = .true. +use_c14_bombspike = .true. diff --git a/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/README b/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/README index 81fb991ed0..af5d819ffc 100644 --- a/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/README +++ b/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/README @@ -9,10 +9,10 @@ According to the file the following two files used in this test are default files for the following options: -fsurdat = '/glade/p/cesmdata/cseg/inputdata/lnd/clm2/surfdata_map/surfdata_10x15_78pfts_CMIP6_simyr1850_c170824.nc' +fsurdat = '$DIN_LOC_ROOT/lnd/clm2/surfdata_map/surfdata_10x15_78pfts_CMIP6_simyr1850_c170824.nc' hgrid="10x15" sim_year="1850" use_crop=".true." 
-flanduse_timeseries = '/glade/p/cesmdata/cseg/inputdata/lnd/clm2/surfdata_map/landuse.timeseries_10x15_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc' +flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_map/landuse.timeseries_10x15_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc' hgrid="10x15" sim_year_range="1850-2000" use_crop=".true." hgrid="10x15" rcp="8.5" sim_year_range="1850-2100" use_crop=".true." hgrid="10x15" rcp="6" sim_year_range="1850-2100" use_crop=".true." diff --git a/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/user_nl_clm index ff78e0122c..8c4fed6873 100644 --- a/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/user_nl_clm +++ b/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/user_nl_clm @@ -1,2 +1,2 @@ -fsurdat = '/glade/p/cesmdata/cseg/inputdata/lnd/clm2/surfdata_map/surfdata_10x15_78pfts_CMIP6_simyr1850_c170824.nc' -flanduse_timeseries = '/glade/p/cesmdata/cseg/inputdata/lnd/clm2/surfdata_map/landuse.timeseries_10x15_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc' +fsurdat = '$DIN_LOC_ROOT/lnd/clm2/surfdata_map/surfdata_10x15_78pfts_CMIP6_simyr1850_c170824.nc' +flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_map/landuse.timeseries_10x15_hist_78pfts_CMIP6_simyr1850-2015_c170824.nc' diff --git a/cime_config/testdefs/testmods_dirs/clm/cplhist/shell_commands b/cime_config/testdefs/testmods_dirs/clm/cplhist/shell_commands index a284aefcb9..f0eb85010b 100755 --- a/cime_config/testdefs/testmods_dirs/clm/cplhist/shell_commands +++ b/cime_config/testdefs/testmods_dirs/clm/cplhist/shell_commands @@ -1,5 +1,5 @@ ./xmlchange DATM_CPLHIST_CASE=cam6ctsm51_cesm23a12c_ne30pg3g17_CPLHIST_1850 -./xmlchange DATM_CPLHIST_DIR=/glade/p/cgd/tss/CTSM_datm_forcing_data/atm_forcing.cdeps_datm.CPLHIST_cam6ctsm51_cesm23a12c_ne30pg3g17_1850 +./xmlchange 
DATM_CPLHIST_DIR='$DIN_LOC_ROOT/atm/datm7/atm_forcing.cdeps_datm.CPLHIST_cam6ctsm51_cesm23a12c_ne30pg3g17_1850' ./xmlchange DATM_YR_START=1 ./xmlchange DATM_YR_END=1 ./xmlchange DATM_YR_ALIGN=1 diff --git a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/modify_smallville_with_dynurban.ncl b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/modify_smallville_with_dynurban.ncl index 15ec0469be..5ac651b508 100644 --- a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/modify_smallville_with_dynurban.ncl +++ b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/modify_smallville_with_dynurban.ncl @@ -18,8 +18,8 @@ begin print ("Start Time: "+systemfunc("date") ) print ("=========================================") - infile = "/glade/p/cgd/tss/people/oleson/modify_surfdata/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_c160127.nc" - outfile = "/glade/p/cgd/tss/people/oleson/modify_surfdata/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_dynUrban_c220223.nc" + infile = "/glade/campaign/cgd/tss/people/oleson/modify_surfdata/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_c160127.nc" + outfile = "/glade/campaign/cgd/tss/people/oleson/modify_surfdata/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_dynUrban_c220223.nc" system("cp " + infile + " " + outfile) @@ -61,7 +61,9 @@ begin outf->PCT_URBAN = pct_urban outf->PCT_CROP = pct_crop - outf@history = "This file was created with the following NCL script: /glade/p/cgd/tss/people/oleson/modify_surfdata/modify_smallville_with_dynurban.ncl. The file used as a template is: /glade/p/cesm/cseg/inputdata/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_c160127.nc. Key points are that urban area starts as 0, increases after the first year, then decreases after the second year. Medium density urban is set to zero to test the memory-saving behavior of PCT_URBAN_MAX. 
PCT_CROP is also changed so that PCT_URBAN + PCT_CROP <= 100. (Here, PCT_CROP increases and decreases at the same time as PCT_URBAN in order to exercise the simultaneous increase or decrease of two landunits, but that isn't a critical part of this test.). Note that the use of this file means that this testmod can only be used with the 1x1_smallvilleIA grid." + outf@history = "This file was created with the following NCL script: +/glade/campaign/cgd/tss/people/oleson/modify_surfdata/modify_smallville_with_dynurban.ncl. The file used as a template is: +/glade/campaign/cesm/cesmdata/inputdata/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_c160127.nc. Key points are that urban area starts as 0, increases after the first year, then decreases after the second year. Medium density urban is set to zero to test the memory-saving behavior of PCT_URBAN_MAX. PCT_CROP is also changed so that PCT_URBAN + PCT_CROP <= 100. (Here, PCT_CROP increases and decreases at the same time as PCT_URBAN in order to exercise the simultaneous increase or decrease of two landunits, but that isn't a critical part of this test.). Note that the use of this file means that this testmod can only be used with the 1x1_smallvilleIA grid." print ("=========================================") print ("Finish Time: "+systemfunc("date") ) diff --git a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm index 69a78ee17d..0ba93b1ee2 100644 --- a/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm +++ b/cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly/user_nl_clm @@ -1,9 +1,9 @@ do_transient_urban = .true. ! The flanduse_timeseries file was created with the following NCL script (a copy of this script is in cime_config/testdefs/testmods_dirs/clm/smallville_dynurban_monthly): -! 
/glade/p/cgd/tss/people/oleson/modify_surfdata/modify_smallville_with_dynurban.ncl +! /glade/campaign/cgd/tss/people/oleson/modify_surfdata/modify_smallville_with_dynurban.ncl ! The file used as a template is: -! /glade/p/cesm/cseg/inputdata/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_c160127.nc +! /glade/campaign/cgd/tss/people/oleson/modify_surfdata/lnd/clm2/surfdata_map/landuse.timeseries_1x1_smallvilleIA_hist_78pfts_simyr1850-1855_c160127.nc ! Key points are that urban area starts as 0, increases after the first year, then decreases after the second year. ! Medium density urban is set to zero to test the memory-saving behavior of PCT_URBAN_MAX. ! PCT_CROP is also changed so that PCT_URBAN + PCT_CROP <= 100. (Here, PCT_CROP increases and decreases at the same time as PCT_URBAN in order to exercise the simultaneous increase or decrease of two landunits, but that isn't a critical part of this test.) diff --git a/cime_config/testdefs/testmods_dirs/clm/sowingWindows/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/sowingWindows/user_nl_clm index d3d922f721..03165bb306 100644 --- a/cime_config/testdefs/testmods_dirs/clm/sowingWindows/user_nl_clm +++ b/cime_config/testdefs/testmods_dirs/clm/sowingWindows/user_nl_clm @@ -1,5 +1,5 @@ -stream_fldFileName_swindow_start = '/glade/p/cesmdata/cseg/inputdata/lnd/clm2/cropdata/calendars/processed/swindow_starts_ggcmi_crop_calendar_phase3_v1.01.2000-2000.20231005_145103.nc' -stream_fldFileName_swindow_end = '/glade/p/cesmdata/cseg/inputdata/lnd/clm2/cropdata/calendars/processed/swindow_ends_ggcmi_crop_calendar_phase3_v1.01.2000-2000.20231005_145103.nc' -stream_meshfile_cropcal = '/glade/p/cesmdata/cseg/inputdata/share/meshes/360x720_120830_ESMFmesh_c20210507_cdf5.nc' +stream_fldFileName_swindow_start = '$DIN_LOC_ROOT/lnd/clm2/cropdata/calendars/processed/swindow_starts_ggcmi_crop_calendar_phase3_v1.01.2000-2000.20231005_145103.nc' +stream_fldFileName_swindow_end = 
'$DIN_LOC_ROOT/lnd/clm2/cropdata/calendars/processed/swindow_ends_ggcmi_crop_calendar_phase3_v1.01.2000-2000.20231005_145103.nc' +stream_meshfile_cropcal = '$DIN_LOC_ROOT/share/meshes/360x720_120830_ESMFmesh_c20210507_cdf5.nc' stream_year_first_cropcal = 2000 stream_year_last_cropcal = 2000 diff --git a/cime_config/usermods_dirs/NEON/FATES/defaults/shell_commands b/cime_config/usermods_dirs/NEON/FATES/defaults/shell_commands index 798abcae84..5bec7332b0 100644 --- a/cime_config/usermods_dirs/NEON/FATES/defaults/shell_commands +++ b/cime_config/usermods_dirs/NEON/FATES/defaults/shell_commands @@ -39,3 +39,5 @@ fi # Explicitly set PIO Type to NETCDF since this is a single processor case (should already be set this way) ./xmlchange PIO_TYPENAME=netcdf + +./xmlchange NEONVERSION="v2" diff --git a/cime_config/usermods_dirs/NEON/defaults/shell_commands b/cime_config/usermods_dirs/NEON/defaults/shell_commands index 437201f2b2..39810dbc70 100644 --- a/cime_config/usermods_dirs/NEON/defaults/shell_commands +++ b/cime_config/usermods_dirs/NEON/defaults/shell_commands @@ -32,7 +32,7 @@ else fi # If needed for SP simulations: & set history file variables -if [[ $compset =~ .*CLM[0-9]+%.*SP.* ]]; then +if [[ $compset =~ .*CLM[0-9]+%[^_]*SP.* ]]; then if [[ $TEST != "TRUE" ]]; then ./xmlchange STOP_OPTION=nyears fi @@ -43,3 +43,5 @@ fi # Explicitly set PIO Type to NETCDF since this is a single processor case (should already be set this way) ./xmlchange PIO_TYPENAME=netcdf + +./xmlchange NEONVERSION="v2" diff --git a/doc/.ChangeLog_template b/doc/.ChangeLog_template index 9f4f64c587..bbd65c8c83 100644 --- a/doc/.ChangeLog_template +++ b/doc/.ChangeLog_template @@ -96,11 +96,11 @@ infrastructure should be run when appropriate, as described below. 
build-namelist tests (if CLMBuildNamelist.pm has changed): - cheyenne - + derecho - tools-tests (test/tools) (if tools have been changed): - cheyenne - + derecho - python testing (if python code has changed; see instructions in python/README.md; document testing done): @@ -114,15 +114,15 @@ infrastructure should be run when appropriate, as described below. doing their own baseline generation. If you are already running the full aux_clm then you do NOT need to separately run the clm_pymods test suite, and you can remove the following line.] - clm_pymods test suite on cheyenne - + clm_pymods test suite on derecho - regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): - cheyenne ---- + derecho ----- izumi ------- fates tests: (give name of baseline if different from CTSM tagname, normally fates baselines are fates--) - cheyenne ---- + derecho ----- izumi ------- any other testing (give details below): diff --git a/doc/ChangeLog b/doc/ChangeLog index 66fc04a752..04694b5c07 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,625 @@ =============================================================== +Tag name: ctsm5.1.dev158 +Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326) +Date: Thu 07 Dec 2023 10:22:46 PM MST +One-line Summary: First tag with testing moved to Derecho and working PE-layouts for Derecho + +Purpose and description of changes +---------------------------------- + +First tag for CTSM working and tested on Derecho. Update CDEPS so that we can run with the NAG compiler. +Working PE layouts. Changes from CESM3_dev over to main-dev. Testing added for Derecho. +Do some work to get tools testing working on Derecho, not completed. +Add Derecho to the README files under tools/modify_input_files and tools/site_and_regional +Remove some /glade/p references in the code. 
This is still an issue in the: doc, lilac, tools/mksurfdata_map, +tools/contrib, tools/mkmapdata directories, and the namelist_defaults_ctsm_tools file. + + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed or introduced +------------------------ + +CTSM issues fixed (include CTSM Issue #): + Resolves Derecho transition: Tests and test infrastructure #1995 + Resolves Updating Externals for Derecho causes Izumi nag tests to fail #2280 + Resolves Transient simulation with ne30np4.pg3 fails due to floating point error #2268 + Resolves Need to move location of DA_multidrv finidat files from /glade/p to /glade/campaign #2282 + Works on Add support to test/tools/test_driver.sh for Derecho for NEON tools #2276 + +Notes of particular relevance for users +--------------------------------------- + +Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables): + Added clm5_1_cam6 option to LND_TUNING_MODE + This is important in order to enable using latest clm5_1 physics with fully coupled cases + +Changes made to namelist defaults (e.g., changed parameter values): + Make sure there are finidat files for clm5_1 with CAM6 for 1850 and 2000 (from clm5_0 version at f09 + Make sure ne30np4.pg3 is setup + Some adjustments for ne30np4 and ne30np4.pg3 to make sure landuse.timeseries files are correct + +Notes of particular relevance for developers: +--------------------------------------------- + +Changes to tests or testing: + Add cesm3_dev test list from the CESM3_dev branch + Make sure ne30np4.pg3 + + Unit tests fail on Derecho because of 
ESMCI/ccs_config_cesm#131 + Derecho tests with DEBUG=T, intel compiler, and mpi-serial fail because of ESMCI/ccs_config_cesm#130 + + +Testing summary: regular + fates + ctsm_sci + cesm3_dev +---------------- + [PASS means all tests PASS; OK means tests PASS other than expected fails.] + + build-namelist tests (if CLMBuildNamelist.pm has changed): + + derecho - PASS + + python testing (if python code has changed; see instructions in python/README.md; document testing done): + + derecho - PASS + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + cheyenne ---- OK + derecho ----- OK + izumi ------- OK + + fates tests: (give name of baseline if different from CTSM tagname, normally fates baselines are fates--) + derecho ----- OK + izumi ------- OK + + + +Answer changes +-------------- + +Changes answers relative to baseline: No, bit-for-bit + +Other details +------------- +[Remove any lines that don't apply. Remove entire section if nothing applies.] + +List any externals directories updated (cime, rtm, mosart, cism, fates, etc.): cdeps + cdeps -> cdeps1.0.24 (allows to run with NAG compiler) + +Pull Requests that document the changes (include PR ids): + #2269 -- First tag with testing moved to Derecho and working PE layouts +(https://github.com/ESCOMP/ctsm/pull) + +=============================================================== +=============================================================== +Tag name: ctsm5.1.dev157 +Originator(s): samrabin (Sam Rabin, UCAR/TSS, samrabin@ucar.edu) +Date: Tue Dec 5 09:48:26 MST 2023 +One-line Summary: Update Externals to work on Derecho + +Purpose and description of changes +---------------------------------- + +Updates Externals.cfg to work on Derecho. 
+ + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed or introduced +------------------------ + +CTSM issues fixed (include CTSM Issue #): +* Resolves #2217 ("Tags for building CTSM library on Derecho [WRF-CTSM]", https://github.com/ESCOMP/CTSM/issues/2217) +* Resolves #2090 ("Update to cesm2_3_beta16 externals.", https://github.com/ESCOMP/CTSM/issues/2090) + +Known bugs introduced in this tag (include issue #): +* #2280: Updating Externals for Derecho causes Izumi nag tests to fail (https://github.com/ESCOMP/CTSM/issues/2280) + + +Notes of particular relevance for developers: +--------------------------------------------- + +Changes to tests or testing: +* All Izumi nag tests fail early in the run phase. This should be fixed in the next tag, which will be a more comprehensive Derecho-focused update. + + +Testing summary: +---------------- + + [PASS means all tests PASS; OK means tests PASS other than expected fails.] 
+ + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + cheyenne ---- OK + izumi ------- PASS (except nag) + + +Other details +------------- + +List any externals directories updated (cime, rtm, mosart, cism, fates, etc.): +* cime: cime6.0.125 -> cime6.0.175 +* cmeps: cmeps0.14.21 -> cmeps0.14.43 +* cdeps: cdeps1.0.13 -> cdeps1.0.23 +* cpl7: cpl77.0.5 -> cpl77.0.7 +* parallelio: pio2_5_10 -> pio2_6_2 + +Pull Requests that document the changes (include PR ids): +* #2270: Update Externals.cfg to work on Derecho (https://github.com/ESCOMP/CTSM/pull/2270) + +=============================================================== +=============================================================== +Tag name: ctsm5.1.dev156 +Originator(s): samrabin (Sam Rabin, UCAR/TSS, samrabin@ucar.edu) +Date: Thu Nov 30 15:27:18 MST 2023 +One-line Summary: Do not use Meier roughness by default + +Purpose and description of changes +---------------------------------- + +ctsm5.1.dev155 had turned on Meier2022 surface roughness calculation by default for 5.1 compsets. Several bugs have recently emerged that were not caught by pre-merge testing, so this tag reverts that change. Thus, the ZengWang2007 method is default for all compsets again. + + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[X] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Notes of particular relevance for users +--------------------------------------- + +Changes made to namelist defaults (e.g., changed parameter values): 5.1 compsets now use ZengWang2007 method (instead of Meier2022) for roughness calculation. 
+ + +Testing summary: +---------------- + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + cheyenne ---- DIFF + izumi ------- DIFF + + +Answer changes +-------------- + +Changes answers relative to baseline: YES + + Summarize any changes to answers, i.e., + - what code configurations: 5.1 compsets + - what platforms/compilers: All + - nature of change (roundoff; larger than roundoff/same climate; new climate): new climate + + No climate-evaluating run performed, as this change is reverting part of a commit thats barely a week old. + + +Other details +------------- +Pull Requests that document the changes (include PR ids): +(https://github.com/ESCOMP/ctsm/pull) +* #2273: Do not use Meier roughness by default, even with 5.1. (https://github.com/ESCOMP/CTSM/pull/2273) + +=============================================================== +=============================================================== +Tag name: ctsm5.1.dev155 +Originator(s): samrabin (Sam Rabin, UCAR/TSS, samrabin@ucar.edu) +Date: Mon Nov 27 21:16:51 MST 2023 +One-line Summary: Use baset_latvary parameters + +Purpose and description of changes +---------------------------------- + +Namelist parameters baset_latvary_slope and baset_latvary_intercept were never actually used, with values of 0.4 and 12 being hard-coded in the relevant subroutine instead. This PR fixes that, and also adds unit testing of a refactored function that uses them. + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] 
+ +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Testing summary: +---------------- + + [PASS means all tests PASS; OK means tests PASS other than expected fails.] + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + cheyenne ---- OK + izumi ------- PASS + + +Other details +------------- + +Pull Requests that document the changes (include PR ids): +* #2240: Use baset_latvary parameters (https://github.com/ESCOMP/CTSM/pull/2240) + +=============================================================== +=============================================================== +Tag name: ctsm5.1.dev154 +Originator(s): slevis (Samuel Levis,UCAR/TSS,303-665-1310) +Date: Wed Nov 22 09:53:01 MST 2023 +One-line Summary: New params files: Changes for Meier roughness, MIMICS, and SNICAR, and changes to leafcn and k*_nonmyc + +Purpose and description of changes +---------------------------------- + +This PR (#2258) addresses several issues: +1) Start using existing new params file for Meier roughness: +/glade/campaign/cesm/cesmdata/inputdata/lnd/clm2/paramdata/ctsm51_params.RMz0.c231011.nc +and include bug-fix #2219. +2) Update forcing heights per #2071. +3) Update params file for MIMICS per #1845. +4) Make leafcn for pfts 15 and 16 the same per #2184. +5) Switch the values of params kc_nonmyc and kn_nonmyc per #2120. +6) Move SNICAR parameters to ctsm51, clm50, and clm45 params files per #2247. + +See #2258 and the above issues for a list of contributors. + + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] 
+ +[x] clm5_1 + +[x] clm5_0 + +[x] ctsm5_0-nwp + +[x] clm4_5 + + +Bugs fixed or introduced +------------------------ +CTSM issues fixed (include CTSM Issue #): +Fixes #2219 +Fixes #2071 +Fixes #1845 +Fixes #2184 +Fixes #2120 +Fixes #2247 + + +Notes of particular relevance for users +--------------------------------------- +Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables): + SNICAR namelist variable fresh_snw_rds_max moved to the params file. + +Changes made to namelist defaults (e.g., changed parameter values): + SNICAR namelist variable fresh_snw_rds_max moved to the params file. + Pointing to new params files for clm4_5, clm5_0, clm5_1. + +Changes to the datasets (e.g., parameter, surface or initial files): + New clm5_1 params file with new parameters and with modified values of existing parameters. + New clm5_0 and clm4_5 params files with new parameters for SNICAR. + ./rimport on the new params files fails with "No space left on device" but the 4 files are safe here: + /glade/u/home/slevis/paramfiles/*_params.c231117.nc + +Notes of particular relevance for developers: +--------------------------------------------- +Changes to tests or testing: +- Remove clm50 Meier test. Should be clm51 but the compset I1850Clm51BgcNoAnthro does not exist. +- Remove Meier testmod directory and remove such reference from corresponding tests. +- Change mimics tests from clm50 to clm51. +- For details, see the updated testlist_clm.xml file. + +Testing summary: +---------------- + + [PASS means all tests PASS; OK means tests PASS other than expected fails.] 
+ + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + cheyenne ---- OK + izumi ------- OK + + +Answer changes +-------------- + +Changes answers relative to baseline: YES + + Summarize any changes to answers, i.e., + - what code configurations: ALL + - what platforms/compilers: ALL + - nature of change: + Larger than roundoff/same climate? + I will post this tag on the Answer changing tags wiki page as "SIGNIFICANT" + and will run a simulation and diagnostics to compare against dev145. + + I used the izumi test-suite to perform one bfb sanity test: + I backed up my branch to 6dc1966 (before the snicar mods), then I put back the changes of the + commit right after snicar (71e174f). Comparing to dev154 (this tag's new baseline), + the izumi test-suite passed bfb (12 gnu, 18 intel, and 32 nag tests). + Other mods are quite confined and clear, so I will not pursue other sanity tests. + + Changes to answers commit-by-commit in this PR: + f9978db and b8c71fa: These two change answers for Meier2022 and, therefore, clm51 only + 626f520: Takes out if (z0param_method == 'Meier2022'), so changes answers for all three CLMs + 319d194: Changes answers for mimics and, therefore clm51 only (order of ops, so roundoff) + 2ee6943: Changes clm51 params file, so affects clm51 only (expect more than roundoff) + f185a31: bfb + 6dc1966: This git merge escomp/master probably does change answers from previous commit + 29ca5ad and 71e174f: Puts snicar params on the params files for all three CLMs; sanity test gave bfb + +Other details +------------- +Pull Requests that document the changes (include PR ids): + https://github.com/ESCOMP/ctsm/pull/2258 + +=============================================================== +=============================================================== +Tag name: ctsm5.1.dev153 +Originator(s): afoster (Adrianna Foster) and johnpaulalex (John Paul Alex) +Date: Fri Nov 17 11:53:14 MST 2023 +One-line 
Summary: Call new FATES-side FatesReadParameters + +Purpose and description of changes +---------------------------------- + +Have CTSM use the new code path in FATES that allows passing in a `fates_param_reader_type`, which does the actual work reading the parameter files, in lieu of calling CTSM methods. + +Also updated NEON usermods to use version 2 data by default, rather than latest. + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + +Bugs fixed or introduced +------------------------ + +Some progress towards CTSM#2006 and FATES#1076 + +Testing summary: +---------------- + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + cheyenne ---- OK + izumi ------- OK + + fates tests: (give name of baseline if different from CTSM tagname, normally fates baselines are fates--) + cheyenne ---- OK + izumi ------- OK + + fates baseline: `fates-sci.1.68.2_api.31.0.0-ctsm5.1.dev153` + + +Answer changes +-------------- + +None + + +Other details +-------------- + +List any externals directories updated (cime, rtm, mosart, cism, fates, etc.): fates + +Pull Requests that document the changes (include PR ids): +https://github.com/NGEET/fates/pull/1096 +https://github.com/ESCOMP/CTSM/pull/2198 + + +=============================================================== +=============================================================== +Tag name: ctsm5.1.dev152 +Originator(s): multiple (tking (Teagan King); slevis (Sam Levis); AdrienDams (Adrien Damseaux); afoster (Adrianna Foster); samrabin (Sam Rabin); ekluzek (Erik Kluzek); wwieder (Will Wieder); sacks (Bill Sacks); a few others listed below) +Date: Tue Nov 14 
17:09:43 MST 2023 +One-line Summary: Mv tools to /python and add tests; add snow_thermal_cond_method; a few fixes / refactors + +Purpose and description of changes +---------------------------------- + +#2156 tking, slevis +Move the following scripts to /python/ctsm/site_and_regional +and make wrapper scripts for them in /tools/site_and_regional: +- run_neon.py +- neon_surf_wrapper.py +- modify_singlept_site_neon.py + +Add unit testing for: +- iso_utils +- modify_singlept_site_neon +- neon_surf_wrapper +- run_neon + +Add system testing for: +- modify_singlept_site_neon +- run_neon + +#2148 Adrien Damseaux (AWI, Germany), Victoria Dutch, Leanne Wake +Add namelist option snow_thermal_cond_method to select between Jordan (1991) (default) and +Sturm et al. (1997). Sturm option described for single point runs by Dutch et al. (2022). + +#2233 afoster, sacks +Fix a compiler error (for GNU 13.2) within cropcalStreamMod. +Simple fix was to change whole-array assignments/references for the starts and ends arrays to specifically +reference bounds (begp and endp). + +#2235 srabin, wwieder +Refactor ssp_anomaly_forcing script to make it easier to read and more amenable to future development. +- Adds --output-dir option; default ./anomaly_forcing reproduces previous behavior +- Makes synonyms for options with hyphens replacing underscores + +#2237 srabin +Add the following fields to restart files: +- repr_grainc_to_seed_perharv_patch +- swindow_starts_thisyr_patch +- swindow_ends_thisyr_patch + +#2044 ekluzek +More confined regular expression for NEON and a few simple fixes. + + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] 
+ +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed or introduced +------------------------ +CTSM issues fixed (include CTSM Issue #): +Closes #2156 Fixes #1441 +Closes #2148 +Closes #2233 Fixes #2232 +Closes #2235 +Closes #2237 Fixes #2236 +Closes #2044 Fixes #2039 Fixes #2103 Fixes #2028 Fixes #1506 Fixes #1499 + +Known Issues: +pylint errors from previous work remain in this tag. + +Notes of particular relevance for users +--------------------------------------- +Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables): +#2156 New wrapper scripts don't have .py suffixes. +#2148 New namelist option snow_thermal_cond_method as described above. +#2233 None +#2235 New --output-dir option; default ./anomaly_forcing reproduces previous behavior. +Also makes synonyms for options with hyphens replacing underscores. +#2237 None +#2044 None + +Notes of particular relevance for developers: +--------------------------------------------- +Changes to tests or testing: +#2156 Numerous changes were made to include new tests. +README.md for testing was updated to clarify that arguments should be used. + +Testing summary: +---------------- + + [PASS means all tests PASS; OK means tests PASS other than expected fails.] 
+ + python testing (if python code has changed; see instructions in python/README.md; document testing done): + + (any machine) - cheyenne OK (pylint suggestions from previous work remain) + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + cheyenne ---- OK + izumi ------- OK, the following PASS/FAILs are expected: + +PASS ERS_Lm20_Mmpi-serial.1x1_smallvilleIA.I2000Clm50BgcCropQianRs.izumi_gnu.clm-cropMonthlyNoinitial COMPARE_base_rest (UNEXPECTED: expected FAIL) +FAIL ERS_Lm20_Mmpi-serial.1x1_smallvilleIA.I2000Clm50BgcCropQianRs.izumi_gnu.clm-cropMonthlyNoinitial BASELINE ctsm5.1.dev151: DIFF + +FAIL SMS_Ld10_D_Mmpi-serial.CLM_USRDAT.I1PtClm51Bgc.izumi_nag.clm-default--clm-NEON-NIWO BASELINE ctsm5.1.dev151: DIFF +FAIL SMS_Ld10_D_Mmpi-serial.CLM_USRDAT.I1PtClm51Bgc.izumi_nag.clm-NEON-MOAB--clm-PRISM BASELINE ctsm5.1.dev151: DIFF + + +Answer changes +-------------- +Changes answers relative to baseline: +#2156 NO +#2148 NO +#2233 NO +#2235 NO, adds attributes to write_climo files' dimension variables +#2237 ONLY Smallville "no initial" restarts; specifically, this previously +failing (COMPARE_base_rest) aux_clm test +ERS_Lm20_Mmpi-serial.1x1_smallvilleIA.I2000Clm50BgcCropQianRs.izumi_gnu.clm-cropMonthlyNoinitial +now differs from the baseline as follows: + SUMMARY of cprnc: + A total number of 76 fields were compared + and 3 had differences in fill patterns + A total number of 2 fields could not be analyzed + diff_test: the two files seem to be DIFFERENT +#2044 ONLY the NEON tests listed above due to the one-line change in +cime_config/usermods_dirs/NEON/defaults/shell_commands in #2044 + +Other details +------------- +Pull Requests that document the changes (include PR ids): + https://github.com/ESCOMP/ctsm/pull/2156 + https://github.com/ESCOMP/ctsm/pull/2148 + https://github.com/ESCOMP/ctsm/pull/2233 + https://github.com/ESCOMP/ctsm/pull/2235 + https://github.com/ESCOMP/ctsm/pull/2237 + 
https://github.com/ESCOMP/ctsm/pull/2044 + +=============================================================== +=============================================================== Tag name: ctsm5.1.dev151 Originator(s): rgknox (Ryan Knox,LAWRENCE BERKELEY NATIONAL LABORATORY,510-495-2153) Date: Sat Nov 11 16:53:01 MST 2023 diff --git a/doc/ChangeSum b/doc/ChangeSum index 9ef20790d9..abf4401013 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,12 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.1.dev158 erik 12/07/2023 First tag with testing moved to Derecho and working PE-layouts for Derecho + ctsm5.1.dev157 samrabin 12/05/2023 Update Externals to work on Derecho + ctsm5.1.dev156 samrabin 11/30/2023 Do not use Meier roughness by default. + ctsm5.1.dev155 samrabin 11/27/2023 Use baset_latvary parameters + ctsm5.1.dev154 slevis 11/22/2023 New params files with changes for Meier roughness, MIMICS, and SNICAR, and changes to leafcn and k*_nonmyc + ctsm5.1.dev153 afoster 11/17/2023 Call new FATES-side FatesReadParameters + ctsm5.1.dev152 multiple 11/14/2023 Mv tools to /python and add tests; add snow_thermal_cond_method; a few fixes / refactors ctsm5.1.dev151 rgknox 11/11/2023 Fixes to FATES long run restarts ctsm5.1.dev150 rgknox 11/06/2023 FATES API fix to support future fates npp-fixation coupling, and urgent coupling fixes with E3SM. ctsm5.1.dev149 samrabin 11/03/2023 Rearrange leaf/stem "harvest" and fix soil gas diffusivity diff --git a/python/README.md b/python/README.md index cf3b893084..c40f55c6c7 100644 --- a/python/README.md +++ b/python/README.md @@ -47,7 +47,8 @@ thing, but support different options: 2. via `./run_ctsm_py_tests` You can specify various arguments to this; run `./run_ctsm_py_tests - -h` for details + -h` for details. Please specify either --unit or --sys rather than + not including any arguments. 
In any configuration where you run the system tests, you need to first execute `module load nco`. diff --git a/python/conda_env_ctsm_py.txt b/python/conda_env_ctsm_py.txt index 0ac4656e73..0306a9ca06 100644 --- a/python/conda_env_ctsm_py.txt +++ b/python/conda_env_ctsm_py.txt @@ -1,7 +1,8 @@ # -# NOTE: Changes here should be coordinated with the cgd python environment file +# NOTE: Changes here should be coordinated with the cgd python environment file +# +# NOTE: Derecho already has conda installed for you, so you just need to do the following... # -# To install this on cheyenne with conda loaded in modules # use the top level bash script: # ../py_env_create # Do this each time you update your CTSM Version # conda activate ctsm_pylib # Do this anytime you want to run a CTSM python script diff --git a/python/conda_env_ctsm_py_cgd.txt b/python/conda_env_ctsm_py_cgd.txt index 8312dc328a..3afcf4bba2 100644 --- a/python/conda_env_ctsm_py_cgd.txt +++ b/python/conda_env_ctsm_py_cgd.txt @@ -3,7 +3,6 @@ # # This should be coordinated with the main python environment file! 
# -# To install this on cheyenne with conda loaded in modules # use the top level bash script: # ../py_env_create # Do this each time you update your CTSM Version # conda activate ctsm_pylib # Do this anytime you want to run a CTSM python script diff --git a/python/ctsm/crop_calendars/generate_gdds.py b/python/ctsm/crop_calendars/generate_gdds.py index b54e7df40f..16e3e130da 100644 --- a/python/ctsm/crop_calendars/generate_gdds.py +++ b/python/ctsm/crop_calendars/generate_gdds.py @@ -1,4 +1,4 @@ -paramfile_dir = "/glade/p/cesmdata/cseg/inputdata/lnd/clm2/paramdata" +paramfile_dir = "/glade/campaign/cesm/cesmdata/cseg/inputdata/lnd/clm2/paramdata" # Import other shared functions import os diff --git a/python/ctsm/machine_defaults.py b/python/ctsm/machine_defaults.py index 7486237323..0f3900c152 100644 --- a/python/ctsm/machine_defaults.py +++ b/python/ctsm/machine_defaults.py @@ -47,7 +47,9 @@ "cheyenne": MachineDefaults( job_launcher_type=JOB_LAUNCHER_QSUB, scratch_dir=os.path.join(os.path.sep, "glade", "scratch", get_user()), - baseline_dir=os.path.join(os.path.sep, "glade", "p", "cgd", "tss", "ctsm_baselines"), + baseline_dir=os.path.join( + os.path.sep, "glade", "p", "cgd", "tss", "To_Be_Safely_Deleted", "ctsm_baselines" + ), account_required=True, create_test_retry=0, # NOTE(wjs, 2022-02-23) By default, use the regular queue, even for @@ -66,6 +68,25 @@ ) }, ), + "derecho": MachineDefaults( + job_launcher_type=JOB_LAUNCHER_QSUB, + scratch_dir=os.path.join(os.path.sep, "glade", "derecho", "scratch", get_user()), + baseline_dir=os.path.join(os.path.sep, "glade", "campaign", "cgd", "tss", "ctsm_baselines"), + account_required=True, + create_test_retry=0, + create_test_queue="regular", + job_launcher_defaults={ + JOB_LAUNCHER_QSUB: QsubDefaults( + queue="main", + walltime="03:50:00", + extra_args="", + # The following assumes a single node, with a single mpi proc; we may want + # to add more flexibility in the future, making the node / proc counts + # individually 
selectable + required_args="-l select=1:ncpus=128:mpiprocs=1 -V -r n -k oed", + ) + }, + ), "hobart": MachineDefaults( job_launcher_type=JOB_LAUNCHER_QSUB, scratch_dir=os.path.join(os.path.sep, "scratch", "cluster", get_user()), diff --git a/python/ctsm/machine_utils.py b/python/ctsm/machine_utils.py index da5c8b9c6a..970d2e9080 100644 --- a/python/ctsm/machine_utils.py +++ b/python/ctsm/machine_utils.py @@ -41,6 +41,8 @@ def _machine_from_hostname(hostname): """ if re.match(r"cheyenne\d+", hostname): machine = "cheyenne" + elif re.match(r"derecho\d+", hostname): + machine = "derecho" else: machine = hostname diff --git a/python/ctsm/run_ctsm_py_tests.py b/python/ctsm/run_ctsm_py_tests.py index 0542dc41cb..8b39d69afa 100644 --- a/python/ctsm/run_ctsm_py_tests.py +++ b/python/ctsm/run_ctsm_py_tests.py @@ -45,7 +45,10 @@ def main(description): def _commandline_args(description): - """Parse and return command-line arguments""" + """Parse and return command-line arguments + Note that run_ctsm_py_tests is not intended to be + used without argument specifications + """ parser = argparse.ArgumentParser( description=description, formatter_class=argparse.RawTextHelpFormatter ) diff --git a/tools/site_and_regional/modify_singlept_site_neon.py b/python/ctsm/site_and_regional/modify_singlept_site_neon.py similarity index 86% rename from tools/site_and_regional/modify_singlept_site_neon.py rename to python/ctsm/site_and_regional/modify_singlept_site_neon.py index e135760a48..1013ba944c 100755 --- a/tools/site_and_regional/modify_singlept_site_neon.py +++ b/python/ctsm/site_and_regional/modify_singlept_site_neon.py @@ -31,81 +31,34 @@ """ # TODO (NS) # --[] If subset file not found run subset_data.py -# --[] List of valid neon sites for all scripts come from one place. # --[] Download files only when available. 
# Import libraries from __future__ import print_function +import argparse +from datetime import date +from getpass import getuser +import glob +import logging import os import sys -import glob -import argparse import requests -import logging import numpy as np import pandas as pd import xarray as xr from packaging import version -from datetime import date -from getpass import getuser - +from ctsm.path_utils import path_to_ctsm_root myname = getuser() # -- valid neon sites -valid_neon_sites = [ - "ABBY", - "BARR", - "BART", - "BLAN", - "BONA", - "CLBJ", - "CPER", - "DCFS", - "DEJU", - "DELA", - "DSNY", - "GRSM", - "GUAN", - "HARV", - "HEAL", - "JERC", - "JORN", - "KONA", - "KONZ", - "LAJA", - "LENO", - "MLBS", - "MOAB", - "NIWO", - "NOGP", - "OAES", - "ONAQ", - "ORNL", - "OSBS", - "PUUM", - "RMNP", - "SCBI", - "SERC", - "SJER", - "SOAP", - "SRER", - "STEI", - "STER", - "TALL", - "TEAK", - "TOOL", - "TREE", - "UKFS", - "UNDE", - "WOOD", - "WREF", - "YELL", -] +valid_neon_sites = glob.glob( + os.path.join(path_to_ctsm_root(), "cime_config", "usermods_dirs", "NEON", "[!d]*") +) def get_parser(): @@ -153,14 +106,14 @@ def get_parser(): parser.add_argument( "--inputdata-dir", help=""" - Directory to write updated single point surface dataset. + Directory containing standard input files from CESM input data such as the surf_soildepth_file. 
[default: %(default)s] """, action="store", dest="inputdatadir", type=str, required=False, - default="/glade/p/cesmdata/cseg/inputdata" + default="/glade/p/cesmdata/cseg/inputdata", ) parser.add_argument( "-d", @@ -233,10 +186,7 @@ def get_neon(neon_dir, site_name): print("Download finished successfully for", site_name) elif response.status_code == 404: sys.exit( - "Data for this site " - + site_name - + " was not available on the neon server:" - + url + "Data for this site " + site_name + " was not available on the neon server:" + url ) print("Download exit status code: ", response.status_code) @@ -270,12 +220,12 @@ def find_surffile(surf_dir, site_name, pft_16): """ if pft_16: - sf_name = "surfdata_1x1_NEON_"+site_name+"*hist_16pfts_Irrig_CMIP6_simyr2000_*.nc" + sf_name = "surfdata_1x1_NEON_" + site_name + "*hist_16pfts_Irrig_CMIP6_simyr2000_*.nc" else: - sf_name = "surfdata_1x1_NEON_" +site_name+"*hist_78pfts_CMIP6_simyr2000_*.nc" + sf_name = "surfdata_1x1_NEON_" + site_name + "*hist_78pfts_CMIP6_simyr2000_*.nc" - print (os.path.join(surf_dir , sf_name)) - surf_file = sorted(glob.glob(os.path.join(surf_dir , sf_name))) + print(os.path.join(surf_dir, sf_name)) + surf_file = sorted(glob.glob(os.path.join(surf_dir, sf_name))) if len(surf_file) > 1: print("The following files found :", *surf_file, sep="\n- ") @@ -287,10 +237,14 @@ def find_surffile(surf_dir, site_name, pft_16): surf_file = surf_file[0] else: sys.exit( - "Surface data for this site " + str(site_name) + " was not found:" + str(surf_dir) + str(sf_name) + - "." + - "\n" + - "Please run ./subset_data.py for this site." + "Surface data for this site " + + str(site_name) + + " was not found:" + + str(surf_dir) + + str(sf_name) + + "." + + "\n" + + "Please run ./subset_data.py for this site." ) return surf_file @@ -298,7 +252,7 @@ def find_surffile(surf_dir, site_name, pft_16): def find_soil_structure(args, surf_file): """ Function for finding surface dataset soil - strucutre using surface data metadata. 
+ structure using surface data metadata. In CLM surface data, soil layer information is in a file from surface data metadata @@ -324,10 +278,8 @@ def find_soil_structure(args, surf_file): print("------------") # print (f1.attrs["Soil_texture_raw_data_file_name"]) - clm_input_dir = os.path.join( args.inputdatadir, "lnd/clm2/rawdata/" ) - surf_soildepth_file = os.path.join( - clm_input_dir, f1.attrs["Soil_texture_raw_data_file_name"] - ) + clm_input_dir = os.path.join(args.inputdatadir, "lnd/clm2/rawdata/") + surf_soildepth_file = os.path.join(clm_input_dir, f1.attrs["Soil_texture_raw_data_file_name"]) if os.path.exists(surf_soildepth_file): print( @@ -345,9 +297,7 @@ def find_soil_structure(args, surf_file): else: sys.exit( - "Cannot find soil structure file : " - + surf_soildepth_file - + "for the surface dataset." + "Cannot find soil structure file : " + surf_soildepth_file + "for the surface dataset." ) return soil_bot, soil_top @@ -356,7 +306,7 @@ def find_soil_structure(args, surf_file): def update_metadata(nc, surf_file, neon_file, zb_flag): """ Function for updating modified surface dataset - metadat for neon sites. + metadata for neon sites. 
Args: nc (xr Dataset): netcdf file including updated neon surface data @@ -492,9 +442,9 @@ def download_file(url, fname): elif response.status_code == 404: print("File " + fname + "was not available on the neon server:" + url) except Exception as err: - print ('The server could not fulfill the request.') - print ('Something went wrong in downloading', fname) - print ('Error code:', err.code) + print("The server could not fulfill the request.") + print("Something went wrong in downloading", fname) + print("Error code:", err.code) def fill_interpolate(f2, var, method): @@ -526,7 +476,6 @@ def fill_interpolate(f2, var, method): def main(): - args = get_parser().parse_args() # -- debugging option @@ -536,10 +485,13 @@ def main(): # Check if pandas is a recent enough version pdvers = pd.__version__ if version.parse(pdvers) < version.parse("1.1.0"): - sys.exit("The pandas version in your python environment is too old, update to a newer version of pandas (>=1.1.0): version=%s", pdvers ) - + sys.exit( + """The pandas version in your python environment is too old, + update to a newer version of pandas (>=1.1.0): version=%s""", + pdvers, + ) - file_time = check_neon_time() + # file_time = check_neon_time() # -- specify site from which to extract data site_name = args.site_name @@ -550,12 +502,9 @@ def main(): # -- directory structure current_dir = os.getcwd() - parent_dir = os.path.dirname(current_dir) clone_dir = os.path.abspath(os.path.join(__file__, "../../../..")) neon_dir = os.path.join(clone_dir, "neon_surffiles") - print("Present Directory", current_dir) - # -- download neon data if needed neon_file = get_neon(neon_dir, site_name) @@ -575,18 +524,11 @@ def main(): # better suggestion by WW to write dzsoi to neon surface dataset # This todo needs to go to the subset_data - # TODO Will: if I sum them up , are they 3.5? 
(m) YES - print("soil_top:", soil_top) - print("soil_bot:", soil_bot) - print("Sum of soil top depths :", sum(soil_top)) - print("Sum of soil bottom depths :", sum(soil_bot)) - soil_top = np.cumsum(soil_top) soil_bot = np.cumsum(soil_bot) soil_mid = 0.5 * (soil_bot - soil_top) + soil_top # print ("Cumulative sum of soil bottom depths :", sum(soil_bot)) - obs_top = df["biogeoTopDepth"] / 100 obs_bot = df["biogeoBottomDepth"] / 100 # -- Mapping surface dataset and neon soil levels @@ -635,12 +577,10 @@ def main(): # -- Check to make sure the rounded oc is not higher than carbon_tot. # -- Use carbon_tot if estimated_oc is bigger than carbon_tot. - if estimated_oc > carbon_tot: - estimated_oc = carbon_tot + estimated_oc = min(estimated_oc, carbon_tot) layer_depth = ( - df["biogeoBottomDepth"][bin_index[soil_lev]] - - df["biogeoTopDepth"][bin_index[soil_lev]] + df["biogeoBottomDepth"][bin_index[soil_lev]] - df["biogeoTopDepth"][bin_index[soil_lev]] ) # f2["ORGANIC"][soil_lev] = estimated_oc * bulk_den / 0.58 @@ -709,9 +649,7 @@ def main(): print("Updated : ", f2.PCT_CROP.values) print("Updating PCT_NAT_PFT") - #print (f2.PCT_NAT_PFT) print(f2.PCT_NAT_PFT.values[0]) - #f2.PCT_NAT_PFT.values[0] = [[100.0]] print(f2.PCT_NAT_PFT[0].values) out_dir = args.out_dir @@ -729,13 +667,4 @@ def main(): print(f2.attrs) f2.to_netcdf(path=wfile, mode="w", format="NETCDF3_64BIT") - print( - "Successfully updated surface data file for neon site(" - + site_name - + "):\n - " - + wfile - ) - - -if __name__ == "__main__": - main() + print("Successfully updated surface data file for neon site(" + site_name + "):\n - " + wfile) diff --git a/python/ctsm/site_and_regional/neon_surf_wrapper.py b/python/ctsm/site_and_regional/neon_surf_wrapper.py new file mode 100755 index 0000000000..a2cc619a29 --- /dev/null +++ b/python/ctsm/site_and_regional/neon_surf_wrapper.py @@ -0,0 +1,227 @@ +#! 
/usr/bin/env python3 +""" +|------------------------------------------------------------------| +|--------------------- Instructions -----------------------------| +|------------------------------------------------------------------| +This script is a simple wrapper for neon sites that performs the +following: + 1) For neon sites, subset surface dataset from global dataset + (i.e. ./subset_data.py ) + 2) Download neon and update the created surface dataset + based on the downloaded neon data. + (i.e. modify_singlept_site_neon.py) + +Instructions for running using conda python environments: + +../../py_env_create +conda activate ctsm_pylib + +""" +# TODO +# Automatic downloading of missing files if they are missing +# -[ ] Download neon sites and dom pft file +# -[ ] Make sure verbose works for printing out commands running + +# Import libraries +from __future__ import print_function + +import os +import logging +import argparse +import subprocess +import tqdm +import pandas as pd + + +def get_parser(): + """ + Get parser object for this script. + """ + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter + ) + + parser.print_usage = parser.print_help + + parser.add_argument( + "-v", + "--verbose", + help="Verbose mode will print more information. ", + action="store_true", + dest="verbose", + default=False, + ) + + parser.add_argument( + "--16pft", + help="Create and/or modify 16-PFT surface datasets (e.g. for a FATES run) ", + action="store_true", + dest="pft_16", + default=False, + ) + + parser.add_argument( + "-m", + "--mixed", + help="Do not overwrite surface dataset to be just one dominant PFT at 100%", + action="store_true", + dest="mixed", + default=False, + ) + + return parser + + +def execute(command): + """ + Function for running a command on shell. + Args: + command (str): + command that we want to run. + Raises: + Error with the return code from shell. 
+ """ + print("\n", " >> ", *command, "\n") + + try: + subprocess.check_call(command, stdout=open(os.devnull, "w"), stderr=subprocess.STDOUT) + + except subprocess.CalledProcessError as e: + print(e) + + +def main(): + """ + Loop through neon sites and execute subset and modify commands + """ + args = get_parser().parse_args() + + if args.verbose: + logging.basicConfig(level=logging.DEBUG) + + neon_sites = pd.read_csv("neon_sites_dompft.csv") + + for i, row in tqdm.tqdm(neon_sites.iterrows()): + lat = row["Lat"] + lon = row["Lon"] + site = row["Site"] + pft = row["pft"] + clmsite = "1x1_NEON_" + site + print("Now processing site :", site) + + if args.mixed and args.pft_16: + # use surface dataset with 16 pfts, and don't overwrite with 100% 1 dominant PFT + # don't set crop flag + # don't set a dominant pft + subset_command = [ + "./subset_data", + "point", + "--lat", + str(lat), + "--lon", + str(lon), + "--site", + clmsite, + "--create-surface", + "--uniform-snowpack", + "--cap-saturation", + "--verbose", + "--overwrite", + ] + modify_command = [ + "./modify_singlept_site_neon", + "--neon_site", + site, + "--surf_dir", + "subset_data_single_point", + "--16pft", + ] + elif args.pft_16: + # use surface dataset with 16 pfts, but overwrite to 100% 1 dominant PFT + # don't set crop flag + # set dominant pft + subset_command = [ + "./subset_data", + "point", + "--lat", + str(lat), + "--lon", + str(lon), + "--site", + clmsite, + "--dompft", + str(pft), + "--create-surface", + "--uniform-snowpack", + "--cap-saturation", + "--verbose", + "--overwrite", + ] + modify_command = [ + "./modify_singlept_site_neon", + "--neon_site", + site, + "--surf_dir", + "subset_data_single_point", + "--16pft", + ] + elif args.mixed: + # use surface dataset with 78 pfts, and don't overwrite with 100% 1 dominant PFT + # NOTE: FATES will currently not run with a 78-PFT surface dataset + # set crop flag + # don't set dominant pft + subset_command = [ + "./subset_data", + "point", + "--lat", + 
str(lat), + "--lon", + str(lon), + "--site", + clmsite, + "--crop", + "--create-surface", + "--uniform-snowpack", + "--cap-saturation", + "--verbose", + "--overwrite", + ] + modify_command = [ + "./modify_singlept_site_neon", + "--neon_site", + site, + "--surf_dir", + "subset_data_single_point", + ] + else: + # use surface dataset with 78 pfts, and overwrite to 100% 1 dominant PFT + # NOTE: FATES will currently not run with a 78-PFT surface dataset + # set crop flag + # set dominant pft + subset_command = [ + "./subset_data", + "point", + "--lat", + str(lat), + "--lon", + str(lon), + "--site", + clmsite, + "--crop", + "--dompft", + str(pft), + "--create-surface", + "--uniform-snowpack", + "--cap-saturation", + "--verbose", + "--overwrite", + ] + modify_command = [ + "./modify_singlept_site_neon", + "--neon_site", + site, + "--surf_dir", + "subset_data_single_point", + ] + execute(subset_command) + execute(modify_command) diff --git a/tools/site_and_regional/run_neon.py b/python/ctsm/site_and_regional/run_neon.py similarity index 84% rename from tools/site_and_regional/run_neon.py rename to python/ctsm/site_and_regional/run_neon.py index 84c00715fb..104e325617 100755 --- a/tools/site_and_regional/run_neon.py +++ b/python/ctsm/site_and_regional/run_neon.py @@ -51,37 +51,32 @@ # Import libraries - +import argparse +import datetime +import glob +import logging import os +import re +import shutil import sys import time -import shutil -import logging -import requests -import argparse -import re -import subprocess import pandas as pd -import glob -import datetime -from getpass import getuser # Get the ctsm util tools and then the cime tools. 
-_CTSM_PYTHON = os.path.abspath( - os.path.join(os.path.dirname(__file__), "..", "..", "python") -) +_CTSM_PYTHON = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "python")) sys.path.insert(1, _CTSM_PYTHON) -from ctsm import add_cime_to_path +from CIME import build +from CIME.case import Case +from CIME.utils import safe_copy, expect, symlink_force + from ctsm.path_utils import path_to_ctsm_root +from ctsm.utils import parse_isoduration from ctsm.download_utils import download_file -import CIME.build as build +from ctsm import add_cime_to_path + from standard_script_setup import * -from CIME.case import Case -from CIME.utils import safe_copy, expect, symlink_force, run_cmd_no_fail -from argparse import RawTextHelpFormatter -from CIME.locked_files import lock_file, unlock_file logger = logging.getLogger(__name__) @@ -194,7 +189,7 @@ def get_parser(args, description, valid_neon_sites): ) parser.add_argument( - "--prism", + "--prism", help=""" Uses the PRISM reanaylsis precipitation data for the site instead of the NEON data (only available over Continental US) @@ -205,9 +200,8 @@ def get_parser(args, description, valid_neon_sites): default=False, ) - parser.add_argument( - "--experiment", + "--experiment", help=""" Appends the case name with string for model experiment """, @@ -274,12 +268,11 @@ def get_parser(args, description, valid_neon_sites): """, action="store", dest="user_version", - required = False, - type = str, - choices= ['v1','v2','v3'], + required=False, + type=str, + choices=["v1", "v2", "v3"], ) - args = CIME.utils.parse_args_and_handle_standard_logging_options(args, parser) if "all" in args.neon_sites: @@ -299,7 +292,8 @@ def get_parser(args, description, valid_neon_sites): elif args.run_type == "postad": run_length = "100Y" else: - # The transient run length is set by cdeps atm buildnml to the last date of the available tower data + # The transient run length is set by cdeps atm buildnml to + # the last date of the available 
tower data # this value is not used run_length = "4Y" else: @@ -310,7 +304,8 @@ def get_parser(args, description, valid_neon_sites): if args.base_case_root: base_case_root = os.path.abspath(args.base_case_root) - # Reduce output level for this script unless --debug or --verbose is provided on the command line + # Reduce output level for this script unless --debug or + # --verbose is provided on the command line if not args.debug and not args.verbose: root_logger = logging.getLogger() root_logger.setLevel(logging.WARN) @@ -332,31 +327,6 @@ def get_parser(args, description, valid_neon_sites): ) -def get_isosplit(s, split): - if split in s: - n, s = s.split(split) - else: - n = 0 - return n, s - - -def parse_isoduration(s): - """ - simple ISO 8601 duration parser, does not account for leap years and assumes 30 day months - """ - # Remove prefix - s = s.split("P")[-1] - - # Step through letter dividers - years, s = get_isosplit(s, "Y") - months, s = get_isosplit(s, "M") - days, s = get_isosplit(s, "D") - - # Convert all to timedelta - dt = datetime.timedelta(days=int(days) + 365 * int(years) + 30 * int(months)) - return int(dt.total_seconds() / 86400) - - class NeonSite: """ A class for encapsulating neon sites. 
@@ -381,9 +351,7 @@ def __init__(self, name, start_year, end_year, start_month, end_month, finidat): def __str__(self): return ( - str(self.__class__) - + "\n" - + "\n".join((str(item) + " = " for item in (self.__dict__))) + str(self.__class__) + "\n" + "\n".join((str(item) + " = " for item in (self.__dict__))) ) def build_base_case( @@ -408,9 +376,7 @@ def build_base_case( """ print("---- building a base case -------") self.base_case_root = output_root - user_mods_dirs = [ - os.path.join(cesmroot, "cime_config", "usermods_dirs", "NEON", self.name) - ] + user_mods_dirs = [os.path.join(cesmroot, "cime_config", "usermods_dirs", "NEON", self.name)] if not output_root: output_root = os.getcwd() case_path = os.path.join(output_root, self.name) @@ -449,9 +415,15 @@ def build_base_case( existingcompname = case.get_value("COMPSET") match = re.search("^HIST", existingcompname, flags=re.IGNORECASE) if re.search("^HIST", compset, flags=re.IGNORECASE) is None: - expect( match == None, "Existing base case is a historical type and should not be -- rerun with the --orverwrite option" ) + expect( + match is None, + "Existing base case is a historical type and should not be -- rerun with the --overwrite option", + ) else: - expect( match != None, "Existing base case should be a historical type and is not -- rerun with the --orverwrite option" ) + expect( + match is not None, + "Existing base case should be a historical type and is not -- rerun with the --overwrite option", + ) # reset the case case.case_setup(reset=True) case_path = case.get_value("CASEROOT") @@ -471,25 +443,27 @@ def build_base_case( return case_path def diff_month(self): + """ + Determine difference between two dates in months + """ d1 = datetime.datetime(self.end_year, self.end_month, 1) d2 = datetime.datetime(self.start_year, self.start_month, 1) return (d1.year - d2.year) * 12 + d1.month - d2.month - + def get_batch_query(self, case): """ - Function for querying the batch queue query command for a case, 
depending on the - user's batch system. + Function for querying the batch queue query command for a case, depending on the + user's batch system. Args: case: case object """ - + if case.get_value("BATCH_SYSTEM") == "none": - return "none" - else: - return case.get_value("batch_query") - + return "none" + return case.get_value("batch_query") + def run_case( self, base_case_root, @@ -503,10 +477,34 @@ def run_case( rerun=False, experiment=False, ): + """ + Run case. + + Args: + self + base_case_root: str, opt + file path of base case + run_type: str, opt + transient, post_ad, or ad case, default transient + prism: bool, opt + if True, use PRISM precipitation, default False + run_length: str, opt + length of run, default '4Y' + user_version: str, opt + default 'latest' + overwrite: bool, opt + default False + setup_only: bool, opt + default False; if True, set up but do not run case + no_batch: bool, opt + default False + rerun: bool, opt + default False + experiment: str, opt + name of experiment, default False + """ user_mods_dirs = [ - os.path.join( - self.cesmroot, "cime_config", "usermods_dirs", "NEON", self.name - ) + os.path.join(self.cesmroot, "cime_config", "usermods_dirs", "NEON", self.name) ] expect( os.path.isdir(base_case_root), @@ -516,15 +514,13 @@ def run_case( if user_version: version = user_version else: - version = 'latest' + version = "latest" - print ("using this version:", version) + print("using this version:", version) - if experiment != None: + if experiment is not None: self.name = self.name + "." + experiment - case_root = os.path.abspath( - os.path.join(base_case_root, "..", self.name + "." + run_type) - ) + case_root = os.path.abspath(os.path.join(base_case_root, "..", self.name + "." 
+ run_type)) rundir = None if os.path.isdir(case_root): @@ -538,15 +534,17 @@ def run_case( existingcompname = case.get_value("COMPSET") match = re.search("^HIST", existingcompname, flags=re.IGNORECASE) if re.search("^HIST", compset, flags=re.IGNORECASE) is None: - expect( match == None, "Existing base case is a historical type and should not be -- rerun with the --orverwrite option" ) + expect( + match is None, + "Existing base case is a historical type and should not be -- rerun with the --overwrite option", + ) else: - expect( match != None, "Existing base case should be a historical type and is not -- rerun with the --orverwrite option" ) - if os.path.isfile(os.path.join(rundir, "ESMF_Profile.summary")): - print( - "Case {} appears to be complete, not rerunning.".format( - case_root - ) + expect( + match is not None, + "Existing base case should be a historical type and is not -- rerun with the --overwrite option", ) + if os.path.isfile(os.path.join(rundir, "ESMF_Profile.summary")): + print("Case {} appears to be complete, not rerunning.".format(case_root)) elif not setup_only: print("Resubmitting case {}".format(case_root)) case.submit(no_batch=no_batch) @@ -557,17 +555,13 @@ def run_case( print(f"Use {batch_query} to check its run status") return else: - logger.warning( - "Case already exists in {}, not overwritting.".format(case_root) - ) + logger.warning("Case already exists in {}, not overwritting.".format(case_root)) return if run_type == "postad": adcase_root = case_root.replace(".postad", ".ad") if not os.path.isdir(adcase_root): - logger.warning( - "postad requested but no ad case found in {}".format(adcase_root) - ) + logger.warning("postad requested but no ad case found in {}".format(adcase_root)) return if not os.path.isdir(case_root): @@ -580,15 +574,14 @@ def run_case( # that the shell_commands file is copied, as well as taking care of the DATM inputs. 
# See https://github.com/ESCOMP/CTSM/pull/1872#pullrequestreview-1169407493 # - basecase.create_clone( - case_root, keepexe=True, user_mods_dirs=user_mods_dirs - ) + basecase.create_clone(case_root, keepexe=True, user_mods_dirs=user_mods_dirs) with Case(case_root, read_only=False) as case: if run_type != "transient": - # in order to avoid the complication of leap years we always set the run_length in units of days. - case.set_value("STOP_OPTION", "ndays") - case.set_value("REST_OPTION", "end") + # in order to avoid the complication of leap years, + # we always set the run_length in units of days. + case.set_value("STOP_OPTION", "ndays") + case.set_value("REST_OPTION", "end") case.set_value("CONTINUE_RUN", False) case.set_value("NEONVERSION", version) if prism: @@ -639,6 +632,9 @@ def run_case( print(f"Use {batch_query} to check its run status") def set_ref_case(self, case): + """ + Set an existing case as the reference case, eg for use with spinup. + """ rundir = case.get_value("RUNDIR") case_root = case.get_value("CASEROOT") if case_root.endswith(".postad"): @@ -660,9 +656,7 @@ def set_ref_case(self, case): case.set_value("RUN_REFDIR", refrundir) case.set_value("RUN_REFCASE", os.path.basename(ref_case_root)) refdate = None - for reffile in glob.iglob( - refrundir + "/{}{}.clm2.r.*.nc".format(self.name, root) - ): + for reffile in glob.iglob(refrundir + "/{}{}.clm2.r.*.nc".format(self.name, root)): m = re.search("(\d\d\d\d-\d\d-\d\d)-\d\d\d\d\d.nc", reffile) if m: refdate = m.group(1) @@ -677,9 +671,7 @@ def set_ref_case(self, case): if not os.path.isdir(os.path.join(rundir, "inputdata")) and os.path.isdir( os.path.join(refrundir, "inputdata") ): - symlink_force( - os.path.join(refrundir, "inputdata"), os.path.join(rundir, "inputdata") - ) + symlink_force(os.path.join(refrundir, "inputdata"), os.path.join(rundir, "inputdata")) case.set_value("RUN_REFDATE", refdate) if case_root.endswith(".postad"): @@ -688,14 +680,16 @@ def set_ref_case(self, case): return True def 
modify_user_nl(self, case_root, run_type, rundir): + """ + Modify user namelist. If transient, include finidat in user_nl; + Otherwise, adjust user_nl to include different mfilt, nhtfrq, and variables in hist_fincl1. + """ user_nl_fname = os.path.join(case_root, "user_nl_clm") user_nl_lines = None if run_type == "transient": if self.finidat: user_nl_lines = [ - "finidat = '{}/inputdata/lnd/ctsm/initdata/{}'".format( - rundir, self.finidat - ) + "finidat = '{}/inputdata/lnd/ctsm/initdata/{}'".format(rundir, self.finidat) ] else: user_nl_lines = [ @@ -766,16 +760,15 @@ def parse_neon_listing(listing_file, valid_neon_sites): tmp_df = tmp_df[tmp_df[9].str.contains("\d\d\d\d-\d\d.nc")] # -- find all the data versions - versions = tmp_df[7].unique() - #print ("all versions available for ", site_name,":", *versions) + # versions = tmp_df[7].unique() + # print ("all versions available for ", site_name,":", *versions) latest_version = tmp_df[7].iloc[-1] - #print ("latests version available for ", site_name,":", latest_version) + # print ("latests version available for ", site_name,":", latest_version) tmp_df = tmp_df[tmp_df[7].str.contains(latest_version)] # -- remove .nc from the file names tmp_df[9] = tmp_df[9].str.replace(".nc", "", regex=False) - tmp_df2 = tmp_df[9].str.split("-", expand=True) # ignore any prefix in file name and just get year @@ -800,9 +793,7 @@ def parse_neon_listing(listing_file, valid_neon_sites): if site_name in line: finidat = line.split(",")[0].split("/")[-1] - neon_site = NeonSite( - site_name, start_year, end_year, start_month, end_month, finidat - ) + neon_site = NeonSite(site_name, start_year, end_year, start_month, end_month, finidat) logger.debug(neon_site) available_list.append(neon_site) @@ -810,6 +801,10 @@ def parse_neon_listing(listing_file, valid_neon_sites): def main(description): + """ + Determine valid neon sites. Make an output directory if it does not exist. + Loop through requested sites and run CTSM at that site. 
+ """ cesmroot = path_to_ctsm_root() # Get the list of supported neon sites from usermods valid_neon_sites = glob.glob( @@ -847,9 +842,9 @@ def main(description): res = "CLM_USRDAT" if run_type == "transient": - compset = "IHist1PtClm51Bgc" + compset = "IHist1PtClm51Bgc" else: - compset = "I1PtClm51Bgc" + compset = "I1PtClm51Bgc" # -- Looping over neon sites @@ -875,8 +870,3 @@ def main(description): rerun, experiment, ) - - - -if __name__ == "__main__": - main(__doc__) diff --git a/python/ctsm/test/test_sys_modify_singlept_site_neon.py b/python/ctsm/test/test_sys_modify_singlept_site_neon.py new file mode 100755 index 0000000000..b5ded30399 --- /dev/null +++ b/python/ctsm/test/test_sys_modify_singlept_site_neon.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python3 + +""" +System tests for modify_singlept_site_neon.py +""" + +import os +import unittest +import tempfile +import shutil +import sys + +from ctsm.path_utils import path_to_ctsm_root +from ctsm import unit_testing +from ctsm.site_and_regional.modify_singlept_site_neon import main, get_parser + +# Allow test names that pylint doesn't like; otherwise hard to make them +# readable +# pylint: disable=invalid-name + + +class TestSysModifySingleptSiteNeon(unittest.TestCase): + """System tests for modify_singlept_site_neon""" + + def setUp(self): + """ + Make /_tempdir for use by these tests. + Check tempdir for history files + """ + self._tempdir = tempfile.mkdtemp() + testinputs_path = os.path.join(path_to_ctsm_root(), "python/ctsm/test/testinputs") + self._cfg_file_path = os.path.join( + testinputs_path, "modify_singlept_site_neon_opt_sections.cfg" + ) + + def tearDown(self): + """ + Remove temporary directory + """ + shutil.rmtree(self._tempdir, ignore_errors=True) + + def test_modify_site(self): + """ + Test modifying a singple point site. 
+ This test currently checks that the run fails due to dir structure + + TODO: The primary items to test here are the following: + 1) Fields are overwritten with site-specific data for neon sites + 2) Downloaded data is used in surface dataset + 3) Check specific fields listed in update_metadata for correct output + 4) Check that a netcdf with correct formatting is created + """ + sys.argv = [ + "modify_singlept_site_neon", + "--neon_site", + path_to_ctsm_root() + "/ctsm/cime_config/usermods_dirs/NEON/ABBY", + ] + # TODO: the above requires a full path instead of site name + # because of how run_neon is configured. + # This needs to be fixed in run_neon. + parser = get_parser() + with self.assertRaises(SystemExit): + print( + """This should currently fail due to directory structure in run_neon + and the directory structure listed in sys.argv""" + ) + main() + + +if __name__ == "__main__": + unit_testing.setup_for_tests() + unittest.main() diff --git a/python/ctsm/test/test_sys_run_neon.py b/python/ctsm/test/test_sys_run_neon.py new file mode 100755 index 0000000000..b6814ee2bc --- /dev/null +++ b/python/ctsm/test/test_sys_run_neon.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python3 + +"""System tests for run_neon + +""" + +import glob +import os +import unittest +import tempfile +import shutil +import sys + +from ctsm import unit_testing +from ctsm.site_and_regional.run_neon import main, get_parser +from ctsm.path_utils import path_to_ctsm_root + +# Allow test names that pylint doesn't like; otherwise hard to make them +# readable +# pylint: disable=invalid-name + + +class TestSysRunNeon(unittest.TestCase): + """System tests for run_neon""" + + def setUp(self): + """ + Make /_tempdir for use by these tests. 
+ Check tempdir for history files + """ + self._tempdir = tempfile.mkdtemp() + os.chdir(self._tempdir) # cd to tempdir + + def tearDown(self): + """ + Remove temporary directory + """ + shutil.rmtree(self._tempdir, ignore_errors=True) + + def test_one_site(self): + """ + This test specifies a site to run + Run the tool, check that file structure is set up correctly + """ + + # run the run_neon tool + sys.argv = [ + os.path.join(path_to_ctsm_root(), "tools", "site_and_regional", "run_neon"), + "--neon-sites", + "BART", + "--setup-only", + "--output-root", + self._tempdir, + ] + valid_neon_sites = ["ABBY", "OSBS", "BART"] + parser = get_parser(sys.argv, "description_for_parser", valid_neon_sites) + main("") + + # assert that BART directories were created during setup + self.assertTrue("BART" in glob.glob(self._tempdir + "/BART*")[0]) + + # TODO: Would also be useful to test the following items: + # It might be good to ensure the log files are working as expected? + # Test running transient, ad and post ad cases. + # Test use of base case root. + # Test for using prism? 
+ + +if __name__ == "__main__": + unit_testing.setup_for_tests() + unittest.main() diff --git a/python/ctsm/test/test_unit_iso_utils.py b/python/ctsm/test/test_unit_iso_utils.py new file mode 100755 index 0000000000..c58beef52e --- /dev/null +++ b/python/ctsm/test/test_unit_iso_utils.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python3 + +"""Unit tests for the iso functions in utils +""" + +import unittest + +from ctsm import unit_testing +from ctsm.utils import parse_isoduration, get_isosplit + +# Allow names that pylint doesn't like, because otherwise I find it hard +# to make readable unit test names +# pylint: disable=invalid-name + + +class TestIsoUtils(unittest.TestCase): + """Tests of iso functions in utils""" + + def test_iso_split_for_Year(self): + """ + Tests the get_isosplit function for a strings with Years + """ + iso_string = "0Y" + self.assertEqual(get_isosplit(iso_string, "Y"), ("0", "")) + iso_string = "1Y" + self.assertEqual(get_isosplit(iso_string, "Y"), ("1", "")) + iso_string = "4Y" + self.assertEqual(get_isosplit(iso_string, "Y"), ("4", "")) + iso_string = "100Y" + self.assertEqual(get_isosplit(iso_string, "Y"), ("100", "")) + iso_string = "999999Y" + self.assertEqual(get_isosplit(iso_string, "Y"), ("999999", "")) + + def test_parse_isoduration_for_Years(self): + """ + Tests the parse_isoduration function for iso strings with Years + """ + days_in_year = 365 + iso_string = "0Y" + self.assertEqual(parse_isoduration(iso_string), 0) + iso_string = "1Y" + self.assertEqual(parse_isoduration(iso_string), days_in_year) + iso_string = "4Y" + self.assertEqual(parse_isoduration(iso_string), 4 * days_in_year) + iso_string = "100Y" + self.assertEqual(parse_isoduration(iso_string), 100 * days_in_year) + iso_string = "999999Y" + self.assertEqual(parse_isoduration(iso_string), 999999 * days_in_year) + + +if __name__ == "__main__": + unit_testing.setup_for_tests() + unittest.main() diff --git a/python/ctsm/test/test_unit_modify_singlept_site_neon.py 
b/python/ctsm/test/test_unit_modify_singlept_site_neon.py new file mode 100755 index 0000000000..ecd96357b3 --- /dev/null +++ b/python/ctsm/test/test_unit_modify_singlept_site_neon.py @@ -0,0 +1,151 @@ +#!/usr/bin/env python3 +""" +Unit tests for modify_singlept_site_neon + +You can run this by: + python -m unittest test_unit_modify_singlept_site_neon.py +""" + +import os +import shutil +import sys +import tempfile +import unittest +from datetime import date +import xarray as xr + +# -- add python/ctsm to path (needed if we want to run the test stand-alone) +_CTSM_PYTHON = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) +sys.path.insert(1, _CTSM_PYTHON) + +from ctsm.path_utils import path_to_ctsm_root + +# pylint: disable=wrong-import-position +from ctsm import unit_testing +from ctsm.site_and_regional.modify_singlept_site_neon import ( + get_neon, + find_surffile, + update_metadata, + update_time_tag, + check_neon_time, +) + +# pylint: disable=invalid-name + + +class TestModifySingleptSiteNeon(unittest.TestCase): + """ + Basic class for testing modify_singlept_site_neon.py. + """ + + def setUp(self): + """ + Make /_tempdir for use by these tests. 
+ Check tempdir for history files + """ + self._tempdir = tempfile.mkdtemp() + + def tearDown(self): + """ + Remove temporary directory + """ + shutil.rmtree(self._tempdir, ignore_errors=True) + + def test_get_neon(self): + """ + Test to see if neon data for valid site name is found + """ + site_name = "ABBY" + neon_dir = self._tempdir + file = get_neon(neon_dir, site_name) + self.assertEqual(file.split("/")[-1][:4], "ABBY", "CSV file did not download as expected") + + def test_get_neon_false_site(self): + """ + Test to see if neon data for invalid site name is found + """ + site_name = "INVALID_SITE" + neon_dir = self._tempdir + with self.assertRaises(SystemExit): + get_neon(neon_dir, site_name) + + def test_find_surffile(self): + """ + Test that surface file does not exist in tempdir and raises system exit error + """ + surf_dir = self._tempdir + site_name = "BART" + pft_16 = True + with self.assertRaises(SystemExit): + find_surffile(surf_dir, site_name, pft_16) + + def test_find_soil_structure(self): + """ + Test to ensure that correct attributes are found for find_soil_structure. + soil_texture_raw_data_file_name should be found, and test should go through sysexit. + """ + surf_file_name = "surfdata_1x1_mexicocityMEX_hist_16pfts_Irrig_CMIP6_simyr2000_c221206.nc" + surf_file = os.path.join( + path_to_ctsm_root(), + "python/ctsm/test/testinputs/", + surf_file_name, + ) + f1 = xr.open_dataset(surf_file) + self.assertEqual( + f1.attrs["Soil_texture_raw_data_file_name"], + "mksrf_soitex.10level.c010119.nc", + "did not retrieve expected surface soil texture filename from surf file", + ) + + def test_update_metadata(self): + """ + Test to ensure that the file was updated today. 
+ """ + surf_file = "surfdata_1x1_mexicocityMEX_hist_16pfts_Irrig_CMIP6_simyr2000_c221206.nc" + neon_file = "dummy_neon_file.nc" + zb_flag = True + f1 = xr.open_dataset( + os.path.join(path_to_ctsm_root(), "python/ctsm/test/testinputs/") + surf_file + ) + f2 = update_metadata(f1, surf_file, neon_file, zb_flag) + today = date.today() + today_string = today.strftime("%Y-%m-%d") + self.assertEqual(f2.attrs["Updated_on"], today_string, "File was not updated as expected") + + def test_update_time_tag(self): + """ + Test that file ending is updated + """ + self.assertEqual( + update_time_tag("test_YYMMDD.nc")[-9:-3], + date.today().strftime("%y%m%d"), + "File ending not as expected", + ) + + def test_check_neon_time(self): + """ + Test that dictionary containing last modified information is correctly downloaded + """ + previous_dir = os.getcwd() + os.chdir(self._tempdir) # cd to tempdir + last_abby_download = check_neon_time()[ + "https://storage.neonscience.org/neon-ncar/NEON/surf_files/v1/ABBY_surfaceData.csv" + ] + self.assertEqual( + len(last_abby_download), + 19, + "last ABBY download has unexpected date format or does not exist", + ) + # Note: this checks that data is not pulled from before 2021; + # we may want to update this occassionally, + # but in any case it confirms that the oldest data is not found + self.assertGreater( + int(last_abby_download[:4]), 2021, "ABBY download is older than expected" + ) + # change back to previous dir once listing.csv file is created in tempdir and test complete + os.chdir(previous_dir) + + +if __name__ == "__main__": + unit_testing.setup_for_tests() + unittest.main() diff --git a/python/ctsm/test/test_unit_neon_surf_wrapper.py b/python/ctsm/test/test_unit_neon_surf_wrapper.py new file mode 100755 index 0000000000..443af2079b --- /dev/null +++ b/python/ctsm/test/test_unit_neon_surf_wrapper.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python3 +""" +Unit tests for neon_surf_wrapper + +You can run this by: + python -m unittest 
test_unit_neon_surf_wrapper.py +""" + +import unittest +import os +import sys + +# -- add python/ctsm to path (needed if we want to run the test stand-alone) +_CTSM_PYTHON = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) +sys.path.insert(1, _CTSM_PYTHON) + +# pylint: disable=wrong-import-position +from ctsm import unit_testing +from ctsm.site_and_regional.neon_surf_wrapper import get_parser + +# pylint: disable=invalid-name + + +class TestNeonSurfWrapper(unittest.TestCase): + """ + Basic class for testing neon_surf_wrapper.py. + """ + + def test_parser(self): + """ + Test that parser has same defaults as expected + """ + + self.assertEqual(get_parser().argument_default, None, "Parser not working as expected") + + +if __name__ == "__main__": + unit_testing.setup_for_tests() + unittest.main() diff --git a/python/ctsm/test/test_unit_run_neon.py b/python/ctsm/test/test_unit_run_neon.py new file mode 100755 index 0000000000..a35608e249 --- /dev/null +++ b/python/ctsm/test/test_unit_run_neon.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python3 +""" +Unit tests for run_neon + +You can run this by: + python -m unittest test_unit_run_neon.py +""" + +import unittest +import tempfile +import shutil +import os +import sys + +# -- add python/ctsm to path (needed if we want to run the test stand-alone) +_CTSM_PYTHON = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) +sys.path.insert(1, _CTSM_PYTHON) + +# pylint: disable=wrong-import-position +from ctsm import unit_testing +from ctsm.site_and_regional.run_neon import check_neon_listing + +# pylint: disable=invalid-name + + +class TestRunNeon(unittest.TestCase): + """ + Basic class for testing run_neon.py. + """ + + def setUp(self): + """ + Make /_tempdir for use by these tests. 
+ """ + self._tempdir = tempfile.mkdtemp() + + def tearDown(self): + """ + Remove temporary directory + """ + shutil.rmtree(self._tempdir, ignore_errors=True) + + def test_check_neon_listing(self): + """ + Test that neon listing is available for valid sites + """ + valid_neon_sites = ["ABBY", "BART"] + previous_dir = os.getcwd() + os.chdir(self._tempdir) # cd to tempdir + available_list = check_neon_listing(valid_neon_sites) + self.assertEqual( + available_list[0].name, "ABBY", "available list of actual sites not as expected" + ) + self.assertEqual( + available_list[1].name, "BART", "available list of actual sites not as expected" + ) + # change to previous dir once listing.csv file is created in tempdir and test complete + os.chdir(previous_dir) + + def test_check_neon_listing_misspelled(self): + """ + Test that neon listing is not available for invalid sites + """ + valid_neon_sites = ["INVALID_SITE1", "INVALID_SITE2"] + previous_dir = os.getcwd() + os.chdir(self._tempdir) # cd to tempdir + available_list = check_neon_listing(valid_neon_sites) + self.assertEqual( + available_list, [], "available list of incorrect dummy site not as expected" + ) + # change to previous dir once listing.csv file is created in tempdir and test complete + os.chdir(previous_dir) + + +if __name__ == "__main__": + unit_testing.setup_for_tests() + unittest.main() diff --git a/python/ctsm/test/test_unit_subset_data.py b/python/ctsm/test/test_unit_subset_data.py index fd9aef631d..b8ea5b06f2 100755 --- a/python/ctsm/test/test_unit_subset_data.py +++ b/python/ctsm/test/test_unit_subset_data.py @@ -56,7 +56,7 @@ def test_inputdata_setup_files_basic(self): ) self.assertEqual( files["main_dir"], - "/glade/p/cesmdata/inputdata", + "/glade/campaign/cesm/cesmdata/cseg/inputdata", "main_dir directory not whats expected", ) diff --git a/python/ctsm/test/testinputs/README.md b/python/ctsm/test/testinputs/README.md index 45451b53e1..ef8953d20e 100644 --- a/python/ctsm/test/testinputs/README.md +++ 
b/python/ctsm/test/testinputs/README.md @@ -6,7 +6,8 @@ Installing Git LFS on your machine is a two-step process; step (1) needs to be done once per machine, and step (2) needs to be done once per user: 1. Install the Git LFS tool: Follow the instructions on the [Git LFS page](https://git-lfs.github.com/) for installing Git LFS on your platform. - - On cheyenne, Git LFS is already available as long as you are using a git + - On derecho the system default version of git already has Git LFS installed. + - On cheyenne and casper, Git LFS is already available as long as you are using a git module rather than the default system-level git. So just make sure that you are always using git via a git module (`module load git`). - On a Mac using homebrew, this can be done with `brew install git-lfs`. diff --git a/python/ctsm/test/testinputs/default_data.cfg b/python/ctsm/test/testinputs/default_data.cfg index 7e841dca54..0425aba133 100644 --- a/python/ctsm/test/testinputs/default_data.cfg +++ b/python/ctsm/test/testinputs/default_data.cfg @@ -1,5 +1,5 @@ [main] -clmforcingindir = /glade/p/cesmdata/inputdata +clmforcingindir = /glade/campaign/cesm/cesmdata/cseg/inputdata [datm_gswp3] dir = atm/datm7/atm_forcing.datm7.GSWP3.0.5d.v1.c170516 diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index 42444e32c5..8578ea860c 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -7,7 +7,7 @@ import re import pdb -from datetime import date +from datetime import date, timedelta from getpass import getuser from ctsm.git_utils import get_ctsm_git_short_hash @@ -189,3 +189,33 @@ def write_output(file, file_in, file_out, file_type): file.to_netcdf(path=file_out, mode="w", format="NETCDF3_64BIT") logger.info("Successfully created: %s", file_out) file.close() + + +def get_isosplit(iso_string, split): + """ + Split a string (iso_string) by the character sent in from split + Returns the number for that character split + Only used by parse_isoduration + """ + if split in 
iso_string: + num, iso_string = iso_string.split(split) + else: + num = 0 + return num, iso_string + + +def parse_isoduration(iso_string): + """ + simple ISO 8601 duration parser, does not account for leap years and assumes 30 day months + """ + # Remove prefix + iso_string = iso_string.split("P")[-1] + + # Step through letter dividers + years, iso_string = get_isosplit(iso_string, "Y") + months, iso_string = get_isosplit(iso_string, "M") + days, iso_string = get_isosplit(iso_string, "D") + + # Convert all to timedelta + delta_t = timedelta(days=int(days) + 365 * int(years) + 30 * int(months)) + return int(delta_t.total_seconds() / 86400) diff --git a/src/biogeochem/CNVegCarbonFluxType.F90 b/src/biogeochem/CNVegCarbonFluxType.F90 index c7aa3469e2..298e7b3053 100644 --- a/src/biogeochem/CNVegCarbonFluxType.F90 +++ b/src/biogeochem/CNVegCarbonFluxType.F90 @@ -3729,8 +3729,8 @@ subroutine RestartBulkOnly ( this, bounds, ncid, flag ) ! BACKWARDS_COMPATIBILITY(wjs/ssr, 2022-06-10) See note in CallRestartvarDimOK() if (CallRestartvarDimOK(ncid, flag, 'mxharvests')) then do k = repr_grain_min, repr_grain_max - data2dptr => this%repr_grainc_to_food_perharv_patch(:,:,k) ! e.g., grainc_to_food_perharv + data2dptr => this%repr_grainc_to_food_perharv_patch(:,:,k) varname = get_repr_rest_fname(k)//'c_to_food_perharv' call restartvar(ncid=ncid, flag=flag, varname=varname, & xtype=ncd_double, & @@ -3742,12 +3742,26 @@ subroutine RestartBulkOnly ( this, bounds, ncid, flag ) readvar=readvar, & scale_by_thickness=.false., & interpinic_flag='interp', data=data2dptr) + + ! 
e.g., grainc_to_seed_perharv + data2dptr => this%repr_grainc_to_seed_perharv_patch(:,:,k) + varname = get_repr_rest_fname(k)//'c_to_seed_perharv' + call restartvar(ncid=ncid, flag=flag, varname=varname, & + xtype=ncd_double, & + dim1name='pft', & + dim2name='mxharvests', & + switchdim=.true., & + long_name=get_repr_longname(k)//' C to seed per harvest; should only be output annually', & + units='gC/m2', & + readvar=readvar, & + scale_by_thickness=.false., & + interpinic_flag='interp', data=data2dptr) end do end if do k = repr_grain_min, repr_grain_max - data1dptr => this%repr_grainc_to_food_thisyr_patch(:,k) ! e.g., grainc_to_food_thisyr + data1dptr => this%repr_grainc_to_food_thisyr_patch(:,k) varname = get_repr_rest_fname(k)//'c_to_food_thisyr' call restartvar(ncid=ncid, flag=flag, varname=varname, & xtype=ncd_double, & @@ -3755,8 +3769,9 @@ subroutine RestartBulkOnly ( this, bounds, ncid, flag ) long_name=get_repr_longname(k)//' C to food per calendar year; should only be output annually', & units='gC/m2', & interpinic_flag='interp', readvar=readvar, data=data1dptr) - data1dptr => this%repr_grainc_to_seed_thisyr_patch(:,k) + ! e.g., grainc_to_seed_thisyr + data1dptr => this%repr_grainc_to_seed_thisyr_patch(:,k) varname = get_repr_rest_fname(k)//'c_to_seed_thisyr' call restartvar(ncid=ncid, flag=flag, varname=varname, & xtype=ncd_double, & diff --git a/src/biogeochem/CropType.F90 b/src/biogeochem/CropType.F90 index 29c4717ab3..a96e9e939f 100644 --- a/src/biogeochem/CropType.F90 +++ b/src/biogeochem/CropType.F90 @@ -23,6 +23,7 @@ module CropType private ! ! !PUBLIC DATA TYPES: + public :: latbaset ! ! Possible values of cphase @@ -371,43 +372,32 @@ subroutine InitCold(this, bounds) type(bounds_type), intent(in) :: bounds ! ! !LOCAL VARIABLES: - integer :: c, l, g, p, m, ivt ! indices + integer :: l, g, p, ivt ! 
indices + logical :: latvary_baset character(len=*), parameter :: subname = 'InitCold' !----------------------------------------------------------------------- -!DLL - added wheat & sugarcane restrictions to base T vary by lat + latvary_baset = trim(this%baset_mapping) == baset_map_latvary + if (.not. latvary_baset) then + this%latbaset_patch(bounds%begp:bounds%endp) = nan + end if + do p= bounds%begp,bounds%endp - g = patch%gridcell(p) - ivt = patch%itype(p) + l = patch%landunit(p) this%nyrs_crop_active_patch(p) = 0 - if ( grc%latdeg(g) >= 0.0_r8 .and. grc%latdeg(g) <= 30.0_r8) then - this%latbaset_patch(p)=pftcon%baset(ivt)+12._r8-0.4_r8*grc%latdeg(g) - else if (grc%latdeg(g) < 0.0_r8 .and. grc%latdeg(g) >= -30.0_r8) then - this%latbaset_patch(p)=pftcon%baset(ivt)+12._r8+0.4_r8*grc%latdeg(g) - else - this%latbaset_patch(p)=pftcon%baset(ivt) - end if - if ( trim(this%baset_mapping) == baset_map_constant ) then - this%latbaset_patch(p) = nan - end if - end do -!DLL -- end of mods - - if (use_crop) then - do p= bounds%begp,bounds%endp + if (lun%itype(l) == istcrop) then g = patch%gridcell(p) - l = patch%landunit(p) - c = patch%column(p) + ivt = patch%itype(p) + this%fertnitro_patch(p) = fert_cft(g,ivt) - if (lun%itype(l) == istcrop) then - m = patch%itype(p) - this%fertnitro_patch(p) = fert_cft(g,m) + if (latvary_baset) then + this%latbaset_patch(p) = latbaset(pftcon%baset(ivt), grc%latdeg(g), this%baset_latvary_intercept, this%baset_latvary_slope) end if - end do - end if + end if + end do end subroutine InitCold @@ -655,6 +645,16 @@ subroutine Restart(this, bounds, ncid, cnveg_state_inst, flag) long_name='crop sowing dates for this patch this year', units='day of year', & scale_by_thickness=.false., & interpinic_flag='interp', readvar=readvar, data=this%sdates_thisyr_patch) + call restartvar(ncid=ncid, flag=flag, varname='swindow_starts_thisyr_patch', xtype=ncd_double, & + dim1name='pft', dim2name='mxsowings', switchdim=.true., & + long_name='sowing window start 
dates for this patch this year', units='day of year', & + scale_by_thickness=.false., & + interpinic_flag='interp', readvar=readvar, data=this%swindow_starts_thisyr_patch) + call restartvar(ncid=ncid, flag=flag, varname='swindow_ends_thisyr_patch', xtype=ncd_double, & + dim1name='pft', dim2name='mxsowings', switchdim=.true., & + long_name='sowing window end dates for this patch this year', units='day of year', & + scale_by_thickness=.false., & + interpinic_flag='interp', readvar=readvar, data=this%swindow_ends_thisyr_patch) ! Fill variable(s) derived from read-in variable(s) if (flag == 'read' .and. readvar) then do p = bounds%begp,bounds%endp @@ -972,4 +972,24 @@ subroutine checkDates( ) end subroutine checkDates + real(r8) function latbaset(baset, latdeg, baset_latvary_intercept, baset_latvary_slope) + ! !ARGUMENTS: + real(r8), intent(in) :: baset + real(r8), intent(in) :: latdeg + real(r8), intent(in) :: baset_latvary_intercept + real(r8), intent(in) :: baset_latvary_slope + + ! Was originally + ! maxlat = baset_latvary_intercept / baset_latvary_slope + ! if (abs(latdeg) > maxlat) then + ! latbaset = baset + ! else + ! latbaset = baset + baset_latvary_intercept - baset_latvary_slope*abs(latdeg) + ! end if + ! But the one-liner below should improve efficiency, at least marginally. 
+ + latbaset = baset + baset_latvary_intercept - min(baset_latvary_intercept, baset_latvary_slope * abs(latdeg)) + + end function latbaset + end module CropType diff --git a/src/biogeochem/test/CMakeLists.txt b/src/biogeochem/test/CMakeLists.txt index ad91c7c995..81fe9bbaf0 100644 --- a/src/biogeochem/test/CMakeLists.txt +++ b/src/biogeochem/test/CMakeLists.txt @@ -1,3 +1,4 @@ add_subdirectory(Species_test) add_subdirectory(CNVegComputeSeed_test) add_subdirectory(CNPhenology_test) +add_subdirectory(Latbaset_test) diff --git a/src/biogeochem/test/Latbaset_test/CMakeLists.txt b/src/biogeochem/test/Latbaset_test/CMakeLists.txt new file mode 100644 index 0000000000..217fc7233c --- /dev/null +++ b/src/biogeochem/test/Latbaset_test/CMakeLists.txt @@ -0,0 +1,6 @@ +set (pfunit_sources + test_Latbaset.pf) + +add_pfunit_ctest(CropTypeLatbaset + TEST_SOURCES "${pfunit_sources}" + LINK_LIBRARIES clm csm_share esmf_wrf_timemgr) diff --git a/src/biogeochem/test/Latbaset_test/test_Latbaset.pf b/src/biogeochem/test/Latbaset_test/test_Latbaset.pf new file mode 100644 index 0000000000..ebb5bfa5e4 --- /dev/null +++ b/src/biogeochem/test/Latbaset_test/test_Latbaset.pf @@ -0,0 +1,119 @@ +module test_Latbaset + + ! 
Tests of CropType module: latbaset + + use funit + use shr_kind_mod , only : r8 => shr_kind_r8 + use unittestSubgridMod + use unittestSimpleSubgridSetupsMod + use unittestFilterBuilderMod + use CropType, only : latbaset + + implicit none + + @TestCase + type, extends(TestCase) :: TestLatbaset + contains + procedure :: setUp + procedure :: tearDown + end type TestLatbaset + + real(r8) :: baset + real(r8) :: latdeg + real(r8) :: baset_latvary_intercept + real(r8) :: baset_latvary_slope + real(r8) :: expected + +contains + + subroutine setUp(this) + class(TestLatbaset), intent(inout) :: this + end subroutine setUp + + subroutine tearDown(this) + class(TestLatbaset), intent(inout) :: this + + call unittest_subgrid_teardown() + end subroutine tearDown + + real(r8) function latbaset_max_lat(intercept, slope) + real(r8), intent(in) :: intercept + real(r8), intent(in) :: slope + + latbaset_max_lat = intercept / slope + end function latbaset_max_lat + + @Test + subroutine too_far_north(this) + class(TestLatbaset), intent(inout) :: this + + baset = 5._r8 + baset_latvary_intercept = 8.7_r8 + baset_latvary_slope = 0.5_r8 + latdeg = 10._r8 + latbaset_max_lat(baset_latvary_intercept, baset_latvary_slope) + + @assertEqual(baset, latbaset(baset, latdeg, baset_latvary_intercept, baset_latvary_slope)) + end subroutine too_far_north + + @Test + subroutine too_far_south(this) + class(TestLatbaset), intent(inout) :: this + + baset = 5._r8 + baset_latvary_intercept = 8.7_r8 + baset_latvary_slope = 0.5_r8 + latdeg = -10._r8 - latbaset_max_lat(baset_latvary_intercept, baset_latvary_slope) + + @assertEqual(baset, latbaset(baset, latdeg, baset_latvary_intercept, baset_latvary_slope)) + end subroutine too_far_south + + @Test + subroutine at_northern_limit(this) + class(TestLatbaset), intent(inout) :: this + + baset = 5._r8 + baset_latvary_intercept = 12._r8 + baset_latvary_slope = 0.4_r8 + latdeg = latbaset_max_lat(baset_latvary_intercept, baset_latvary_slope) + + @assertEqual(baset, 
latbaset(baset, latdeg, baset_latvary_intercept, baset_latvary_slope)) + end subroutine at_northern_limit + + @Test + subroutine at_southern_limit(this) + class(TestLatbaset), intent(inout) :: this + + baset = 5._r8 + baset_latvary_intercept = 12._r8 + baset_latvary_slope = 0.4_r8 + latdeg = -latbaset_max_lat(baset_latvary_intercept, baset_latvary_slope) + + @assertEqual(baset, latbaset(baset, latdeg, baset_latvary_intercept, baset_latvary_slope)) + end subroutine at_southern_limit + + @Test + subroutine in_nh(this) + class(TestLatbaset), intent(inout) :: this + + baset = 5._r8 + latdeg = 10._r8 + baset_latvary_intercept = 13._r8 + baset_latvary_slope = 0.3_r8 + + @assertEqual(15._r8, latbaset(baset, latdeg, baset_latvary_intercept, baset_latvary_slope)) + end subroutine in_nh + + @Test + subroutine in_sh(this) + class(TestLatbaset), intent(inout) :: this + + baset = 5._r8 + latdeg = -10._r8 + baset_latvary_intercept = 13._r8 + baset_latvary_slope = 0.3_r8 + + @assertEqual(15._r8, latbaset(baset, latdeg, baset_latvary_intercept, baset_latvary_slope)) + end subroutine in_sh + +end module test_Latbaset + diff --git a/src/biogeophys/AerosolMod.F90 b/src/biogeophys/AerosolMod.F90 index bce2b6f9ab..39ade89fb0 100644 --- a/src/biogeophys/AerosolMod.F90 +++ b/src/biogeophys/AerosolMod.F90 @@ -26,9 +26,6 @@ module AerosolMod public :: AerosolFluxes ! ! !PUBLIC DATA MEMBERS: - real(r8), public, parameter :: snw_rds_min = 54.526_r8 ! minimum allowed snow effective radius (also cold "fresh snow" value) [microns] - real(r8), public :: fresh_snw_rds_max = 204.526_r8 ! maximum warm fresh snow effective radius [microns] - ! type, public :: aerosol_type real(r8), pointer, public :: mss_bcpho_col(:,:) ! mass of hydrophobic BC in snow (col,lyr) [kg] real(r8), pointer, public :: mss_bcphi_col(:,:) ! 
mass of hydrophillic BC in snow (col,lyr) [kg] @@ -93,7 +90,6 @@ module AerosolMod procedure, private :: InitAllocate procedure, private :: InitHistory procedure, private :: InitCold - procedure, private :: InitReadNML end type aerosol_type @@ -113,7 +109,6 @@ subroutine Init(this, bounds, NLFilename) call this%InitAllocate(bounds) call this%InitHistory(bounds) call this%InitCold(bounds) - call this%InitReadNML(NLFilename) end subroutine Init @@ -294,58 +289,6 @@ subroutine InitCold(this, bounds) end subroutine InitCold !----------------------------------------------------------------------- - subroutine InitReadNML(this, NLFilename) - ! - ! !USES: - ! !USES: - use fileutils , only : getavu, relavu, opnfil - use shr_nl_mod , only : shr_nl_find_group_name - use spmdMod , only : masterproc, mpicom - use shr_mpi_mod , only : shr_mpi_bcast - use clm_varctl , only : iulog - ! - ! !ARGUMENTS: - class(aerosol_type) :: this - character(len=*), intent(in) :: NLFilename ! Input namelist filename - ! - ! !LOCAL VARIABLES: - !----------------------------------------------------------------------- - integer :: ierr ! error code - integer :: unitn ! 
unit for namelist file - - character(len=*), parameter :: subname = 'Aerosol::InitReadNML' - character(len=*), parameter :: nmlname = 'aerosol' - !----------------------------------------------------------------------- - namelist/aerosol/ fresh_snw_rds_max - - if (masterproc) then - unitn = getavu() - write(iulog,*) 'Read in '//nmlname//' namelist' - call opnfil (NLFilename, unitn, 'F') - call shr_nl_find_group_name(unitn, nmlname, status=ierr) - if (ierr == 0) then - read(unitn, nml=aerosol, iostat=ierr) - if (ierr /= 0) then - call endrun(msg="ERROR reading "//nmlname//" namelist "//errmsg(sourcefile, __LINE__)) - end if - else - call endrun(msg="ERROR could NOT find "//nmlname//" namelist "//errmsg(sourcefile, __LINE__)) - end if - call relavu( unitn ) - end if - - call shr_mpi_bcast (fresh_snw_rds_max , mpicom) - - if (masterproc) then - write(iulog,*) ' ' - write(iulog,*) nmlname//' settings:' - write(iulog,nml=aerosol) - write(iulog,*) ' ' - end if - - end subroutine InitReadNML - - !------------------------------------------------------------------------ subroutine Restart(this, bounds, ncid, flag, & h2osoi_ice_col, h2osoi_liq_col) ! diff --git a/src/biogeophys/BareGroundFluxesMod.F90 b/src/biogeophys/BareGroundFluxesMod.F90 index 7dfa83820d..7db214065d 100644 --- a/src/biogeophys/BareGroundFluxesMod.F90 +++ b/src/biogeophys/BareGroundFluxesMod.F90 @@ -190,8 +190,12 @@ subroutine BareGroundFluxes(bounds, num_noexposedvegp, filter_noexposedvegp, & forc_th => atm2lnd_inst%forc_th_downscaled_col , & ! Input: [real(r8) (:) ] atmospheric potential temperature (Kelvin) forc_t => atm2lnd_inst%forc_t_downscaled_col , & ! Input: [real(r8) (:) ] atmospheric temperature (Kelvin) forc_pbot => atm2lnd_inst%forc_pbot_downscaled_col , & ! Input: [real(r8) (:) ] atmospheric pressure (Pa) - forc_rho => atm2lnd_inst%forc_rho_downscaled_col , & ! Input: [real(r8) (:) ] density (kg/m**3) - forc_q => wateratm2lndbulk_inst%forc_q_downscaled_col , & ! 
Input: [real(r8) (:) ] atmospheric specific humidity (kg/kg) + forc_rho => atm2lnd_inst%forc_rho_downscaled_col , & ! Input: [real(r8) (:) ] density (kg/m**3) + forc_hgt_t => atm2lnd_inst%forc_hgt_t_grc , & ! Input: [real(r8) (:) ] observational height of temperature [m] + forc_hgt_u => atm2lnd_inst%forc_hgt_u_grc , & ! Input: [real(r8) (:) ] observational height of wind [m] + forc_hgt_q => atm2lnd_inst%forc_hgt_q_grc , & ! Input: [real(r8) (:) ] observational height of specific humidity [m] + + forc_q => wateratm2lndbulk_inst%forc_q_downscaled_col , & ! Input: [real(r8) (:) ] atmospheric specific humidity (kg/kg) watsat => soilstate_inst%watsat_col , & ! Input: [real(r8) (:,:) ] volumetric soil water at saturation (porosity) soilbeta => soilstate_inst%soilbeta_col , & ! Input: [real(r8) (:) ] soil wetness relative to field capacity @@ -360,12 +364,10 @@ subroutine BareGroundFluxes(bounds, num_noexposedvegp, filter_noexposedvegp, & z0qg_patch(p) = z0hg_patch(p) ! Update the forcing heights for new roughness lengths - ! 
TODO(KWO, 2022-03-15) Only for Meier2022 for now to maintain bfb with ZengWang2007 - if (z0param_method == 'Meier2022') then - forc_hgt_u_patch(p) = forc_hgt_u_patch(g) + z0mg_patch(p) + displa(p) - forc_hgt_t_patch(p) = forc_hgt_t_patch(g) + z0hg_patch(p) + displa(p) - forc_hgt_q_patch(p) = forc_hgt_q_patch(g) + z0qg_patch(p) + displa(p) - end if + forc_hgt_u_patch(p) = forc_hgt_u(g) + z0mg_patch(p) + displa(p) + forc_hgt_t_patch(p) = forc_hgt_t(g) + z0hg_patch(p) + displa(p) + forc_hgt_q_patch(p) = forc_hgt_q(g) + z0qg_patch(p) + displa(p) + thvstar = tstar*(1._r8+0.61_r8*forc_q(c)) + 0.61_r8*forc_th(c)*qstar zeta(p) = zldis(p)*vkc*grav*thvstar/(ustar(p)**2*thv(c)) diff --git a/src/biogeophys/BiogeophysPreFluxCalcsMod.F90 b/src/biogeophys/BiogeophysPreFluxCalcsMod.F90 index 11842560ee..62643d680c 100644 --- a/src/biogeophys/BiogeophysPreFluxCalcsMod.F90 +++ b/src/biogeophys/BiogeophysPreFluxCalcsMod.F90 @@ -138,7 +138,7 @@ subroutine SetZ0mDisp(bounds, num_nolakep, filter_nolakep, & type(canopystate_type) , intent(inout) :: canopystate_inst ! ! !LOCAL VARIABLES: - integer :: fp, p + integer :: fp, p, l character(len=*), parameter :: subname = 'SetZ0mDisp' real(r8) :: U_ustar ! 
wind at canopy height divided by friction velocity (unitless) @@ -164,6 +164,7 @@ subroutine SetZ0mDisp(bounds, num_nolakep, filter_nolakep, & do fp = 1, num_nolakep p = filter_nolakep(fp) + l = patch%landunit(p) if( .not.(patch%is_fates(p))) then select case (z0param_method) @@ -199,8 +200,13 @@ subroutine SetZ0mDisp(bounds, num_nolakep, filter_nolakep, & / 2._r8)**(-0.5_r8) / (pftcon%z0v_LAImax(patch%itype(p))) / pftcon%z0v_c(patch%itype(p)) if ( htop(p) <= 1.e-10_r8 )then - write(iulog,*) ' nstep = ', get_nstep(), ' htop = ', htop(p) - call endrun(subgrid_index=p, subgrid_level=subgrid_level_patch, msg=errMsg(sourcefile, __LINE__)) + if (lun%itype(l) == istcrop) then + z0m(p) = 0._r8 + displa(p) = 0._r8 + else + write(iulog,*) ' nstep = ', get_nstep(), ' htop = ', htop(p) + call endrun(subgrid_index=p, subgrid_level=subgrid_level_patch, msg=errMsg(sourcefile, __LINE__)) + end if else z0m(p) = htop(p) * (1._r8 - displa(p) / htop(p)) * exp(-0.4_r8 * U_ustar + & log(pftcon%z0v_cw(patch%itype(p))) - 1._r8 + pftcon%z0v_cw(patch%itype(p))**(-1._r8)) diff --git a/src/biogeophys/CanopyFluxesMod.F90 b/src/biogeophys/CanopyFluxesMod.F90 index ae0832a2b2..f152e761eb 100644 --- a/src/biogeophys/CanopyFluxesMod.F90 +++ b/src/biogeophys/CanopyFluxesMod.F90 @@ -927,12 +927,9 @@ subroutine CanopyFluxes(bounds, num_exposedvegp, filter_exposedvegp, z0qv(p) = z0mv(p) ! Update the forcing heights - ! 
TODO(KWO, 2022-03-15) Only for Meier2022 for now to maintain bfb with ZengWang2007 - if (z0param_method == 'Meier2022') then - forc_hgt_u_patch(p) = forc_hgt_u(g) + z0mv(p) + displa(p) - forc_hgt_t_patch(p) = forc_hgt_t(g) + z0hv(p) + displa(p) - forc_hgt_q_patch(p) = forc_hgt_q(g) + z0qv(p) + displa(p) - end if + forc_hgt_u_patch(p) = forc_hgt_u(g) + z0mv(p) + displa(p) + forc_hgt_t_patch(p) = forc_hgt_t(g) + z0hv(p) + displa(p) + forc_hgt_q_patch(p) = forc_hgt_q(g) + z0qv(p) + displa(p) end do diff --git a/src/biogeophys/LakeFluxesMod.F90 b/src/biogeophys/LakeFluxesMod.F90 index fdd105ad8f..fb5f723839 100644 --- a/src/biogeophys/LakeFluxesMod.F90 +++ b/src/biogeophys/LakeFluxesMod.F90 @@ -373,16 +373,9 @@ subroutine LakeFluxes(bounds, num_lakec, filter_lakec, num_lakep, filter_lakep, ! Surface temperature and fluxes ! Update forcing heights for updated roughness lengths - ! TODO(KWO, 2022-03-15) Only for Meier2022 for now to maintain bfb with ZengWang2007 - if (z0param_method == 'Meier2022') then - forc_hgt_u_patch(p) = forc_hgt_u(g) + z0mg(p) - forc_hgt_t_patch(p) = forc_hgt_t(g) + z0hg(p) - forc_hgt_q_patch(p) = forc_hgt_q(g) + z0qg(p) - else - forc_hgt_u_patch(p) = forc_hgt_u(g) + z0mg(p) - forc_hgt_t_patch(p) = forc_hgt_t(g) + z0mg(p) - forc_hgt_q_patch(p) = forc_hgt_q(g) + z0mg(p) - end if + forc_hgt_u_patch(p) = forc_hgt_u(g) + z0mg(p) + forc_hgt_t_patch(p) = forc_hgt_t(g) + z0hg(p) + forc_hgt_q_patch(p) = forc_hgt_q(g) + z0qg(p) ! Find top layer jtop(c) = snl(c) + 1 @@ -626,12 +619,9 @@ subroutine LakeFluxes(bounds, num_lakec, filter_lakec, num_lakep, filter_lakep, end if ! Update forcing heights for updated roughness lengths - ! 
TODO(KWO, 2022-03-15) Only for Meier2022 for now to maintain bfb with ZengWang2007 - if (z0param_method == 'Meier2022') then - forc_hgt_u_patch(p) = forc_hgt_u(g) + z0mg(p) - forc_hgt_t_patch(p) = forc_hgt_t(g) + z0hg(p) - forc_hgt_q_patch(p) = forc_hgt_q(g) + z0qg(p) - end if + forc_hgt_u_patch(p) = forc_hgt_u(g) + z0mg(p) + forc_hgt_t_patch(p) = forc_hgt_t(g) + z0hg(p) + forc_hgt_q_patch(p) = forc_hgt_q(g) + z0qg(p) end do ! end of filtered pft loop diff --git a/src/biogeophys/SnowHydrologyMod.F90 b/src/biogeophys/SnowHydrologyMod.F90 index 4698e1136d..9fb1a52dbc 100644 --- a/src/biogeophys/SnowHydrologyMod.F90 +++ b/src/biogeophys/SnowHydrologyMod.F90 @@ -79,7 +79,14 @@ module SnowHydrologyMod real(r8) :: rho_max ! Wind drift compaction / maximum density (kg/m3) real(r8) :: tau_ref ! Wind drift compaction / reference time (48*3600) (s) real(r8) :: scvng_fct_mlt_sf ! Scaling factor modifying scavenging factors for BC, OC, and dust species inclusion in meltwater (-) + real(r8) :: scvng_fct_mlt_bcphi ! scavenging factor for hydrophillic BC inclusion in meltwater [frc] + real(r8) :: scvng_fct_mlt_bcpho ! scavenging factor for hydrophobic BC inclusion in meltwater [frc] + real(r8) :: scvng_fct_mlt_dst1 ! scavenging factor for dust species 1 inclusion in meltwater [frc] + real(r8) :: scvng_fct_mlt_dst2 ! scavenging factor for dust species 2 inclusion in meltwater [frc] + real(r8) :: scvng_fct_mlt_dst3 ! scavenging factor for dust species 3 inclusion in meltwater [frc] + real(r8) :: scvng_fct_mlt_dst4 ! scavenging factor for dust species 4 inclusion in meltwater [frc] real(r8) :: ceta ! Overburden compaction constant (kg/m3) + real(r8) :: snw_rds_min ! minimum allowed snow effective radius (also cold "fresh snow" value) [microns] end type params_type type(params_type), private :: params_inst @@ -120,14 +127,8 @@ module SnowHydrologyMod ! 7= dust species 3 ! 8= dust species 4 ! - real(r8), public, parameter :: scvng_fct_mlt_bcphi = 0.20_r8 ! 
scavenging factor for hydrophillic BC inclusion in meltwater [frc] - real(r8), public, parameter :: scvng_fct_mlt_bcpho = 0.03_r8 ! scavenging factor for hydrophobic BC inclusion in meltwater [frc] real(r8), public, parameter :: scvng_fct_mlt_ocphi = 0.20_r8 ! scavenging factor for hydrophillic OC inclusion in meltwater [frc] real(r8), public, parameter :: scvng_fct_mlt_ocpho = 0.03_r8 ! scavenging factor for hydrophobic OC inclusion in meltwater [frc] - real(r8), public, parameter :: scvng_fct_mlt_dst1 = 0.02_r8 ! scavenging factor for dust species 1 inclusion in meltwater [frc] - real(r8), public, parameter :: scvng_fct_mlt_dst2 = 0.02_r8 ! scavenging factor for dust species 2 inclusion in meltwater [frc] - real(r8), public, parameter :: scvng_fct_mlt_dst3 = 0.01_r8 ! scavenging factor for dust species 3 inclusion in meltwater [frc] - real(r8), public, parameter :: scvng_fct_mlt_dst4 = 0.01_r8 ! scavenging factor for dust species 4 inclusion in meltwater [frc] ! The following are public for the sake of unit testing integer, parameter, public :: LoTmpDnsSlater2017 = 2 ! For temperature below -15C use equation from Slater 2017 @@ -316,8 +317,22 @@ subroutine readParams( ncid ) call readNcdioScalar(ncid, 'tau_ref', subname, params_inst%tau_ref) ! Scaling factor modifying scavenging factors for BC, OC, and dust species inclusion in meltwater (-) call readNcdioScalar(ncid, 'scvng_fct_mlt_sf', subname, params_inst%scvng_fct_mlt_sf) + ! scavenging factor for hydrophillic BC inclusion in meltwater [frc] + call readNcdioScalar(ncid, 'scvng_fct_mlt_bcphi', subname, params_inst%scvng_fct_mlt_bcphi) + ! scavenging factor for hydrophobic BC inclusion in meltwater [frc] + call readNcdioScalar(ncid, 'scvng_fct_mlt_bcpho', subname, params_inst%scvng_fct_mlt_bcpho) + ! scavenging factor for dust species 1 inclusion in meltwater [frc] + call readNcdioScalar(ncid, 'scvng_fct_mlt_dst1', subname, params_inst%scvng_fct_mlt_dst1) + ! 
scavenging factor for dust species 2 inclusion in meltwater [frc] + call readNcdioScalar(ncid, 'scvng_fct_mlt_dst2', subname, params_inst%scvng_fct_mlt_dst2) + ! scavenging factor for dust species 3 inclusion in meltwater [frc] + call readNcdioScalar(ncid, 'scvng_fct_mlt_dst3', subname, params_inst%scvng_fct_mlt_dst3) + ! scavenging factor for dust species 4 inclusion in meltwater [frc] + call readNcdioScalar(ncid, 'scvng_fct_mlt_dst4', subname, params_inst%scvng_fct_mlt_dst4) ! Overburden compaction constant (kg/m3) call readNcdioScalar(ncid, 'ceta', subname, params_inst%ceta) + ! minimum allowed snow effective radius (also cold "fresh snow" value) [microns] + call readNcdioScalar(ncid, 'snw_rds_min', subname, params_inst%snw_rds_min) end subroutine readParams @@ -1586,7 +1601,7 @@ subroutine CalcAndApplyAerosolFluxes(bounds, num_snowc, filter_snowc, & ! BCPHI: ! 1. flux with meltwater: qout_bc_phi(c) = qflx_snow_percolation(c,j)*params_inst%scvng_fct_mlt_sf* & - scvng_fct_mlt_bcphi*(mss_bcphi(c,j)/mss_liqice) + params_inst%scvng_fct_mlt_bcphi*(mss_bcphi(c,j)/mss_liqice) if (qout_bc_phi(c)*dtime > mss_bcphi(c,j)) then qout_bc_phi(c) = mss_bcphi(c,j)/dtime mss_bcphi(c,j) = 0._r8 @@ -1598,7 +1613,7 @@ subroutine CalcAndApplyAerosolFluxes(bounds, num_snowc, filter_snowc, & ! BCPHO: ! 1. flux with meltwater: qout_bc_pho(c) = qflx_snow_percolation(c,j)*params_inst%scvng_fct_mlt_sf* & - scvng_fct_mlt_bcpho*(mss_bcpho(c,j)/mss_liqice) + params_inst%scvng_fct_mlt_bcpho*(mss_bcpho(c,j)/mss_liqice) if (qout_bc_pho(c)*dtime > mss_bcpho(c,j)) then qout_bc_pho(c) = mss_bcpho(c,j)/dtime mss_bcpho(c,j) = 0._r8 @@ -1634,7 +1649,7 @@ subroutine CalcAndApplyAerosolFluxes(bounds, num_snowc, filter_snowc, & ! DUST 1: ! 1. 
flux with meltwater: qout_dst1(c) = qflx_snow_percolation(c,j)*params_inst%scvng_fct_mlt_sf* & - scvng_fct_mlt_dst1*(mss_dst1(c,j)/mss_liqice) + params_inst%scvng_fct_mlt_dst1*(mss_dst1(c,j)/mss_liqice) if (qout_dst1(c)*dtime > mss_dst1(c,j)) then qout_dst1(c) = mss_dst1(c,j)/dtime mss_dst1(c,j) = 0._r8 @@ -1646,7 +1661,7 @@ subroutine CalcAndApplyAerosolFluxes(bounds, num_snowc, filter_snowc, & ! DUST 2: ! 1. flux with meltwater: qout_dst2(c) = qflx_snow_percolation(c,j)*params_inst%scvng_fct_mlt_sf* & - scvng_fct_mlt_dst2*(mss_dst2(c,j)/mss_liqice) + params_inst%scvng_fct_mlt_dst2*(mss_dst2(c,j)/mss_liqice) if (qout_dst2(c)*dtime > mss_dst2(c,j)) then qout_dst2(c) = mss_dst2(c,j)/dtime mss_dst2(c,j) = 0._r8 @@ -1658,7 +1673,7 @@ subroutine CalcAndApplyAerosolFluxes(bounds, num_snowc, filter_snowc, & ! DUST 3: ! 1. flux with meltwater: qout_dst3(c) = qflx_snow_percolation(c,j)*params_inst%scvng_fct_mlt_sf* & - scvng_fct_mlt_dst3*(mss_dst3(c,j)/mss_liqice) + params_inst%scvng_fct_mlt_dst3*(mss_dst3(c,j)/mss_liqice) if (qout_dst3(c)*dtime > mss_dst3(c,j)) then qout_dst3(c) = mss_dst3(c,j)/dtime mss_dst3(c,j) = 0._r8 @@ -1670,7 +1685,7 @@ subroutine CalcAndApplyAerosolFluxes(bounds, num_snowc, filter_snowc, & ! DUST 4: ! 1. flux with meltwater: qout_dst4(c) = qflx_snow_percolation(c,j)*params_inst%scvng_fct_mlt_sf* & - scvng_fct_mlt_dst4*(mss_dst4(c,j)/mss_liqice) + params_inst%scvng_fct_mlt_dst4*(mss_dst4(c,j)/mss_liqice) if (qout_dst4(c)*dtime > mss_dst4(c,j)) then qout_dst4(c) = mss_dst4(c,j)/dtime mss_dst4(c,j) = 0._r8 @@ -3936,7 +3951,6 @@ function MassWeightedSnowRadius( rds1, rds2, swtot, zwtot ) result(mass_weighted ! Calculate the mass weighted snow radius when two layers are combined ! ! !USES: - use AerosolMod , only : snw_rds_min use SnowSnicarMod, only : snw_rds_max implicit none ! 
!ARGUMENTS: @@ -3951,8 +3965,8 @@ function MassWeightedSnowRadius( rds1, rds2, swtot, zwtot ) result(mass_weighted if ( mass_weighted_snowradius > snw_rds_max ) then mass_weighted_snowradius = snw_rds_max - else if ( mass_weighted_snowradius < snw_rds_min ) then - mass_weighted_snowradius = snw_rds_min + else if ( mass_weighted_snowradius < params_inst%snw_rds_min ) then + mass_weighted_snowradius = params_inst%snw_rds_min end if end function MassWeightedSnowRadius diff --git a/src/biogeophys/SnowSnicarMod.F90 b/src/biogeophys/SnowSnicarMod.F90 index 9a95188232..5bc1c61edb 100644 --- a/src/biogeophys/SnowSnicarMod.F90 +++ b/src/biogeophys/SnowSnicarMod.F90 @@ -18,7 +18,6 @@ module SnowSnicarMod use shr_const_mod , only : SHR_CONST_RHOICE use abortutils , only : endrun use decompMod , only : bounds_type, subgrid_level_column - use AerosolMod , only : snw_rds_min use atm2lndType , only : atm2lnd_type use WaterStateBulkType , only : waterstatebulk_type use WaterDiagnosticBulkType , only : waterdiagnosticbulk_type @@ -42,6 +41,8 @@ module SnowSnicarMod real(r8) :: snw_rds_refrz ! Effective radius of re-frozen snow (microns) real(r8) :: C2_liq_Brun89 ! Constant for liquid water grain growth [m3 s-1], ! from Brun89: corrected for LWC in units of percent + real(r8) :: fresh_snw_rds_max ! maximum warm fresh snow effective radius [microns] + real(r8) :: snw_rds_min ! minimum allowed snow effective radius (also cold "fresh snow" value) [microns] end type params_type type(params_type), private :: params_inst ! @@ -66,7 +67,6 @@ module SnowSnicarMod integer, parameter :: snw_rds_max_tbl = 1500 ! maximum effective radius defined in Mie lookup table [microns] integer, parameter :: snw_rds_min_tbl = 30 ! minimium effective radius defined in Mie lookup table [microns] - integer, parameter :: snw_rds_min_int = nint(snw_rds_min) ! minimum allowed snow effective radius as integer [microns] real(r8), parameter :: snw_rds_max = 1500._r8 ! 
maximum allowed snow effective radius [microns] real(r8), parameter :: min_snw = 1.0E-30_r8 ! minimum snow mass required for SNICAR RT calculation [kg m-2] @@ -182,6 +182,10 @@ subroutine readParams( ncid ) call readNcdioScalar(ncid, 'snw_rds_refrz', subname, params_inst%snw_rds_refrz) ! constant for liquid water grain growth [m3 s-1], from Brun89: corrected for LWC in units of percent call readNcdioScalar(ncid, 'C2_liq_Brun89', subname, params_inst%C2_liq_Brun89) + ! maximum warm fresh snow effective radius [microns] + call readNcdioScalar(ncid, 'fresh_snw_rds_max', subname, params_inst%fresh_snw_rds_max) + ! minimum allowed snow effective radius (also cold "fresh snow" value) [microns] + call readNcdioScalar(ncid, 'snw_rds_min', subname, params_inst%snw_rds_min) end subroutine readParams @@ -677,7 +681,7 @@ subroutine SNICAR_RT (bounds, num_nourbanc, filter_nourbanc, & snl_lcl = -1 h2osno_ice_lcl(0) = h2osno_lcl h2osno_liq_lcl(0) = 0._r8 - snw_rds_lcl(0) = snw_rds_min_int + snw_rds_lcl(0) = nint(params_inst%snw_rds_min) else flg_nosnl = 0 snl_lcl = snl(c_idx) @@ -1628,10 +1632,10 @@ subroutine SnowAge_grain(bounds, & !LvK extra boundary check, to prevent when using old restart file with lower snw_rds_min than current run - snw_rds(c_idx,i) = max(snw_rds(c_idx,i), snw_rds_min) + snw_rds(c_idx,i) = max(snw_rds(c_idx,i), params_inst%snw_rds_min) ! change in snow effective radius, using best-fit parameters - dr_fresh = snw_rds(c_idx,i)-snw_rds_min + dr_fresh = snw_rds(c_idx,i) - params_inst%snw_rds_min dr = (bst_drdt0 * (bst_tau / (dr_fresh + bst_tau))**(1._r8 / bst_kappa)) * (dtime / secsphr) ! @@ -1701,7 +1705,7 @@ subroutine SnowAge_grain(bounds, & !********** 5. CHECK BOUNDARIES *********** ! ! boundary check - snw_rds(c_idx,i) = max(snw_rds(c_idx,i), snw_rds_min) + snw_rds(c_idx,i) = max(snw_rds(c_idx,i), params_inst%snw_rds_min) snw_rds(c_idx,i) = min(snw_rds(c_idx,i), snw_rds_max) ! 
set top layer variables for history files @@ -1720,7 +1724,7 @@ subroutine SnowAge_grain(bounds, & do fc = 1, num_nosnowc c_idx = filter_nosnowc(fc) if (h2osno_no_layers(c_idx) > 0._r8) then - snw_rds(c_idx,0) = snw_rds_min + snw_rds(c_idx,0) = params_inst%snw_rds_min endif enddo @@ -1744,7 +1748,7 @@ real(r8) function FreshSnowRadius(c_idx, atm2lnd_inst) ! Author: Leo VanKampenhout ! ! !USES: - use AerosolMod , only : fresh_snw_rds_max + ! ! !ARGUMENTS: integer, intent(in) :: c_idx ! column index type(atm2lnd_type) , intent(in) :: atm2lnd_inst ! Forcing from atmosphere @@ -1753,16 +1757,17 @@ real(r8) function FreshSnowRadius(c_idx, atm2lnd_inst) !----------------------------------------------------------------------- real(r8), parameter :: tmin = tfrz - 30._r8 ! start of linear ramp real(r8), parameter :: tmax = tfrz - 0._r8 ! end of linear ramp - real(r8), parameter :: gs_min = snw_rds_min ! minimum value - real(r8) :: gs_max ! maximum value + real(r8) :: gs_min ! minimum value + real(r8) :: gs_max ! maximum value associate( & forc_t => atm2lnd_inst%forc_t_downscaled_col & ! Input: [real(r8) (:) ] atmospheric temperature (Kelvin) ) - if ( fresh_snw_rds_max <= snw_rds_min )then - FreshSnowRadius = snw_rds_min + if ( params_inst%fresh_snw_rds_max <= params_inst%snw_rds_min )then + FreshSnowRadius = params_inst%snw_rds_min else - gs_max = fresh_snw_rds_max + gs_max = params_inst%fresh_snw_rds_max + gs_min = params_inst%snw_rds_min if (forc_t(c_idx) < tmin) then FreshSnowRadius = gs_min diff --git a/src/biogeophys/SoilTemperatureMod.F90 b/src/biogeophys/SoilTemperatureMod.F90 index b868224a60..0dc8876d24 100644 --- a/src/biogeophys/SoilTemperatureMod.F90 +++ b/src/biogeophys/SoilTemperatureMod.F90 @@ -47,7 +47,7 @@ module SoilTemperatureMod ! o The thermal conductivity of soil is computed from ! the algorithm of Johansen (as reported by Farouki 1981), and the ! conductivity of snow is from the formulation used in - ! SNTHERM (Jordan 1991). + ! 
Sturm (1997) or Jordan (1991) p. 18 depending on namelist option. ! o Boundary conditions: ! F = Rnet - Hg - LEg (top), F= 0 (base of the soil column). ! o Soil / snow temperature is predicted from heat conduction @@ -100,7 +100,7 @@ subroutine SoilTemperature(bounds, num_urbanl, filter_urbanl, num_urbanc, filter ! o The thermal conductivity of soil is computed from ! the algorithm of Johansen (as reported by Farouki 1981), and the ! conductivity of snow is from the formulation used in - ! SNTHERM (Jordan 1991). + ! Sturm (1997) or Jordan (1991) p. 18 depending on namelist option. ! o Boundary conditions: ! F = Rnet - Hg - LEg (top), F= 0 (base of the soil column). ! o Soil / snow temperature is predicted from heat conduction @@ -611,18 +611,20 @@ subroutine SoilThermProp (bounds, num_urbanc, filter_urbanc, num_nolakec, filter ! ! (2) The thermal conductivity of soil is computed from the algorithm of ! Johansen (as reported by Farouki 1981), and of snow is from the - ! formulation used in SNTHERM (Jordan 1991). + ! formulation used in Sturm (1997) or Jordan (1991) p. 18 depending on + ! namelist option. ! The thermal conductivities at the interfaces between two neighboring ! layers (j, j+1) are derived from an assumption that the flux across ! the interface is equal to that from the node j to the interface and the ! flux from the interface to the node j+1. ! ! !USES: + use shr_log_mod , only : errMsg => shr_log_errMsg use clm_varpar , only : nlevsno, nlevgrnd, nlevurb, nlevsoi, nlevmaxurbgrnd use clm_varcon , only : denh2o, denice, tfrz, tkwat, tkice, tkair, cpice, cpliq, thk_bedrock, csol_bedrock use landunit_varcon , only : istice, istwet use column_varcon , only : icol_roof, icol_sunwall, icol_shadewall, icol_road_perv, icol_road_imperv - use clm_varctl , only : iulog + use clm_varctl , only : iulog, snow_thermal_cond_method ! ! 
!ARGUMENTS: type(bounds_type) , intent(in) :: bounds @@ -647,6 +649,8 @@ subroutine SoilThermProp (bounds, num_urbanc, filter_urbanc, num_nolakec, filter real(r8) :: fl ! volume fraction of liquid or unfrozen water to total water real(r8) :: satw ! relative total water content of soil. real(r8) :: zh2osfc + + character(len=*),parameter :: subname = 'SoilThermProp' !----------------------------------------------------------------------- call t_startf( 'SoilThermProp' ) @@ -734,11 +738,27 @@ subroutine SoilThermProp (bounds, num_urbanc, filter_urbanc, num_nolakec, filter endif endif - ! Thermal conductivity of snow, which from Jordan (1991) pp. 18 + ! Thermal conductivity of snow ! Only examine levels from snl(c)+1 -> 0 where snl(c) < 1 if (snl(c)+1 < 1 .AND. (j >= snl(c)+1) .AND. (j <= 0)) then bw(c,j) = (h2osoi_ice(c,j)+h2osoi_liq(c,j))/(frac_sno(c)*dz(c,j)) - thk(c,j) = tkair + (7.75e-5_r8 *bw(c,j) + 1.105e-6_r8*bw(c,j)*bw(c,j))*(tkice-tkair) + select case (snow_thermal_cond_method) + case ('Jordan1991') + thk(c,j) = tkair + (7.75e-5_r8 *bw(c,j) + 1.105e-6_r8*bw(c,j)*bw(c,j))*(tkice-tkair) + case ('Sturm1997') + ! Implemented by Vicky Dutch (VRD), Nick Rutter, and + ! Leanne Wake (LMW) + ! https://tc.copernicus.org/articles/16/4201/2022/ + ! Code provided by Adrien Dams to Will Wieder + if (bw(c,j) <= 156) then !LMW or 0.156 ? + thk(c,j) = 0.023 + 0.234*(bw(c,j)/1000) !LMW - units changed by VRD + else !LMW + thk(c,j) = 0.138 - 1.01*(bw(c,j)/1000) +(3.233*((bw(c,j)/1000)*(bw(c,j)/1000))) ! 
LMW Sturm I think + end if + case default + write(iulog,*) subname//' ERROR: unknown snow_thermal_cond_method value: ', snow_thermal_cond_method + call endrun(msg=errMsg(sourcefile, __LINE__)) + end select end if end do diff --git a/src/biogeophys/WaterDiagnosticBulkType.F90 b/src/biogeophys/WaterDiagnosticBulkType.F90 index 426ddb464c..057062777f 100644 --- a/src/biogeophys/WaterDiagnosticBulkType.F90 +++ b/src/biogeophys/WaterDiagnosticBulkType.F90 @@ -108,12 +108,10 @@ module WaterDiagnosticBulkType type, private :: params_type real(r8) :: zlnd ! Momentum roughness length for soil, glacier, wetland (m) + real(r8) :: snw_rds_min ! minimum allowed snow effective radius (also cold "fresh snow" value) [microns] end type params_type type(params_type), private :: params_inst - ! minimum allowed snow effective radius (also "fresh snow" value) [microns] - real(r8), public, parameter :: snw_rds_min = 54.526_r8 - character(len=*), parameter, private :: sourcefile = & __FILE__ !------------------------------------------------------------------------ @@ -136,6 +134,8 @@ subroutine readParams( ncid ) ! Momentum roughness length for soil, glacier, wetland (m) call readNcdioScalar(ncid, 'zlnd', subname, params_inst%zlnd) + ! 
minimum allowed snow effective radius (also cold "fresh snow" value) [microns] + call readNcdioScalar(ncid, 'snw_rds_min', subname, params_inst%snw_rds_min) end subroutine readParams @@ -750,11 +750,11 @@ subroutine InitBulkCold(this, bounds, & do c = bounds%begc,bounds%endc if (snl(c) < 0) then - this%snw_rds_col(c,snl(c)+1:0) = snw_rds_min + this%snw_rds_col(c,snl(c)+1:0) = params_inst%snw_rds_min this%snw_rds_col(c,-nlevsno+1:snl(c)) = 0._r8 - this%snw_rds_top_col(c) = snw_rds_min + this%snw_rds_top_col(c) = params_inst%snw_rds_min elseif (h2osno_input_col(c) > 0._r8) then - this%snw_rds_col(c,0) = snw_rds_min + this%snw_rds_col(c,0) = params_inst%snw_rds_min this%snw_rds_col(c,-nlevsno+1:-1) = 0._r8 this%snw_rds_top_col(c) = spval this%sno_liq_top_col(c) = spval @@ -1236,7 +1236,7 @@ subroutine ResetBulk(this, column) integer , intent(in) :: column ! column index !----------------------------------------------------------------------- - this%snw_rds_col(column,0) = snw_rds_min + this%snw_rds_col(column,0) = params_inst%snw_rds_min end subroutine ResetBulk diff --git a/src/cpl/share_esmf/cropcalStreamMod.F90 b/src/cpl/share_esmf/cropcalStreamMod.F90 index 46696eeba9..0ea63f2c6d 100644 --- a/src/cpl/share_esmf/cropcalStreamMod.F90 +++ b/src/cpl/share_esmf/cropcalStreamMod.F90 @@ -7,7 +7,8 @@ module cropcalStreamMod ! Read crop calendars from streams ! ! 
!USES: - use ESMF + use ESMF , only : ESMF_LogFoundError, ESMF_LOGERR_PASSTHRU, ESMF_Finalize + use ESMF , only : ESMF_END_ABORT use shr_kind_mod , only : r8 => shr_kind_r8, CL => shr_kind_CL, CS => shr_kind_CS use dshr_strdata_mod , only : shr_strdata_type use decompMod , only : bounds_type @@ -322,6 +323,7 @@ subroutine cropcal_interp(bounds, num_pcropp, filter_pcropp, crop_inst) integer :: n, g integer :: lsize integer :: rc + integer :: begp, endp real(r8), pointer :: dataptr1d_swindow_start(:) real(r8), pointer :: dataptr1d_swindow_end (:) real(r8), pointer :: dataptr1d_cultivar_gdds(:) @@ -342,6 +344,9 @@ subroutine cropcal_interp(bounds, num_pcropp, filter_pcropp, crop_inst) ! Place all data from each type into a temporary 2d array lsize = bounds%endg - bounds%begg + 1 + begp = bounds%begp + endp= bounds%endp + dayspyr = get_curr_days_per_year() ! Read prescribed sowing window start dates from input files @@ -397,17 +402,17 @@ subroutine cropcal_interp(bounds, num_pcropp, filter_pcropp, crop_inst) ! Ensure that, if mxsowings > 1, sowing windows are ordered such that ENDS are monotonically increasing. This is necessary because of how get_swindow() works. if (mxsowings > 1) then - if (any(ends(:,2:mxsowings) <= ends(:,1:mxsowings-1) .and. & - ends(:,2:mxsowings) >= 1)) then + if (any(ends(begp:endp,2:mxsowings) <= ends(begp:endp,1:mxsowings-1) .and. & + ends(begp:endp,2:mxsowings) >= 1)) then write(iulog, *) 'Sowing window inputs must be ordered such that end dates are monotonically increasing.' call ESMF_Finalize(endflag=ESMF_END_ABORT) end if end if ! Handle invalid sowing window values - if (any(starts < 1 .or. ends < 1)) then + if (any(starts(begp:endp,:) < 1 .or. ends(begp:endp,:) < 1)) then ! Fail if not allowing fallback to paramfile sowing windows - if ((.not. allow_invalid_swindow_inputs) .and. any(all(starts < 1, dim=2) .and. patch%wtgcell > 0._r8 .and. patch%itype >= npcropmin)) then + if ((.not. allow_invalid_swindow_inputs) .and. 
any(all(starts(begp:endp,:) < 1, dim=2) .and. patch%wtgcell > 0._r8 .and. patch%itype >= npcropmin)) then write(iulog, *) 'At least one crop in one gridcell has invalid prescribed sowing window start date(s). To ignore and fall back to paramfile sowing windows, set allow_invalid_swindow_inputs to .true.' write(iulog, *) 'Affected crops:' do ivt = npcropmin, mxpft @@ -422,7 +427,7 @@ subroutine cropcal_interp(bounds, num_pcropp, filter_pcropp, crop_inst) call ESMF_Finalize(endflag=ESMF_END_ABORT) ! Fail if a sowing window start date is prescribed without an end date (or vice versa) - else if (any((starts >= 1 .and. ends < 1) .or. (starts < 1 .and. ends >= 1))) then + else if (any((starts(begp:endp,:) >= 1 .and. ends(begp:endp,:) < 1) .or. (starts(begp:endp,:) < 1 .and. ends(begp:endp,:) >= 1))) then write(iulog, *) 'Every prescribed sowing window start date must have a corresponding end date.' call ESMF_Finalize(endflag=ESMF_END_ABORT) end if diff --git a/src/main/clm_varctl.F90 b/src/main/clm_varctl.F90 index acefe4acda..6e89f0952e 100644 --- a/src/main/clm_varctl.F90 +++ b/src/main/clm_varctl.F90 @@ -221,6 +221,8 @@ module clm_varctl ! which snow cover fraction parameterization to use character(len=64), public :: snow_cover_fraction_method + ! which snow thermal conductivity parameterization to use + character(len=25), public :: snow_thermal_cond_method ! atmospheric CO2 molar ratio (by volume) (umol/mol) real(r8), public :: co2_ppmv = 355._r8 ! 
diff --git a/src/main/controlMod.F90 b/src/main/controlMod.F90 index 5937e55b04..deb8c044d8 100644 --- a/src/main/controlMod.F90 +++ b/src/main/controlMod.F90 @@ -199,7 +199,8 @@ subroutine control_init(dtime) clump_pproc, & create_crop_landunit, nsegspc, co2_ppmv, & albice, soil_layerstruct_predefined, soil_layerstruct_userdefined, & - soil_layerstruct_userdefined_nlevsoi, use_subgrid_fluxes, snow_cover_fraction_method, & + soil_layerstruct_userdefined_nlevsoi, use_subgrid_fluxes, & + snow_thermal_cond_method, snow_cover_fraction_method, & irrigate, run_zero_weight_urban, all_active, & crop_fsat_equals_zero, for_testing_run_ncdiopio_tests, & for_testing_use_second_grain_pool, for_testing_use_repr_structure_pool, & @@ -850,6 +851,7 @@ subroutine control_spmd() ! physics variables call mpi_bcast (nsegspc, 1, MPI_INTEGER, 0, mpicom, ier) call mpi_bcast (use_subgrid_fluxes , 1, MPI_LOGICAL, 0, mpicom, ier) + call mpi_bcast (snow_thermal_cond_method, len(snow_thermal_cond_method), MPI_CHARACTER, 0, mpicom, ier) call mpi_bcast (snow_cover_fraction_method , len(snow_cover_fraction_method), MPI_CHARACTER, 0, mpicom, ier) call mpi_bcast (z0param_method , len(z0param_method), MPI_CHARACTER, 0, mpicom, ier) call mpi_bcast (use_z0m_snowmelt, 1, MPI_LOGICAL, 0, mpicom, ier) diff --git a/src/soilbiogeochem/SoilBiogeochemDecompCascadeMIMICSMod.F90 b/src/soilbiogeochem/SoilBiogeochemDecompCascadeMIMICSMod.F90 index b8e7137a32..7185febca6 100644 --- a/src/soilbiogeochem/SoilBiogeochemDecompCascadeMIMICSMod.F90 +++ b/src/soilbiogeochem/SoilBiogeochemDecompCascadeMIMICSMod.F90 @@ -251,12 +251,12 @@ subroutine readParams ( ncid ) call ncd_io(trim(tString), params_inst%mimics_fmet(:), 'read', ncid, readvar=readv) if ( .not. 
readv ) call endrun(msg=trim(errCode)//trim(tString)//errMsg(sourcefile, __LINE__)) - allocate(params_inst%mimics_fchem_r(4)) + allocate(params_inst%mimics_fchem_r(2)) tString='mimics_fchem_r' call ncd_io(trim(tString), params_inst%mimics_fchem_r(:), 'read', ncid, readvar=readv) if ( .not. readv ) call endrun(msg=trim(errCode)//trim(tString)//errMsg(sourcefile, __LINE__)) - allocate(params_inst%mimics_fchem_k(4)) + allocate(params_inst%mimics_fchem_k(2)) tString='mimics_fchem_k' call ncd_io(trim(tString), params_inst%mimics_fchem_k(:), 'read', ncid, readvar=readv) if ( .not. readv ) call endrun(msg=trim(errCode)//trim(tString)//errMsg(sourcefile, __LINE__)) @@ -828,10 +828,8 @@ subroutine decomp_rates_mimics(bounds, num_bgc_soilc, filter_bgc_soilc, & real(r8):: mimics_fmet_p4 real(r8):: mimics_fchem_r_p1 real(r8):: mimics_fchem_r_p2 - real(r8):: mimics_fchem_r_p3 real(r8):: mimics_fchem_k_p1 real(r8):: mimics_fchem_k_p2 - real(r8):: mimics_fchem_k_p3 real(r8):: mimics_tau_mod_min real(r8):: mimics_tau_mod_max real(r8):: mimics_tau_mod_factor @@ -1092,10 +1090,8 @@ subroutine decomp_rates_mimics(bounds, num_bgc_soilc, filter_bgc_soilc, & mimics_fmet_p4 = params_inst%mimics_fmet(4) mimics_fchem_r_p1 = params_inst%mimics_fchem_r(1) mimics_fchem_r_p2 = params_inst%mimics_fchem_r(2) - mimics_fchem_r_p3 = params_inst%mimics_fchem_r(3) mimics_fchem_k_p1 = params_inst%mimics_fchem_k(1) mimics_fchem_k_p2 = params_inst%mimics_fchem_k(2) - mimics_fchem_k_p3 = params_inst%mimics_fchem_k(3) mimics_tau_mod_min = params_inst%mimics_tau_mod_min mimics_tau_mod_max = params_inst%mimics_tau_mod_max mimics_tau_mod_factor = params_inst%mimics_tau_mod_factor @@ -1186,9 +1182,9 @@ subroutine decomp_rates_mimics(bounds, num_bgc_soilc, filter_bgc_soilc, & ! Used in the update of certain pathfrac terms that vary with time ! 
in the next loop fchem_m1 = min(1._r8, max(0._r8, mimics_fchem_r_p1 * & - exp(mimics_fchem_r_p2 * fmet) * mimics_fchem_r_p3)) + exp(mimics_fchem_r_p2 * fmet))) fchem_m2 = min(1._r8, max(0._r8, mimics_fchem_k_p1 * & - exp(mimics_fchem_k_p2 * fmet) * mimics_fchem_k_p3)) + exp(mimics_fchem_k_p2 * fmet))) do j = 1,nlevdecomp ! vmax ends up in units of per hour but is expected @@ -1283,6 +1279,7 @@ subroutine decomp_rates_mimics(bounds, num_bgc_soilc, filter_bgc_soilc, & ! The right hand side is OXIDAT in the testbed (line 1145) decomp_k(c,j,i_chem_som) = (term_1 + term_2) * w_d_o_scalars + ! Currently, mimics_densdep = 1 so as to have no effect decomp_k(c,j,i_cop_mic) = tau_m1 * & m1_conc**(mimics_densdep - 1.0_r8) * w_d_o_scalars favl = min(1.0_r8, max(0.0_r8, 1.0_r8 - fphys_m1(c,j) - fchem_m1)) diff --git a/src/utils/clmfates_interfaceMod.F90 b/src/utils/clmfates_interfaceMod.F90 index 4b9ce297ed..48dc3297c0 100644 --- a/src/utils/clmfates_interfaceMod.F90 +++ b/src/utils/clmfates_interfaceMod.F90 @@ -7,11 +7,9 @@ module CLMFatesInterfaceMod ! ! This is also the only location where CLM code is allowed to see FATES memory ! structures. - ! The routines here, that call FATES library routines, will not pass any types defined - ! by the driving land model (HLM). - ! - ! either native type arrays (int,real,log, etc) or packed into fates boundary condition - ! structures. + ! The routines here, that call FATES library routines, cannot pass most types defined + ! by the driving land model (HLM), only native type arrays (int,real,log, etc), implementations + ! of fates abstract classes, and references into fates boundary condition structures. ! ! Note that CLM/ALM does use Shared Memory Parallelism (SMP), where processes such as ! the update of state variables are forked. 
However, IO is not assumed to be @@ -112,6 +110,7 @@ module CLMFatesInterfaceMod use shr_log_mod , only : errMsg => shr_log_errMsg use clm_varcon , only : dzsoi_decomp use FuncPedotransferMod, only: get_ipedof + use CLMFatesParamInterfaceMod, only: fates_param_reader_ctsm_impl ! use SoilWaterPlantSinkMod, only : Compute_EffecRootFrac_And_VertTranSink_Default ! Used FATES Modules @@ -128,6 +127,10 @@ module CLMFatesInterfaceMod use FatesInterfaceMod , only : set_fates_ctrlparms use FatesInterfaceMod , only : UpdateFatesRMeansTStep use FatesInterfaceMod , only : InitTimeAveragingGlobals + + use FatesParametersInterface, only : fates_param_reader_type + use FatesParametersInterface, only : fates_parameters_type + use FatesInterfaceMod , only : DetermineGridCellNeighbors use FatesHistoryInterfaceMod, only : fates_hist @@ -283,6 +286,7 @@ subroutine CLMFatesGlobals1(surf_numpft,surf_numcft,maxsoil_patches) integer :: pass_sp integer :: pass_masterproc logical :: verbose_output + type(fates_param_reader_ctsm_impl) :: var_reader call t_startf('fates_globals1') @@ -326,6 +330,7 @@ subroutine CLMFatesGlobals1(surf_numpft,surf_numcft,maxsoil_patches) end if + ! The following call reads in the parameter file ! and then uses that to determine the number of patches ! FATES requires. We pass that to CLM here @@ -334,7 +339,7 @@ subroutine CLMFatesGlobals1(surf_numpft,surf_numcft,maxsoil_patches) ! and allocations on the FATES side, which require ! 
some allocations from CLM (like soil layering) - call SetFatesGlobalElements1(use_fates,surf_numpft,surf_numcft) + call SetFatesGlobalElements1(use_fates,surf_numpft,surf_numcft,var_reader) maxsoil_patches = fates_maxPatchesPerSite @@ -3590,4 +3595,6 @@ subroutine GetAndSetTime() end subroutine GetAndSetTime + !----------------------------------------------------------------------- + end module CLMFatesInterfaceMod diff --git a/src/utils/clmfates_paraminterfaceMod.F90 b/src/utils/clmfates_paraminterfaceMod.F90 index dedd8629cc..ea27f563bf 100644 --- a/src/utils/clmfates_paraminterfaceMod.F90 +++ b/src/utils/clmfates_paraminterfaceMod.F90 @@ -5,17 +5,30 @@ module CLMFatesParamInterfaceMod use shr_kind_mod, only : r8 => shr_kind_r8, SHR_KIND_CL use FatesGlobals, only : fates_log use FatesParametersInterface, only : fates_parameters_type + use FatesParametersInterface, only : fates_param_reader_type use EDParamsMod, only : FatesRegisterParams, FatesReceiveParams use SFParamsMod, only : SpitFireRegisterParams, SpitFireReceiveParams use PRTInitParamsFATESMod, only : PRTRegisterParams, PRTReceiveParams use FatesSynchronizedParamsMod, only : FatesSynchronizedParamsInst implicit none + public :: fates_param_reader_ctsm_impl + ! + type, extends(fates_param_reader_type) :: fates_param_reader_ctsm_impl + private - ! NOTE(bja, 2017-01) these methods can NOT be part of the clmi-fates - ! nterface type because they are called before the instance is + ! !PRIVATE MEMBER DATA: + + contains + ! !PUBLIC MEMBER FUNCTIONS: + procedure, public :: Read ! Read params from disk + + end type fates_param_reader_ctsm_impl + + + ! NOTE(bja, 2017-01) these methods can NOT be part of the clm-fates + ! interface type because they are called before the instance is ! initialized. 
- public :: FatesReadParameters public :: FatesReadPFTs private :: ParametersFromNetCDF private :: SetParameterDimensions @@ -28,44 +41,6 @@ module CLMFatesParamInterfaceMod contains - !----------------------------------------------------------------------- - subroutine FatesReadParameters() - use clm_varctl, only : use_fates, paramfile, fates_paramfile - use spmdMod, only : masterproc - - implicit none - - character(len=32) :: subname = 'FatesReadParameters' - class(fates_parameters_type), allocatable :: fates_params - logical :: is_host_file - - if (masterproc) then - write(fates_log(), *) 'clmfates_interfaceMod.F90::'//trim(subname)//' :: CLM reading ED/FATES '//' parameters ' - end if - - allocate(fates_params) - call fates_params%Init() ! fates_params class, in FatesParameterInterfaceMod - call FatesRegisterParams(fates_params) !EDParamsMod, only operates on fates_params class - call SpitFireRegisterParams(fates_params) !SpitFire Mod, only operates of fates_params class - call PRTRegisterParams(fates_params) ! PRT mod, only operates on fates_params class - call FatesSynchronizedParamsInst%RegisterParams(fates_params) !Synchronized params class in Synchronized params mod, only operates on fates_params class - - is_host_file = .false. - call ParametersFromNetCDF(fates_paramfile, is_host_file, fates_params) - - is_host_file = .true. 
- call ParametersFromNetCDF(paramfile, is_host_file, fates_params) - - call FatesReceiveParams(fates_params) - call SpitFireReceiveParams(fates_params) - call PRTReceiveParams(fates_params) - call FatesSynchronizedParamsInst%ReceiveParams(fates_params) - - call fates_params%Destroy() - deallocate(fates_params) - - end subroutine FatesReadParameters - !----------------------------------------------------------------------- subroutine FatesReadPFTs() @@ -238,4 +213,23 @@ subroutine ParametersFromNetCDF(filename, is_host_file, fates_params) end subroutine ParametersFromNetCDF !----------------------------------------------------------------------- + subroutine Read(this, fates_params ) + ! + ! !DESCRIPTION: + ! Read 'fates_params' parameters from storage. + ! + ! USES + use clm_varctl, only : fname_len, paramfile, fates_paramfile + ! !ARGUMENTS: + class(fates_param_reader_ctsm_impl) :: this + class(fates_parameters_type), intent(inout) :: fates_params + !----------------------------------------------------------------------- + logical :: is_host_file = .false. + + call ParametersFromNetCDF(fates_paramfile, is_host_file, fates_params) + + end subroutine Read + + !----------------------------------------------------------------------- + end module CLMFatesParamInterfaceMod diff --git a/test/tools/README b/test/tools/README index a2acbcae40..c545f625b8 100644 --- a/test/tools/README +++ b/test/tools/README @@ -11,7 +11,7 @@ To use... ./test_driver.sh -i -on cheyenne +on Derecho qcmd -l walltime=08:00:00 -- ./test_driver.sh -i >& run.out & @@ -33,7 +33,7 @@ To run neon-specific tests, please use login nodes: env CLM_INPUT_TESTS=`pwd`/tests_pretag_nompi_neon ./test_driver.sh -i > & run_neon.out & -Intended for use on NCAR machines cheyenne, geyser (DAV) and hobart. +Intended for use on NCAR machines Derecho, Casper (DAV) and izumi. II. 
RUNNING test_driver.sh TOOLS TESTING: diff --git a/test/tools/test_driver.sh b/test/tools/test_driver.sh index d4676cca69..6b1811c8ab 100755 --- a/test/tools/test_driver.sh +++ b/test/tools/test_driver.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/sh # # test_driver.sh: driver script for the offline testing of CLM of tools # @@ -26,15 +26,74 @@ hostname=`hostname` echo $hostname case $hostname in - ##cheyenne - cheyenne* | r*i*n*) - submit_script="test_driver_cheyenne${cur_time}.sh" + ##Derecho + derecho* | dec*) + submit_script="test_driver_derecho${cur_time}.sh" ##vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv writing to batch script vvvvvvvvvvvvvvvvvvv cat > ./${submit_script} << EOF #!/bin/sh # +interactive="YES" +input_file="tests_pretag_derecho_nompi" +c_threads=128 + +export INITMODULES="/glade/u/apps/derecho/23.06/spack/opt/spack/lmod/8.7.20/gcc/7.5.0/pdxb/lmod/lmod/init/sh" +. \$INITMODULES + +module --force purge +module load ncarenv +module load craype +module load intel +module load mkl +module load ncarcompilers +module load netcdf +module load nco +module load ncl + +#omp threads +if [ -z "\$CLM_THREADS" ]; then #threads NOT set on command line + export CLM_THREADS=\$c_threads +fi + +# Stop on first failed test +if [ -z "\$CLM_SOFF" ]; then #CLM_SOFF NOT set + export CLM_SOFF=FALSE +fi + +export CESM_MACH="derecho" +export CESM_COMP="intel" + +export NETCDF_DIR=\$NETCDF +export INC_NETCDF=\$NETCDF/include +export LIB_NETCDF=\$NETCDF/lib +export MAKE_CMD="gmake -j " +export CFG_STRING="" +export TOOLS_MAKE_STRING="USER_FC=ifort USER_LINKER=ifort USER_CPPDEFS=-DLINUX" +export MACH_WORKSPACE=\$SCRATCH +export CPRNC_EXE="$CESMDATAROOT/cprnc/cprnc" +dataroot="$CESMDATAROOT/inputdata" +export TOOLSLIBS="" +export REGRID_PROC=1 +export TOOLS_CONF_STRING="--mpilib mpi-serial" + + +echo_arg="" + +EOF +#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ writing to batch script ^^^^^^^^^^^^^^^^^^^ + ;; + + ##cheyenne + cheyenne* | r*i*n*) + submit_script="test_driver_cheyenne${cur_time}.sh" + 
+#vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv writing to batch script vvvvvvvvvvvvvvvvvvv +cat > ./${submit_script} << EOF +#!/bin/sh + + interactive="YES" input_file="tests_pretag_cheyenne_nompi" c_threads=36 @@ -314,7 +373,7 @@ EOF ;; * ) - echo "Only setup to work on: cheyenne, hobart and izumi" + echo "Only setup to work on: derecho, cheyenne, hobart and izumi" exit @@ -633,7 +692,7 @@ case $arg1 in * ) echo "" echo "**********************" - echo "usage on cheyenne, hobart, and izumi: " + echo "usage on derecho, cheyenne, hobart, and izumi: " echo "./test_driver.sh -i" echo "" echo "valid arguments: " diff --git a/test/tools/tests_pretag_derecho_nompi b/test/tools/tests_pretag_derecho_nompi new file mode 100644 index 0000000000..5fdaf335ae --- /dev/null +++ b/test/tools/tests_pretag_derecho_nompi @@ -0,0 +1,9 @@ +smba1 blba1 +smbd1 blbd1 +sm0a1 bl0a1 +sm0c1 bl0c1 +smaa2 blaa2 +smba1 blba1 +smb81 blb81 +smbc1 blbc1 +smbd1 blbd1 diff --git a/tools/contrib/ssp_anomaly_forcing_smooth b/tools/contrib/ssp_anomaly_forcing_smooth index ae3d189a8b..362e47c67d 100755 --- a/tools/contrib/ssp_anomaly_forcing_smooth +++ b/tools/contrib/ssp_anomaly_forcing_smooth @@ -3,7 +3,7 @@ ssp_anomaly_forcing_smooth -Create anomoly forcing datasets for SSP scenarios that can be used by CESM datm model +Create anomaly forcing datasets for SSP scenarios that can be used by CESM datm model load proper modules first, i.e. 
@@ -21,6 +21,169 @@ import numpy as np import netCDF4 as netcdf4 +# Adds global attributes, returning hdir and fdir +def add_global_attributes(ds, historydate, histdir, sspdir, num_ens, climo_year, climo_base_nyrs, dpath, dfile, hist_yrstart, hist_yrend, ssp_yrstart, ssp_yrend, timetag): + ds.Created_on = timetag + + ds.title = "anomaly forcing data" + ds.note1 = ( + "Anomaly/scale factors calculated relative to " + + str(climo_year - (climo_base_nyrs - 1) / 2) + + "-" + + str(climo_year + (climo_base_nyrs - 1) / 2) + ) + ds.history = historydate + ": created by " + sys.argv[0] + stdout = os.popen("git describe") + ds.gitdescribe = stdout.read().rstrip() + ds.Source = "CMIP6 CESM simulations" + ds.Conventions = "CF-1.0" + ds.comment = ( + "Monthly scale factors for given SSP scenario compared to a climatology based on" + + " data centered on " + + str(climo_year) + + " over the range given in note1" + ) + ds.number_of_ensemble_members = str(num_ens) + ds.Created_by = getuser() + + for nens in range(num_ens): + hdir = dpath + histdir[nens] + dfile + fdir = dpath + sspdir[nens] + dfile + if nens == 0: + ds.Created_from_historical_dirs = hdir + ds.Created_from_scenario_dirs = fdir + else: + ds.Created_from_historical_dirs += ", " + hdir + ds.Created_from_scenario_dirs += ", " + fdir + + ds.History_years = str(hist_yrstart) + "," + str(hist_yrend) + ds.Scenario_years = str(ssp_yrstart) + "," + str(ssp_yrend) + ds.institution = "National Center for Atmospheric Research" + return hdir,fdir + + +def create_fill_latlon(ds, data, var_name): + + ds.createDimension(var_name, int(data.size)) + wl = ds.createVariable(var_name, np.float64, (var_name,)) + + if var_name == "lat": + wl.units = "degrees_north" + wl.long_name = "Latitude" + elif var_name == "lon": + wl.units = "degrees_east" + wl.long_name = "Longitude" + wl.mode = "time-invariant" + + wl[:] = data + + return ds + + +def create_fill_time(ds, time, ntime, ssp_time_units=None, ssp_time_longname=None, adj_time=False): + 
if ntime is not None: + ntime = int(ntime) + ds.createDimension("time", ntime) + + wtime = ds.createVariable("time", np.float64, ("time",)) + + if ssp_time_units is not None: + wtime.units = ssp_time_units + if ssp_time_longname is not None: + wtime.long_name = ssp_time_longname + wtime.calendar = "noleap" + + # adjust time to middle of month + if adj_time: + wtime_offset = 15 - time[0] + wtime[:] = time + wtime_offset + else: + wtime[:] = time + + return ds + + +def create_fill_ancillary_vars(ds, landfrac, landmask, area): + + wmask = ds.createVariable("landmask", np.int32, ("lat", "lon")) + warea = ds.createVariable("area", np.float64, ("lat", "lon")) + wfrac = ds.createVariable("landfrac", np.float64, ("lat", "lon")) + + warea.units = "km2" + wfrac.units = "unitless" + wmask.units = "unitless" + + warea.long_name = "Grid cell area" + wfrac.long_name = "Grid cell land fraction" + wmask.long_name = "Grid cell land mask" + + warea.mode = "time-invariant" + wfrac.mode = "time-invariant" + wmask.mode = "time-invariant" + + # write to file -------------------------------------------- + wmask[:, :] = landmask + wfrac[:, :] = landfrac + warea[:, :] = area + + return ds + + +def add_to_dataset(ds, var_name, data, units=None, mode=None, historical_source_files=None, scenario_source_files=None, long_name=None, cell_methods=None): + dims = ("time", "lat", "lon") + data_type = np.float64 + + wvar = ds.createVariable( + var_name, + data_type, + dims, + fill_value=data_type(1.0e36), + ) + + wvar[:, :, :] = data + + if units is not None: + wvar.units = units + if mode is not None: + wvar.mode = mode + if historical_source_files is not None: + wvar.historical_source_files = historical_source_files + if scenario_source_files is not None: + wvar.scenario_source_files = scenario_source_files + if long_name is not None: + wvar.long_name = long_name + if cell_methods is not None: + wvar.cell_methods = cell_methods + + return ds + + +def create_fill_forcing(ds, field_out, units, 
anomsf, field_out_wind, f, hdir, fdir, histfiles, sspfiles, long_name, anom_fld): + + historical_source_files = "".join(histfiles).replace(hdir, "") + scenario_source_files = "".join(sspfiles).replace(fdir, "") + mode = "time-dependent" + + if field_out[f] == "sfcWind": + long_name = str(long_name) + " U component " + anomsf[f] + var_name = field_out_wind[0] + data = anom_fld / np.sqrt(2) + else: + long_name = str(long_name) + " " + anomsf[f] + var_name = field_out[f] + data = anom_fld + # Was missing cell_methods attribute in original + ds = add_to_dataset(ds, var_name, data, units=units[f], mode=mode, historical_source_files=historical_source_files, scenario_source_files=scenario_source_files, long_name=long_name) + + if field_out[f] == "sfcWind": + long_name = long_name.replace("U component", "V component") + var_name = field_out_wind[1] + # Was missing mode attribute in original + ds = add_to_dataset(ds, var_name, data, units=units[f], historical_source_files=historical_source_files, scenario_source_files=scenario_source_files, long_name=long_name, cell_methods="time: mean") + + return ds + + parser = argparse.ArgumentParser(description="Create anomaly forcing") parser.add_argument( "sspnum", @@ -31,16 +194,25 @@ parser.add_argument( ) parser.add_argument( "--write_climo", + "--write-climo", help="write out climatology files and exit", action="store_true", default=False, ) parser.add_argument( "--print_ssps", + "--print-ssps", help="Just print out directory names and exit", action="store_true", default=False, ) +parser.add_argument( + "--output_dir", "--output-dir", + help="Top-level output directory (default: ./anomaly_forcing/). 
Sub-directory will be created for the selected scenario.", + type=str, + default=os.path.join(".", "anomaly_forcing"), +) + args = parser.parse_args() if args.sspnum == 0: @@ -48,7 +220,7 @@ if args.sspnum == 0: # ------------------------------------------------------- -print("Create anomoly forcing data that can be used by CTSM in CESM") +print("Create anomaly forcing data that can be used by CTSM in CESM") # Input and output directories make sure they exist datapath = "/glade/campaign/collections/cmip/CMIP6/timeseries-cmip6/" # Path on casper @@ -109,15 +281,13 @@ _v2 is just used for restart files that have been spatially interpolated """ -spath = "./" if os.path.exists(datapath): print("Input data directory:" + datapath) else: sys.exit("Could not find input directory: " + datapath) -if os.path.exists(spath): - print("Output data directory:" + spath) -else: - sys.exit("Could not find output directory: " + spath) +if not os.path.exists(args.output_dir): + os.makedirs(args.output_dir) +print("Output data directory:" + args.output_dir) # Settings to run with today = datetime.date.today() @@ -165,9 +335,6 @@ if args.print_ssps: sspnum = args.sspnum -# hist_case needed? -hist_case = "b.e21.BHIST.f09_g17.CMIP6-historical.010" - if sspnum == 1: # SSP1-26 ssptag = "SSP1-2.6" @@ -196,25 +363,15 @@ if num_ens != len(histdir): print("number of ensemble members not the same") sys.exit("number of members different") -# test w/ 1 ensemble member -num_ens = 3 - # Setup output directory -sspoutdir = "anomaly_forcing/CMIP6-" + ssptag +sspoutdir = "CMIP6-" + ssptag -outdir = spath + sspoutdir +outdir = os.path.join(args.output_dir, sspoutdir) if not os.path.exists(outdir): os.makedirs(outdir) print("Output specific data directory :" + outdir) -# historical files are split by 50 year periods; use last period -hist_suffix = ["200001-201412.nc"] # not standardized?! 
-# hist_suffix = ['-201412.nc'] -# projections are split 2015/2064 2065/2100 -ssp_suffix = ["201501-206412.nc", "206501-210012.nc"] -# ssp_suffix = ['-206412.nc','-210012.nc'] - climo_year = 2015 # ten years on either side (21 years total) climo_base_nyrs = 21 @@ -244,7 +401,11 @@ field_out_wind = ["uas", "vas"] nfields = len(field_in) +output_format = "NETCDF3_64BIT_DATA" + # -- Loop over forcing fields ------------------------------------ + + for f in range(nfields): # -- Loop over ensemble members ------------------------------ @@ -482,71 +643,31 @@ for f in range(nfields): if write_climo: # Use NetCDF4 format, because using older NetCDF formats are too slow w = netcdf4.Dataset( - outdir + field_out[f] + "_climo" + creationdate + ".nc", + os.path.join(outdir, field_out[f] + "_climo" + creationdate + ".nc"), "w", - format="NETCDF3_64BIT_DATA" + format=output_format, ) - w.createDimension("lat", int(nlat)) - w.createDimension("lon", int(nlon)) - w.createDimension("time", int(nmo)) - - wtime = w.createVariable("time", np.float64, ("time",)) - wlat = w.createVariable("lat", np.float64, ("lat",)) - wlon = w.createVariable("lon", np.float64, ("lon",)) - wvar = w.createVariable( - field_out[f], - np.float64, - ("time", "lat", "lon"), - fill_value=np.float64(1.0e36), - ) - wtime[ - :, - ] = time[0:12] - wlon[ - :, - ] = lon - wlat[ - :, - ] = lat - wvar[:, :, :] = climo + w = create_fill_latlon(w, lat, "lat") + w = create_fill_latlon(w, lon, "lon") + w = create_fill_time(w, time[0:12], nmo) + + add_to_dataset(w, field_out[f], climo) w.close() # Use NetCDF4 format, because using older NetCDF formats are too slow w = netcdf4.Dataset( - outdir + field_out[f] + "_smooth" + creationdate + ".nc", + os.path.join(outdir, field_out[f] + "_smooth" + creationdate + ".nc"), "w", - format="NETCDF3_64BIT_DATA" - ) - w.createDimension("lat", int(nlat)) - w.createDimension("lon", int(nlon)) - w.createDimension("time", int(tm)) - - wtime = w.createVariable("time", np.float64, ("time",)) 
- wlat = w.createVariable("lat", np.float64, ("lat",)) - wlon = w.createVariable("lon", np.float64, ("lon",)) - wvar = w.createVariable( - field_out[f], - np.float64, - ("time", "lat", "lon"), - fill_value=np.float64(1.0e36), + format=output_format, ) - wvar2 = w.createVariable( - "smooth_" + field_out[f], - np.float64, - ("time", "lat", "lon"), - fill_value=np.float64(1.0e36), - ) - - wtime[:] = time - wlon[ - :, - ] = lon - wlat[ - :, - ] = lat - wvar[:, :, :] = temp_fld - wvar2[:, :, :] = stemp_fld + w = create_fill_latlon(w, lat, "lat") + w = create_fill_latlon(w, lon, "lon") + w = create_fill_time(w, time, tm) + + add_to_dataset(w, field_out[f], temp_fld) + add_to_dataset(w, "smooth_" + field_out[f], stemp_fld) w.close() + print("Exit early after writing out climatology\n\n") sys.exit() @@ -556,9 +677,9 @@ for f in range(nfields): # Use NetCDF4 format, because using older NetCDF formats are too slow # Will need to convert to CDF5 format at the end, as we can't seem to # output in CDF5 format using netCDF4 python interfaces - outfilename = outdir + "/" + "af.allvars" + outfile_suffix + outfilename = os.path.join(outdir, "af.allvars" + outfile_suffix) print("Creating: " + outfilename) - outfile = netcdf4.Dataset(outfilename, "w", format="NETCDF3_64BIT_DATA") + outfile = netcdf4.Dataset(outfilename, "w", format=output_format) # creation date on the file command = 'date "+%Y/%m/%d"' @@ -566,148 +687,21 @@ for f in range(nfields): x = x2.communicate() timetag = x[0].decode("utf-8").strip() - outfile.Created_on = timetag + # Add global attributes and get hdir/fdir + hdir, fdir = add_global_attributes(outfile, historydate, histdir, sspdir, num_ens, climo_year, climo_base_nyrs, dpath, dfile, hist_yrstart, hist_yrend, ssp_yrstart, ssp_yrend, timetag) - outfile.title = "anomaly forcing data" - outfile.note1 = ( - "Anomaly/scale factors calculated relative to " - + str(climo_year - (climo_base_nyrs - 1) / 2) - + "-" - + str(climo_year + (climo_base_nyrs - 1) / 2) - ) - 
outfile.history = historydate + ": created by " + sys.argv[0] - stdout = os.popen("git describe") - outfile.gitdescribe = stdout.read().rstrip() - outfile.Source = "CMIP6 CESM simulations" - outfile.Conventions = "CF-1.0" - outfile.comment = ( - "Monthly scale factors for given SSP scenario compared to a climatology based on" - + " data centered on " - + str(climo_year) - + " over the range given in note1" - ) - outfile.number_of_ensemble_members = str(num_ens) - outfile.Created_by = getuser() - - for nens in range(num_ens): - hdir = dpath + histdir[nens] + dfile - fdir = dpath + sspdir[nens] + dfile - if nens == 0: - outfile.Created_from_historical_dirs = hdir - outfile.Created_from_scenario_dirs = fdir - else: - outfile.Created_from_historical_dirs += ", " + hdir - outfile.Created_from_scenario_dirs += ", " + fdir - - outfile.History_years = str(hist_yrstart) + "," + str(hist_yrend) - outfile.Scenario_years = str(ssp_yrstart) + "," + str(ssp_yrend) - outfile.institution = "National Center for Atmospheric Research" - - outfile.createDimension("lat", size=int(nlat)) - outfile.createDimension("lon", size=int(nlon)) - outfile.createDimension("time", None) - - wtime = outfile.createVariable("time", np.float64, ("time",)) - wlat = outfile.createVariable("lat", np.float64, ("lat",)) - wlon = outfile.createVariable("lon", np.float64, ("lon",)) - wmask = outfile.createVariable("landmask", np.int32, ("lat", "lon")) - warea = outfile.createVariable("area", np.float64, ("lat", "lon")) - wfrac = outfile.createVariable("landfrac", np.float64, ("lat", "lon")) - wtime.units = ssp_time_units - wlon.units = "degrees_east" - wlat.units = "degrees_north" - warea.units = "km2" - wfrac.units = "unitless" - wmask.units = "unitless" + # Create dimensions + outfile = create_fill_latlon(outfile, lat, "lat") + outfile = create_fill_latlon(outfile, lon, "lon") + outfile = create_fill_time(outfile, ssp_time, None, ssp_time_units=ssp_time_units, ssp_time_longname=ssp_time_longname, 
adj_time=True) - # wtime.long_name = 'Months since January '+str(fut_yrstart) - wtime.long_name = ssp_time_longname - wlon.long_name = "Longitude" - wlat.long_name = "Latitude" - warea.long_name = "Grid cell area" - wfrac.long_name = "Grid cell land fraction" - wmask.long_name = "Grid cell land mask" - wlon.mode = "time-invariant" - wlat.mode = "time-invariant" - warea.mode = "time-invariant" - wfrac.mode = "time-invariant" - wmask.mode = "time-invariant" - - wtime.calendar = "noleap" - - # write to file -------------------------------------------- - # wtime_offset = 0 - # adjust time to middle of month - # wtime_offset = -15 - wtime_offset = 15 - ssp_time[0] - wtime[:] = ssp_time + wtime_offset - wtime.calendar = "noleap" - wlon[:] = lon - wlat[:] = lat - wmask[:, :] = landmask - wfrac[:, :] = landfrac - warea[:, :] = area + # Create and fill ancillary variables + outfile = create_fill_ancillary_vars(outfile, landfrac, landmask, area) # -- End if on open file - if field_out[f] == "sfcWind": - wvar = outfile.createVariable( - field_out_wind[0], - np.float64, - ("time", "lat", "lon"), - fill_value=np.float64(1.0e36), - ) - else: - wvar = outfile.createVariable( - field_out[f], - np.float64, - ("time", "lat", "lon"), - fill_value=np.float64(1.0e36), - ) - wvar.units = units[f] - wvar.mode = "time-dependent" - - # write to file -------------------------------------------- - if field_out[f] == "sfcWind": - wvar.long_name = str(long_name) + " U component " + anomsf[f] - else: - wvar.long_name = str(long_name) + " " + anomsf[f] - - if field_out[f] == "sfcWind": - wvar[:, :, :] = anom_fld / np.sqrt(2) - else: - wvar[:, :, :] = anom_fld - - # List of source files - wvar.historical_source_files = "".join(histfiles).replace(hdir, "") - wvar.scenario_source_files = "".join(sspfiles).replace(fdir, "") - - # create second wind field for V component - if field_out[f] == "sfcWind": - command = 'date "+%y%m%d"' - x2 = subprocess.Popen(command, stdout=subprocess.PIPE, shell="True") 
- x = x2.communicate() - timetag = x[0].decode("utf-8").strip() - - wvar = outfile.createVariable( - field_out_wind[1], - np.float64, - ("time", "lat", "lon"), - fill_value=np.float64(1.0e36), - ) - wvar.units = units[f] - wvar.cell_methods = "time: mean" - wvar.long_name = str(long_name) + " V component " + anomsf[f] - - # write to file -------------------------------------------- - wvar[:, :, :] = anom_fld / np.sqrt(2) - - # List of source files - wvar.historical_source_files = "".join(histfiles).replace(hdir, "") - wvar.scenario_source_files = "".join(sspfiles).replace(fdir, "") - - # -- end if statement for write for V field -------- + outfile = create_fill_forcing(outfile, field_out, units, anomsf, field_out_wind, f, hdir, fdir, histfiles, sspfiles, long_name, anom_fld) # -- End Loop over forcing fields ------------------------------------ outfile.close() -print("\n\nSuccessfully made anomoly forcing datasets\n") +print("\n\nSuccessfully made anomaly forcing datasets\n") diff --git a/tools/modify_input_files/README.fsurdat_modifier b/tools/modify_input_files/README.fsurdat_modifier index 58dbadb902..885227c928 100644 --- a/tools/modify_input_files/README.fsurdat_modifier +++ b/tools/modify_input_files/README.fsurdat_modifier @@ -27,10 +27,17 @@ tools/modify_input_files/modify_fsurdat_template.cfg Instructions ------------ -To run on Cheyenne/Casper/Izumi +To run on various machines: 1) (Un)load, execute, and activate the following: +1a) First step to activate conda on your system +1a) Casper: module unload python module load conda +1a) Izumi: +module load python +1a) Derecho (nothing needs to be done for this step) +1a) Elsewhere (do what's needed to activate conda) +1b) On all systems ./py_env_create conda activate ctsm_pylib (Use "deactivate" to reverse the latter.) 
diff --git a/tools/modify_input_files/README.mesh_mask_modifier b/tools/modify_input_files/README.mesh_mask_modifier
index 13d98b9e7f..4e25e73826 100644
--- a/tools/modify_input_files/README.mesh_mask_modifier
+++ b/tools/modify_input_files/README.mesh_mask_modifier
@@ -10,10 +10,17 @@ tools/modify_input_files/modify_mesh_template.cfg
Instructions
------------
-To run on Cheyenne/Casper/Izumi
+To run on various machines:
1) (Un)load, execute, and activate the following:
+1a) First step to activate conda on your system
+1a) Casper:
module unload python
module load conda
+1a) Izumi:
+module load python
+1a) Derecho (nothing needs to be done for this step)
+1a) Elsewhere (do what's needed to activate conda)
+1b) On all systems
./py_env_create
conda activate ctsm_py
(Use "deactivate" to reverse the latter.)
@@ -49,7 +56,7 @@ In your copy of the CTSM (say, ~user/ctsm), go to the appropriate tool:
Enter the following (or similar) selections in modify_fill_indianocean.cfg:
-mesh_mask_in = /glade/p/cesmdata/cseg/inputdata/share/meshes/fv0.9x1.25_141008_polemod_ESMFmesh.nc
+mesh_mask_in = /glade/campaign/cesm/cesmdata/cseg/inputdata/share/meshes/fv0.9x1.25_141008_polemod_ESMFmesh.nc
mesh_mask_out = fv0.9x1.25_141008_polemod_ESMFmesh_modified.nc
landmask_file = .../path_to_your_copy_of/fill_indianocean.nc
diff --git a/tools/site_and_regional/README b/tools/site_and_regional/README
index 723ddb4a97..7b36b7d2b9 100644
--- a/tools/site_and_regional/README
+++ b/tools/site_and_regional/README
@@ -3,9 +3,9 @@ $CTSMROOT/tools/site_and_regional/README
The purpose of this directory is to contain all of the scripts that involve
creating CTSM input data files for single site as well as regional cases.
-The python scripts require the following settings before running on cheyenne: +The python scripts require the following settings before running: -module load conda +(Do what's needed to make conda available on your system) ../../py_env_create conda activate ctsm_pylib diff --git a/tools/site_and_regional/default_data.cfg b/tools/site_and_regional/default_data.cfg index 7e841dca54..0425aba133 100644 --- a/tools/site_and_regional/default_data.cfg +++ b/tools/site_and_regional/default_data.cfg @@ -1,5 +1,5 @@ [main] -clmforcingindir = /glade/p/cesmdata/inputdata +clmforcingindir = /glade/campaign/cesm/cesmdata/cseg/inputdata [datm_gswp3] dir = atm/datm7/atm_forcing.datm7.GSWP3.0.5d.v1.c170516 diff --git a/tools/site_and_regional/modify_singlept_site_neon b/tools/site_and_regional/modify_singlept_site_neon new file mode 100755 index 0000000000..1b790a74ca --- /dev/null +++ b/tools/site_and_regional/modify_singlept_site_neon @@ -0,0 +1,45 @@ +#!/usr/bin/env python3 +""" +This is a just top-level skeleton script that calls +modify_singlept_site_neon.py. +The original code (modify_singlept_site_neon.py) is located under +python/ctsm/site_and_regional folder. + +For full instructions on how to run the code and different options, +please check python/ctsm/site_and_regional/modify_singlept_site_neon.py file. + +This script is for modifying surface dataset at neon sites +using data available from the neon server. + +After creating a single point surface data file from a global +surface data file using subset_data.py, use this script to +overwrite some fields with site-specific data for neon sites. + +This script will do the following: +- Download neon data for the specified site if it does not exist + in the specified directory : (i.e. ../../../neon_surf_files). +- Modify surface dataset with downloaded data. 
+ +---------------------------------------------------------------- +To see all available options for modifying surface datasets at +tower sites: + ./modify_singlept_site_neon --help +---------------------------------------------------------------- +Instructions for running using conda python environments: +../../py_env_create +conda activate ctsm_pylib +""" + +import os +import sys + +# -- add python/ctsm to path +_CTSM_PYTHON = os.path.join( + os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, "python" +) +sys.path.insert(1, _CTSM_PYTHON) + +from ctsm.site_and_regional.modify_singlept_site_neon import main + +if __name__ == "__main__": + main() diff --git a/tools/site_and_regional/neon_surf_wrapper b/tools/site_and_regional/neon_surf_wrapper new file mode 100755 index 0000000000..306d38a774 --- /dev/null +++ b/tools/site_and_regional/neon_surf_wrapper @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 +""" +This is a just top-level skeleton script that calls +neon_surf_wrapper.py. +The original code (neon_surf_wrapper.py) is located under +python/ctsm/site_and_regional folder. + +For full instructions on how to run the code and different options, +please check python/ctsm/site_and_regional/neon_surf_wrapper.py file. + +This script is a simple wrapper for neon sites that performs the +following: + 1) For neon sites, subset surface dataset from global dataset + (i.e. ./subset_data.py ) + 2) Download neon and update the created surface dataset + based on the downloaded neon data. + (i.e. 
modify_singlept_site_neon.py) + +---------------------------------------------------------------- +Instructions for running using conda python environments: +../../py_env_create +conda activate ctsm_pylib +""" + +import os +import sys + +# -- add python/ctsm to path +_CTSM_PYTHON = os.path.join( + os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, "python" +) +sys.path.insert(1, _CTSM_PYTHON) + +from ctsm.site_and_regional.neon_surf_wrapper import main + +if __name__ == "__main__": + main() diff --git a/tools/site_and_regional/neon_surf_wrapper.py b/tools/site_and_regional/neon_surf_wrapper.py deleted file mode 100755 index 3271c72f08..0000000000 --- a/tools/site_and_regional/neon_surf_wrapper.py +++ /dev/null @@ -1,157 +0,0 @@ -#! /usr/bin/env python3 -""" -|------------------------------------------------------------------| -|--------------------- Instructions -----------------------------| -|------------------------------------------------------------------| -This script is a simple wrapper for neon sites that performs the -following: - 1) For neon sites, subset surface dataset from global dataset - (i.e. ./subset_data.py ) - 2) Download neon and update the created surface dataset - based on the downloaded neon data. - (i.e. modify_singlept_site_neon.py) - -Instructions for running using conda python environments: - -../../py_env_create -conda activate ctsm_py - -""" -# TODO -# Automatic downloading of missing files if they are missing -#-[ ] Download neon sites and dom pft file -#-[ ] Make sure verbose works for printing out commands running - -# Import libraries -from __future__ import print_function - -import os -import sys -import tqdm -import logging -import argparse -import subprocess - -import pandas as pd - - - - -def get_parser(): - """ - Get parser object for this script. 
- """ - parser = argparse.ArgumentParser(description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - - parser.print_usage = parser.print_help - - parser.add_argument('-v','--verbose', - help='Verbose mode will print more information. ', - action="store_true", - dest="verbose", - default=False) - - parser.add_argument('--16pft', - help='Create and/or modify 16-PFT surface datasets (e.g. for a FATES run) ', - action="store_true", - dest="pft_16", - default=False) - - parser.add_argument('-m', '--mixed', - help='Do not overwrite surface dataset to be just one dominant PFT at 100%', - action="store_true", - dest="mixed", - default=False) - - - return parser - - -def execute(command): - """ - Function for running a command on shell. - Args: - command (str): - command that we want to run. - Raises: - Error with the return code from shell. - """ - print ('\n',' >> ',*command,'\n') - - try: - subprocess.check_call(command, stdout=open(os.devnull, "w"), stderr=subprocess.STDOUT) - - except subprocess.CalledProcessError as e: - #raise RuntimeError("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output)) - #print (e.ouput) - print (e) - - - - - - -def main(): - - args = get_parser().parse_args() - - if args.verbose: - logging.basicConfig(level=logging.DEBUG) - - - neon_sites = pd.read_csv('neon_sites_dompft.csv') - - - for i, row in tqdm.tqdm(neon_sites.iterrows()): - lat = row['Lat'] - lon = row['Lon'] - site = row['Site'] - pft = row['pft'] - clmsite = "1x1_NEON_"+site - print ("Now processing site :", site) - - if args.mixed and args.pft_16: - # use surface dataset with 16 pfts, and don't overwrite with 100% 1 dominant PFT - # don't set crop flag - # don't set a dominant pft - subset_command = ['./subset_data','point','--lat',str(lat),'--lon',str(lon), - '--site',clmsite, '--create-surface','--uniform-snowpack', - '--cap-saturation','--verbose','--overwrite'] - modify_command = ['./modify_singlept_site_neon.py', 
'--neon_site', site, '--surf_dir', - 'subset_data_single_point', '--16pft'] - elif args.pft_16: - # use surface dataset with 16 pfts, but overwrite to 100% 1 dominant PFT - # don't set crop flag - # set dominant pft - subset_command = ['./subset_data','point','--lat',str(lat),'--lon',str(lon), - '--site',clmsite,'--dompft',str(pft),'--create-surface', - '--uniform-snowpack','--cap-saturation','--verbose','--overwrite'] - modify_command = ['./modify_singlept_site_neon.py', '--neon_site', site, '--surf_dir', - 'subset_data_single_point', '--16pft'] - elif args.mixed: - # use surface dataset with 78 pfts, and don't overwrite with 100% 1 dominant PFT - # NOTE: FATES will currently not run with a 78-PFT surface dataset - # set crop flag - # don't set dominant pft - subset_command = ['./subset_data','point','--lat',str(lat),'--lon',str(lon), - '--site',clmsite,'--crop','--create-surface', - '--uniform-snowpack','--cap-saturation','--verbose','--overwrite'] - modify_command = ['./modify_singlept_site_neon.py', '--neon_site', site, '--surf_dir', - 'subset_data_single_point'] - else: - # use surface dataset with 78 pfts, and overwrite to 100% 1 dominant PFT - # NOTE: FATES will currently not run with a 78-PFT surface dataset - # set crop flag - # set dominant pft - subset_command = ['./subset_data', 'point', '--lat', str(lat), '--lon', str(lon), - '--site', clmsite,'--crop', '--dompft', str(pft), '--create-surface', - '--uniform-snowpack', '--cap-saturation', '--verbose', '--overwrite'] - modify_command = ['./modify_singlept_site_neon.py', '--neon_site', site, '--surf_dir', - 'subset_data_single_point'] - execute(subset_command) - execute(modify_command) - -if __name__ == "__main__": - main() - diff --git a/tools/site_and_regional/run_neon b/tools/site_and_regional/run_neon new file mode 100755 index 0000000000..ad930f50e3 --- /dev/null +++ b/tools/site_and_regional/run_neon @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 +""" +This is a just top-level skeleton script that calls 
+run_neon.py.
+The original code (run_neon.py) is located under
+python/ctsm/site_and_regional folder.
+
+For full instructions on how to run the code and different options,
+please check python/ctsm/site_and_regional/run_neon.py file.
+
+This script first creates and builds a generic base case.
+Next, it will clone the base_case for different neon sites and run
+types to reduce the need to build ctsm every time.
+
+This script will do the following:
+    1) Create a generic base case for cloning.
+    2) Make the case for the specific neon site(s).
+    3) Make changes to the case, for:
+        a. AD spinup
+        b. post-AD spinup
+        c. transient
+        #---------------
+        d. SASU or Matrix spinup
+    4) Build and submit the case.
+
+----------------------------------------------------------------
+To see all available options for running tower sites:
+    ./run_neon --help
+----------------------------------------------------------------
+Instructions for running using conda python environments:
+../../py_env_create
+conda activate ctsm_pylib
+"""
+
+import os
+import sys
+
+# -- add python/ctsm to path
+_CTSM_PYTHON = os.path.join(
+    os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, "python"
+)
+sys.path.insert(1, _CTSM_PYTHON)
+
+from ctsm.site_and_regional.run_neon import main
+
+if __name__ == "__main__":
+    main(__doc__)