From 4147127baa976ca3c5bf2eeae5e6ad4cb7852128 Mon Sep 17 00:00:00 2001 From: Shayon Shakoorzadeh Date: Tue, 9 Apr 2024 16:31:45 -0400 Subject: [PATCH 01/11] initial commit --- CMakeLists.txt | 2 ++ gcmpy/CMakeLists.txt | 24 ++++++++++++++++++++++++ 2 files changed, 26 insertions(+) create mode 100644 gcmpy/CMakeLists.txt diff --git a/CMakeLists.txt b/CMakeLists.txt index 86c8b109..2b38a9d5 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,3 +1,5 @@ +add_subdirectory(gcmpy) + ecbuild_add_executable ( TARGET GEOSgcm.x SOURCES GEOSgcm.F90 diff --git a/gcmpy/CMakeLists.txt b/gcmpy/CMakeLists.txt new file mode 100644 index 00000000..9703faf9 --- /dev/null +++ b/gcmpy/CMakeLists.txt @@ -0,0 +1,24 @@ +# cp (makes exe) +set (programs + construct_extdata_yaml_list.py + ) + +install ( + PROGRAMS ${programs} + DESTINATION bin/gcmpy + ) + +# processed files +set (setup_scripts + gcm_setup + gmichem_setup + geoschemchem_setup + stratchem_setup + ) + +foreach (file ${setup_scripts}) + configure_file(${file} ${file} @ONLY) + install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/${file} DESTINATION bin/gcmpy) +endforeach () + + From 93554269fa6500fbabb3fe7d90f7f997c675155c Mon Sep 17 00:00:00 2001 From: Shayon Shakoorzadeh Date: Tue, 9 Apr 2024 17:46:40 -0400 Subject: [PATCH 02/11] updated directories --- gcmpy/CMakeLists.txt | 14 +++++--------- gcmpy/scripts/CMakeLists.txt | 16 ++++++++++++++++ gcmpy/yaml/CMakeLists.txt | 16 ++++++++++++++++ 3 files changed, 37 insertions(+), 9 deletions(-) create mode 100644 gcmpy/scripts/CMakeLists.txt create mode 100644 gcmpy/yaml/CMakeLists.txt diff --git a/gcmpy/CMakeLists.txt b/gcmpy/CMakeLists.txt index 9703faf9..8846312e 100644 --- a/gcmpy/CMakeLists.txt +++ b/gcmpy/CMakeLists.txt @@ -1,7 +1,8 @@ # cp (makes exe) -set (programs - construct_extdata_yaml_list.py - ) +add_subdirectory(scripts) +add_subdirectory(yamls) + +set (programs) install ( PROGRAMS ${programs} @@ -9,12 +10,7 @@ install ( ) # processed files -set (setup_scripts - gcm_setup - gmichem_setup - geoschemchem_setup - stratchem_setup - ) +set (setup_scripts) foreach (file ${setup_scripts}) configure_file(${file} ${file} @ONLY) diff --git a/gcmpy/scripts/CMakeLists.txt b/gcmpy/scripts/CMakeLists.txt new file mode 100644 index 00000000..43be2a33 --- /dev/null +++ b/gcmpy/scripts/CMakeLists.txt @@ -0,0 +1,16 @@ +set (programs + #fill + ) + +install ( + PROGRAMS ${programs} + DESTINATION bin/gcmpy/scripts + ) + +# processed files +set (setup_scripts) + +foreach (file ${setup_scripts}) + configure_file(${file} ${file} @ONLY) + install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/${file} DESTINATION bin/gcmpy) +endforeach () diff --git a/gcmpy/yaml/CMakeLists.txt b/gcmpy/yaml/CMakeLists.txt new file mode 100644 index 00000000..36e3ed48 --- /dev/null +++ b/gcmpy/yaml/CMakeLists.txt @@ -0,0 +1,16 @@ +set (programs + #fill + ) + +install ( + PROGRAMS ${programs} + DESTINATION bin/gcmpy/yaml + ) + +# processed files +set (setup_scripts) + +foreach (file ${setup_scripts}) + configure_file(${file} ${file} @ONLY) + install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/${file} DESTINATION bin/gcmpy) +endforeach () From 0e5e3afc3b3184bd5ed2c50884a1a6c0acb46c22 Mon Sep 17 00:00:00 2001 From: Shayon Shakoorzadeh Date: Tue, 9 Apr 2024 17:48:26 -0400 Subject: [PATCH 03/11] Add scripts via upload --- gcmpy/scripts/atmosphere.py | 333 +++++++++++++++++++ gcmpy/scripts/env.py | 134 ++++++++ gcmpy/scripts/generate_question.py | 59 ++++ gcmpy/scripts/gocart.py | 48 +++ gcmpy/scripts/land.py | 66 ++++ gcmpy/scripts/model.py | 408 
++++++++++++++++++++++++ gcmpy/scripts/ocean.py | 257 +++++++++++++++ gcmpy/scripts/process_questions.py | 232 ++++++++++++++ gcmpy/scripts/test_generate_question.py | 9 + gcmpy/scripts/utility.py | 66 ++++ 10 files changed, 1612 insertions(+) create mode 100644 gcmpy/scripts/atmosphere.py create mode 100644 gcmpy/scripts/env.py create mode 100644 gcmpy/scripts/generate_question.py create mode 100644 gcmpy/scripts/gocart.py create mode 100644 gcmpy/scripts/land.py create mode 100644 gcmpy/scripts/model.py create mode 100644 gcmpy/scripts/ocean.py create mode 100644 gcmpy/scripts/process_questions.py create mode 100644 gcmpy/scripts/test_generate_question.py create mode 100644 gcmpy/scripts/utility.py diff --git a/gcmpy/scripts/atmosphere.py b/gcmpy/scripts/atmosphere.py new file mode 100644 index 00000000..ead977d4 --- /dev/null +++ b/gcmpy/scripts/atmosphere.py @@ -0,0 +1,333 @@ +from env import answerdict +from utility import color + +class atmosphere: + def __init__(self): + self.use_SHMEM = False + self.force_das = "#" + self.force_gcm = "#" + self.num_readers = 1 + self.num_writers = 1 + self.DT = answerdict["heartbeat"].q_answer + self.DT_solar = None + self.DT_irrad = None + self.DT_ocean = None + self.DT_long = None + self.IM = int(answerdict["AM_horizontal_res"].q_answer[1:]) + self.JM = self.IM * 6 + self.NX = None + self.NY = None + self.use_hydrostatic = answerdict["use_hydrostatic"].q_answer + self.microphysics = answerdict["AM_microphysics"].q_answer + self.IM_hist = self.IM * 4 + self.JM_hist = self.JM * 2 + 1 + self.gridfile = f"Gnomonic_c{self.IM}.dat" + self.job_sgmt = None + self.num_sgmt = None + self.res = f"CF{self.IM:04}x6C" + self.post_NDS = None + self.NX_convert = 2 + self.NY_convert = 24 + self.CONUS = "#" + self.stretch_factor = None + self.gridname = f"PE{self.IM}x{self.JM}-CF" + self.res_dateline = f"{self.IM}x{self.JM}" + self.BACM_1M = "#" + self.GFDL_1M = "#" + self.MGB2_2M = "#" + self.GFDL_hydro = ".TRUE." + self.GFDL_prog_ccn = "prog_ccn = .true." + self.GFDL_use_ccn = "use_ccn = .true." 
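+        # The remaining attributes start as None/placeholder values; they are
+        # resolved later by set_turnoff_wsub(), set_fvcore_layout(), and set_CONUS().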
+ self.MP_turnoff_wsub = None + self.FV_make_NH = None + self.FV_hydro = None + self.schmidt = None + self.target_lon = None + self.target_lat = None + + # for debugging purposes + def print_vars(self): + all_vars = vars(self) + for var_name, var_value in all_vars.items(): + print(f"{color.BLUE}{var_name}: {var_value}{color.RESET}") + + def hres(self, ocean_NX, ocean_NY): + match answerdict["AM_horizontal_res"].q_answer: + case "c12": + self.DT_solar = 3600 + self.DT_irrad = 3600 + self.DT_ocean = self.DT_irrad + self.DT_long = self.DT + if answerdict["OM_name"].q_answer == "MOM6": + self.NX = 1 + else: + self.NX = 2 + self.NY = self.NX * 6 + self.job_sgmt = f"{15:08}" + self.num_sgmt = 20 + self.post_NDS = 4 + self.NX_convert = 1 + self.NY_convert = 6 + + case "c24": + self.DT_solar = 3600 + self.DT_irrad = 3600 + self.DT_ocean = self.DT_irrad + self.DT_long = self.DT + self.NX = 4 + self.NY = self.NX * 6 + self.job_sgmt = f"{15:08}" + self.num_sgmt = 20 + self.post_NDS = 4 + self.NX_convert = 1 + self.NY_convert = 6 + + case "c48": + self.DT_solar = 3600 + self.DT_irrad = 3600 + self.DT_ocean = self.DT_irrad + self.DT_long = self.DT + self.NX = 4 + self.NY = self.NX * 6 + self.IM_hist = 180 + self.JM_hist = 91 + self.job_sgmt = f"{15:08}" + self.num_sgmt = 20 + self.post_NDS = 4 + + case "c90": + self.DT_solar = 3600 + self.DT_irrad = 3600 + self.DT_long = self.DT + match answerdict["OM_name"].q_answer: + case "MIT": + self.NX = 10 + self.NY = 36 + self.DT_ocean = self.DT + case "MOM5","MOM6": + self.NX = ocean_NX + self.NY = ocean_NY + self.DT_ocean = self.DT + case _: + self.NX = 3 + self.NY = self.NX * 6 + self.DT_ocean = self.DT_irrad + self.job_sgmt = f"{32:08}" + self.num_sgmt = 4 + self.post_NDS = 8 + + case "c180": + self.DT_solar = 3600 + self.DT_irrad = 3600 + self.DT_long = self.DT + if answerdict["OM_coupled"].q_answer == True: + self.NX = ocean_NX + self.NY = ocean_NY + self.DT_ocean = self.DT + else: + self.NX = 6 + self.NY = self.NX * 6 + self.DT_ocean = self.DT_irrad + self.job_sgmt = f"{16:08}" + self.num_sgmt = 1 + self.post_NDS = 8 + self.num_readers = 2 + + case "c360": + self.DT_solar = 3600 + self.DT_irrad = 3600 + self.DT_ocean = self.DT_irrad + self.DT_long = self.DT + self.NX = 12 + self.NY = self.NX * 6 + self.num_readers = 4 + self.job_sgmt = f"{5:08}" + self.num_sgmt = 1 + self.post_NDS = 12 + self.NX_convert = 4 + + case "c720": + self.DT_solar = 3600 + self.DT_irrad = 3600 + self.DT_ocean = self.DT_irrad + self.DT_long = 450 + self.NX = 24 + self.NY = self.NX * 6 + self.num_readers = 6 + self.job_sgmt = f"{5:08}" + self.num_sgmt = 1 + self.post_NDS = 16 + self.NX_convert = 8 + self.use_SHMEM = True + + case "c1440": + self.DT_solar = 1800 + self.DT_irrad = 1800 + self.DT_ocean = self.DT_irrad + self.DT_long = 300 + self.NX = 48 + self.NY = self.NX * 6 + self.num_readers = 6 + self.job_sgmt = f"{1:08}" + self.num_sgmt = 1 + self.post_NDS = 32 + self.NX_convert = 8 + self.use_SHMEM = True + + case "c2880": + self.DT_solar = 1800 + self.DT_irrad = 1800 + self.DT_ocean = self.DT_irrad + self.DT_long = 300 + self.NX = 96 + self.NY = self.NX * 6 + self.num_readers = 6 + self.job_sgmt = f"{1:08}" + self.num_sgmt = 1 + self.post_NDS = 32 + self.NX_convert = 8 + self.use_SHMEM = True + + case "c5760": + self.DT_solar = 900 + self.DT_irrad = 900 + self.DT_ocean = self.DT_irrad + self.DT_long = 300 + self.NX = 192 + self.NY = self.NX * 6 + self.num_readers = 6 + self.job_sgmt = f"{1:08}" + self.num_sgmt = 1 + self.post_NDS = 32 + self.NX_convert = 8 + self.use_SHMEM = 
True + + case "c270": + self.DT_solar = 3600 + self.DT_irrad = 3600 + self.DT_ocean = self.DT_irrad + self.DT_long = self.DT + self.NX = 18 + self.NY = self.NX * 6 + self.num_readers = 6 + self.job_sgmt = f"{1:08}" + self.num_sgmt = 1 + self.post_NDS = 32 + self.NX_convert = 8 + self.use_SHMEM = True + self.CONUS = "" + self.stretch_factor = 2.5 + + case "c540": + self.DT_solar = 3600 + self.DT_irrad = 3600 + self.DT_ocean = self.DT_irrad + self.DT_long = self.DT + self.NX = 36 + self.NY = self.NX * 6 * 2 + self.num_readers = 6 + self.job_sgmt = f"{1:08}" + self.num_sgmt = 1 + self.post_NDS = 32 + self.NX_convert = 8 + self.use_SHMEM = True + self.CONUS = "" + self.stretch_factor = 2.5 + + case "c1080": + self.DT_solar = 900 + self.DT_irrad = 900 + self.DT_ocean = self.DT_irrad + self.DT_long = 300 + self.NX = 72 + self.NY = self.NX * 6 * 2 + self.num_readers = 6 + self.job_sgmt = f"{1:08}" + self.num_sgmt = 1 + self.post_NDS = 32 + self.NX_convert = 8 + self.use_SHMEM = True + self.CONUS = "" + self.stretch_factor = 2. + + case "c1536": + self.DT_solar = 900 + self.DT_irrad = 900 + self.DT_ocean = self.DT_irrad + self.DT_long = 300 + self.NX = 96 + self.NY = self.NX * 6 + self.num_readers = 6 + self.job_sgmt = f"{5:08}" + self.num_sgmt = 1 + self.post_NDS = 16 + self.NX_convert = 8 + self.use_SHMEM = True + self.CONUS = "" + self.stretch_factor = 3.0 + + case "c2160": + self.DT_solar = 900 + self.DT_irrad = 900 + self.DT_ocean = self.DT_irrad + self.DT_long = 300 + self.NX = 192 + self.NY = self.NX * 6 * 2 + self.num_readers = 6 + self.job_sgmt = f"{5:08}" + self.num_sgmt = 1 + self.post_NDS = 32 + self.NX_convert = 8 + self.use_SHMEM = True + self.CONUS = "" + self.stretch_factor = 2.5 + + if answerdict["OM_name"].q_answer == "MIT": + self.DT_ocean = self.DT + + def set_microphysics(self): + match self.microphysics: + case "BACM_1M": + self.BACM_1M = "" + self.DT_long = 450 + case "GFDL_1M": + self.GFDL_1M = "" + case "MGB2_2M": + self.MGB2_2M = "" + + def set_turnoff_wsub(self): + if self.microphysics == "MGB2_2M": + self.MP_turnoff_wsub = "#DELETE" + else: + self.MP_turnoff_wsub = "" + + # settings for fvcore_layour.rc + def set_fvcore_layout(self): + match self.use_hydrostatic: + case True: + self.FV_make_NH = "Make_NH = .F." + self.FV_hydro = "hydrostatic = .T." + case False: + self.FV_make_NH = "Make_NH = .T." + self.FV_hydro = "hydrostatic = .F." + if self.microphysics == "MGB2_2M": + self.FV_hydro = ".FALSE." + + def set_CONUS(self): + if self.CONUS == "#": + self.schmidt = "do_schmidt = .false." + self.stretch_factor = "stretch_fac = 1.0" + self.target_lon = "target_lon = 0.0" + self.target_lat = "target_lat = 0.0" + else: + self.schmidt = "do_schmidt = .true." + self.stretch_factor = "stretch_fac = $STRETCH_FACTOR" + self.target_lon = "target_lon = -98.35" + self.target_lat = "target_lat = 39.5" + + + def config(self, ocean_NX, ocean_NY): + self.hres(ocean_NX, ocean_NY) + self.set_microphysics() + self.set_fvcore_layout() + self.set_CONUS() diff --git a/gcmpy/scripts/env.py b/gcmpy/scripts/env.py new file mode 100644 index 00000000..1f02b253 --- /dev/null +++ b/gcmpy/scripts/env.py @@ -0,0 +1,134 @@ +import os, sys, platform +import process_questions as pq +from utility import envdict, pathdict, color, exceptions +import questionary + +# PROBABLY WANT TO REMOVE/MODIFY THIS! 
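+# The quoted block below is a disabled sanity check that the script is run from
+# the installation bin/ directory (it looks for GEOSgcm.x alongside the script).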
+''' +# Check if GEOSgcm.x is here, which means you are in the correct directory +if not os.path.exists(os.path.join(pathdict['BIN'], 'GEOSgcm.x')): + exceptions.raise_user_exception( + "You are trying to run " + color.color_file(pathdict['SCRIPTNAME']) + \ + " in the " + color.color_path(pathdict['CWD']) + " directory." + \ + "\nThis is no longer supported.\nPlease run from the " + color.color_path('bin/') + \ + " directory in your installation.") + exit(1) +''' + + +####################################################################### +# Check for Command Line Flags +####################################################################### +# Set default behavior of switches +linkx = False +exe_verb = 'copied' +bool_usingSingularity = False + +# Loop through arguments and mtch to each case +# If argument is not recognized, display usage and exit +for arg in enumerate(sys.argv[1:]): + if (arg[-1] == '--link'): + linkx = True + elif (arg[-1] == '--singularity'): + bool_usingSingularity == True + elif (arg[-1] == '--help' or arg[-1] == '-h'): + exceptions.printusage() + else: + exceptions.raiseuserexception("Command line arguemnt \"" + arg[-1] + "\" not \ + recognized. \nSee usage:\n" ) + exceptions.printusage() + + +####################################################################### +# Determine site +####################################################################### +envdict['node'] = platform.node() +envdict['arch'] = platform.system() +#print(f"{color.RED}{pathdict['bin']}{color.RESET}") +envdict['site'] = open(os.path.join(pathdict['etc'], 'SITE.rc'), 'r').read().split()[-1] + +####################################################################### +# Test for Compiler and MPI Setup +####################################################################### +# Extract BASEDIR tail +basedir = open(os.path.join(pathdict['etc'], 'BASEDIR.rc'), 'r').read().split()[-1] +mpi = os.path.split(basedir)[-1] + +# Map MPI dirname to correct MPI implementation +if any(tag in mpi for tag in ['openmpi','hpcx']): mpi = 'openmpi' +elif any(tag in mpi for tag in ['impi', 'intelmpi']): mpi = 'intelmpi' +elif 'mvapich2' in mpi: mpi = 'mvapich2' +elif 'mpich' in mpi: mpi = 'mpich' +elif 'mpt' in mpi: mpi = 'mpt' +else: mpi = 'intelmpi' +envdict['mpi'] = mpi +#print("MPI implementation is: " + color.GREEN + MPI) + + +answerdict = pq.process() +#for i in answerdict: +# print(answerdict[i].q_answer) + + +####################################################################### +# Set Number of CPUs per Node +####################################################################### +# ----------------------------> PUT IN HANDLE CLASS FROM process_questions.py (MAYBE) <------------------------ +if envdict['site'] == 'NCCS': + if answerdict['processor'].q_answer == 'Haswell': + envdict['n_CPUs'] = 28 + elif answerdict['processor'].q_answer == 'Skylake': + envdict['n_CPUs'] = 40 + elif answerdict['processor'].q_answer == 'Cascade': + ''' + NCCS currently recommends that users do not run with + 48 cores per n_CPUs on SCU16 due to OS issues and + recommends that CPU-intensive works run with 46 or less + cores. 
As 45 is a multiple of 3, it's the best value + that doesn't waste too much + ''' + envdict['n_CPUs'] = 45 + +elif envdict['site'] == 'NAS': + if answerdict['processor'].q_answer == 'Haswell': + envdict['n_CPUs'] = 24 + elif answerdict['processor'].q_answer == 'Broadwell': + envdict['n_CPUs'] = 28 + elif answerdict['processor'].q_answer == 'Skylake': + envdict['n_CPUs'] = 40 + elif answerdict['processor'].q_answer == 'Cascade': + envdict['n_CPUs'] = 40 + elif answerdict['processor'].q_answer == 'AMD': + envdict['n_CPUs'] = 128 + +elif envdict['site'] == 'AWS' or envdict['site'] == 'AZURE': + # Because we do not know the name of the model or the number of CPUs + # per node. We ask the user to set these variables in the script + print(color.RED + "\nSince you are running on ", envdict['site'], \ + " you must set the processor and # of CPUs yourself.") + # ASK FOR PROCESSOR TYPE <------------------------------------------------------------------------------- + envdict['n_CPUs'] = questionary.text("Enter the number of CPUs per node: ").ask() + +else: + envdict['site'] = 'UNKNOWN' + if envdict['arch'] == 'Linux': + # Get the number of CPU cores on Linux + try: + with open('/proc/cpuinfo') as f: + cpuinfo = f.read() + envdict['n_CPUs'] = cpuinfo.count('processor') + except IOError: + print(color.RED + "ERROR: Unable to retrieve the number of CPUs.") + sys.exit(1) + elif envdict['arch'] == 'Darwin': + # Get the number of CPU cores on macOS + try: + import multiprocessing + envdict['n_CPUs'] = multiprocessing.cpu_count() + except NotImplementedError: + print(color.RED + "ERROR: Unable to retrieve the number of CPUs.") + sys.exit(1) + else: + print(f"ERROR: Unknown architecture", envdict['arch']) + sys.exit(1) + diff --git a/gcmpy/scripts/generate_question.py b/gcmpy/scripts/generate_question.py new file mode 100644 index 00000000..e2efbe20 --- /dev/null +++ b/gcmpy/scripts/generate_question.py @@ -0,0 +1,59 @@ +import os +import questionary +from questionary import Validator, ValidationError + +''' +This class will generate questionary questions based on a set of +parameters specified in a YAML file. It is designed to be as modular +as possible, so keep it that way! 
(Handle specific cases in +process_questions) +''' +class generateQuestion: + def __init__(self, q_name, q_type, q_prompt, q_choices, q_default, q_follows_up): + self.q_name = q_name + self.q_type = q_type + self.q_prompt = q_prompt + self.q_choices = q_choices + self.q_default = q_default + self.q_follows_up = q_follows_up + self.q_answer = None + + + # Check if a question "follows_up" another and its conditions are met + # to determine whether it should be asked or not + def should_ask(self, answerdict): + if self.q_follows_up: + for prev_question, accepted_answers in self.q_follows_up: + if answerdict[prev_question].q_answer in accepted_answers or ('any' in accepted_answers and answerdict[prev_question].q_answer != None): + return True + return False + return True + + + # loads the questionary api based on yaml configurations + def load_question(self, answerdict): + + # check if the yaml entry should be skipped + if not self.should_ask(answerdict): + return + + # if should_ask() returns true, call correct questionary API + elif self.q_type == 'text': + answer = questionary.text(self.q_prompt, default=self.q_default, validate=qValidator.val_text).ask() + elif self.q_type == 'confirm': + answer = questionary.confirm(self.q_prompt, default=self.q_default).ask() + elif self.q_type == 'select': + answer = questionary.select(self.q_prompt, choices=self.q_choices).ask() + elif self.q_type == 'path': + answer = questionary.path(self.q_prompt, default=self.q_default).ask() + + self.q_answer = answer + +# input validation used for questionary library +class qValidator(): + def val_text(input): + if len(str(input)) == 0 or input.isspace(): + return "please answer the question!" + else: + return True + diff --git a/gcmpy/scripts/gocart.py b/gcmpy/scripts/gocart.py new file mode 100644 index 00000000..d6d9c9db --- /dev/null +++ b/gcmpy/scripts/gocart.py @@ -0,0 +1,48 @@ +from env import answerdict +from utility import color + +class gocart: + def __init__(self): + self.aerosol = answerdict["gocart_aerosols"].q_answer + self.emissions = f"{answerdict['gocart_emission'].q_answer}_EMISSIONS" + self.data_driven = None + self.OPS_species = "#" + self.CMIP_species = "#" + self.MERRA2OX_species = "#" + self.pchem_clim_years = "" + self.gocart = None + self.gocart_hist = None + self.aero_provider = "GOCART2G" + self.RATS_provider = "PCHEM" + + # for debugging purposes + def print_vars(self): + all_vars = vars(self) + for var_name, var_value in all_vars.items(): + print(f"{color.BLUE}{var_name}: {var_value}{color.RESET}") + + def set_gocart(self): + match self.aerosol: + case "Actual": + self.data_driven = False + self.gocart = "" + self.gocart_hist = "" + case "Climatological": + self.data_driven = True + self.gocart = "" + self.gocart_hist = "" + + def set_emissions(self): + match self.emissions.split("_")[0]: + case "AMIP": + self.MERRA2OX_species = "" + self.pchem_clim_years = 1 + case "OPS": + self.OPS_species = "" + self.pchem_clim_years = 39 + + + def config(self): + self.set_gocart() + self.set_emissions() + diff --git a/gcmpy/scripts/land.py b/gcmpy/scripts/land.py new file mode 100644 index 00000000..5bc46d64 --- /dev/null +++ b/gcmpy/scripts/land.py @@ -0,0 +1,66 @@ +from env import answerdict +from utility import color + +class land: + def __init__(self): + self.land_choice = answerdict["LS_model"].q_answer + self.bcs = answerdict["LS_boundary_conditions"].q_answer + self.bound_parameters = None + self.emip_BCS_IN = None + self.emip_oldland = None + self.emip_newland = None + self.emip_MERRA2 = 
None + self.HIST_catchment = None + self.GCMRUN_catchment = None + + # for debugging purposes + def print_vars(self): + all_vars = vars(self) + for var_name, var_value in all_vars.items(): + print(f"{color.CYAN}{var_name}: {var_value}{color.RESET}") + + def set_bcs(self): + match self.bcs: + case "Icarus": + self.bound_parameters = "#DELETE" + self.emip_BCS_IN = "Ganymed-4_0" + self.emip_oldland = "" + self.emip_newland = "#DELETE" + self.emip_MERRA2 = "MERRA2" + case "Icarus-NLv3": + self.bound_parameters = "" + self.emip_BCS_IN = "Icarus-NLv3" + self.emip_oldland = "#DELETE" + self.emip_newland = "" + self.emip_MERRA2 = "MERRA2_NewLand" + + def set_catchment(self): + if self.bcs == "Icarus-NLv3": + match self.land_choice: + case "Catchment": + self.HIST_catchment = "#DELETE" + self.GCMRUN_catchment = "#DELETE" + case "CatchmentCN-CLM4.0": + self.HIST_catchment = "" + self.GCMRUN_catchment = "" + print(f"{color.RED}IMPORTANT: please set LAND_PARAMS: to CN_CLM40 in RC/GEOS_SurfaceGridComp.rc in the experiment directory.{color.RESET}") + case "CatchmentCN-CLM4.5": + self.HIST_catchment = "" + self.GCMRUN_catchment = "" + print(f"{color.RED}IMPORTANT: please set LAND_PARAMS: to CN_CLM45 in RC/GEOS_SurfaceGridComp.rc in the experiment directory.{color.RESET}") + else: + self.land_choice = "Catchment" + self.HIST_catchment = "#DELETE" + self.GCMRUN_catchment = "#DELETE" + + def config(self): + self.set_bcs() + self.set_catchment() + + + + + + + + diff --git a/gcmpy/scripts/model.py b/gcmpy/scripts/model.py new file mode 100644 index 00000000..8c3209c8 --- /dev/null +++ b/gcmpy/scripts/model.py @@ -0,0 +1,408 @@ +from ocean import ocean +from atmosphere import atmosphere as atmos +from land import land +from gocart import gocart +from env import answerdict, linkx +from utility import envdict, pathdict +import math, os, shutil, tempfile, yaml +from pathlib import Path + + +# combines all models (atmos, ocean, land, gocart) into one big one +class model: + def __init__(self): + self.ocean = ocean() + self.atmos = atmos() + self.land = land() + self.gocart = gocart() + self.is_FCST = False + self.fv_cubed = "" + self.bcs_res = None + self.tile_data = None + self.tile_bin = None + self.interpolate_SST = None + self.job_sgmt = None + self.begin_date = "18910301 000000" + self.end_date = "29990302 210000" + self.n_oserver_nodes = None + self.n_backend_pes = None + self.n_nodes = None + self.exp_dir = answerdict['exp_dir'].q_answer + self.oserver_restart = "NO" + + + def print_all_vars(self): + self.atmos.print_vars() + self.land.print_vars() + self.gocart.print_vars() + + + def config_models(self): + self.ocean.config() + self.atmos.config(self.ocean.NX, self.ocean.NY) + self.land.config() + self.gocart.config() + + + # setup some variables idk + def set_some_stuff(self): + if self.atmos.IM_hist >= self.ocean.IM: + self.interpolate_SST = True + else: + self.interpolate_SST = False + self.bcs_res = f"{self.atmos.res}_{self.ocean.res}" + self.tile_data = f"{self.atmos.res}_{self.ocean.res}_Pfafstetter.til" + self.tile_bin = f"{self.atmos.res}_{self.ocean.res}_Pfafstetter.TIL" + self.job_sgmt = f"{self.atmos.job_sgmt} 000000" + + + # setup experiment nodes + def set_nodes(self): + model_npes = self.atmos.NX * self.atmos.NY + + # Calculate OSERVER nodes based on recommended algorithm + if answerdict["io_server"].q_answer == True: + + # First we calculate the number of model nodes + n_model_nodes = math.ceil(model_NPES / envdict["n_CPUs"]) + + # Next the number of frontend PEs is 10% of the model PEs + 
n_frontend_pes = math.ceil(model_NPES * 0.1) + + # Now we roughly figure out the number of collections in the HISTORY.rc + n_hist_collections = 0 + with open(answerdict['history_template'].q_answer, 'r') as file: + in_collections = False + for line in file: + if line.split(' ', 1)[0] == "COLLECTIONS:": + in_collections = True + continue + if in_collections and line.split(' ', 1)[0] != "#": + n_hist_collections += 1 + if line.strip() == "::": + break + + # The total number of oserver PEs is frontend PEs plus number of history collections + n_oserver_pes = n_frontend_pes + n_hist_collections + + # calculate the number of oserver nodes + n_oserver_nodes = math.ceil(n_oserver_pes / envdict["n_CPUs"]) + + # The number of backend PEs is the number of history collections divided by the number of oserver nodes + n_backend_pes = math.ceil(n_hist_collections / n_oserver_nodes) + + # multigroup requires at least two backend pes + if (n_backend_pes < 2): n_backend_pes = 2 + + # Calculate the total number of nodes to request from batch + nodes = n_model_nodes + n_oserver_nodes + + else: + self.nodes = math.ceil(model_npes / envdict["n_CPUs"]) + self.n_oserver_nodes = 0 + self.n_backend_pes = 0 + + + + def set_stuff(self): + self.set_nodes() + # Longer job names are now supported with SLURM and PBS. Limits seem to be 1024 characters with SLURM + # and 230 with PBS. To be safe, we will limit to 200 + run_n = f"{answerdict['experiment_id'].q_answer[:200]}_RUN" # RUN Job Name + run_fn = f"{answerdict['experiment_id'].q_answer[:200]}_FCST" # Forecast Job Name + post_n = f"{answerdict['experiment_id'].q_answer[:200]}_POST" # POST Job Name + plot_n = f"{answerdict['experiment_id'].q_answer[:200]}_PLT" # PLOT Job Name + move_n = f"{answerdict['experiment_id'].q_answer[:200]}_PLT" # MOVE Job Name + archive_n = f"{answerdict['experiment_id'].q_answer[:200]}_ARCH" # ARCHIVE Job Name + regress_n = f"{answerdict['experiment_id'].q_answer[:200]}_RGRS" # REGRESS Job Name + + + # Here we need to convert POST_NDS to total tasks. 
Using 16 cores + # per task as a good default + post_npes = self.atmos.post_NDS * 16 + NPCUS = (post_npes + envdict["n_CPUs"] - 1)/envdict["n_CPUs"] + + ''' + Definition for each variable in the following if-else block: + + batch_cmd - PBS Batch command + batch_group - PBS Syntax for GROUP + batch_time - PBS Syntax for walltime + batch_jobname - PBS Syntax for job name + batch_outputname - PBS Syntax for job output name + batch_joinouterr - PBS Syntax for joining output and error + run_FT - Wallclock Time for gcm_forecast.j + run_FT - Wallclock Time for gcm_run.j + post_T - Wallclock Time for gcm_post.j + plot_T - Wallclock Time for gcm_plot.j + archive_T - Wallclock Time for gcm_archive.j + run_Q - Batch queue name for gcm_run.j + run_P - PE Configuration for gcm_run.j + run_FP - PE Configuration for gcm_forecast.j + post_Q - Batch queue name for gcm_post.j + plot_Q - Batch queue name for gcm_plot.j + move_Q - Batch queue name for gcm_moveplot.j + archive_Q - Batch queue name for gcm_archive.j + post_P - PE Configuration for gcm_post.j + plot_P - PE Configuration for gcm_plot.j + archive_P - PE Configuration for gcm_archive.j + move_P - PE Configuration for gcm_moveplot.j + bcs_dir - Location of Boundary Conditions + replay_ana_expID - Default Analysis Experiment for REPLAY + replay_ana_location - Default Analysis Location for REPLAY + M2_replay_ana_location - Default Analysis Location for M2 REPLAY + sst_dir - Location of SST Boundary Conditions + chem_dir - Locations of Aerosol Chemistry BCs + work_dir - User work directory <----------------- change this later + gwdrs_dir - Location of GWD_RIDGE files + coupled_dir - Coupled Ocean/Atmos Forcing + ''' + + if envdict['site'] == "NAS": + batch_cmd = "qsub" + batch_group = "PBS -W group_list=" + batch_time = "PBS -l walltime=" + batch_jobname = "PBS -N" + batch_outputname = "PBS -o " + batch_joinouterr = "PBS -j oe -k oed" + run_FT = "6:00:00" + run_T = "8:00:00" + post_T = "8:00:00" + plot_T = "8:00:00" + archive_T = "8:00:00" + run_Q = f"PBS -q normal" + run_P = f"PBS -l select={self.nodes}:ncpus={envdict['n_CPUs']}:mpiprocs={envdict['n_CPUs']}:model={answerdict['processor']}" + run_FP = f"PBS -l select=24:ncpus={envdict['n_CPUs']}:mpiprocs={envdict['n_CPUs']}:model={answerdict['processor']}" + post_Q = "PBS -q normal" + plot_Q = "PBS -q normal" + move_Q = "PBS -q normal" + archive_Q = "PBS -q normal" + post_P = f"PBS -l select={NPCUS}:ncpus={envdict['n_CPUs']}:mpiprocs={envdict['n_CPUs']}:model={answerdict['processor']}" + plot_P = f"PBS -l select=1:ncpus={envdict['n_CPUs']}:mpiprocs=1:model={answerdict['processor']}" + archive_P = f"PBS -l select=1:ncpus={envdict['n_CPUs']}:mpiprocs={envdict['n_CPUs']}:model={answerdict['processor']}" + move_P = "PBS -l select=1:ncpus=1" + boundary_path = "/nobackup/gmao_SIteam/ModelData" + bcs_dir = f"{boundary_path}/bcs/{self.land.bcs}/{self.land.bcs}_{self.ocean.tag}" + replay_ana_expID = "ONLY_MERRA2_SUPPORTED" + replay_ana_location = "ONLY_MERRA2_SUPPORTED" + M2_replay_ana_location = f"{boundary_path}/merra2/data"# + + # defines location of SST Boundary Conditions + oceanres = f"{self.ocean.IM}x{self.ocean.JM}" + if oceanres == "1440x720": + sst_dir = f"{boundary_path}/fvInput/g5gcm/bcs/SST/{oceanres}" + else: + sst_dir = f"{boundary_path}/fvInput/g5gcm/bcs/realtime/{self.ocean.sst_name}/{oceanres}" + if self.ocean.gridtype_abrv == "LL": + sst_dir = "/nobackupp2/estrobac/geos5/SSTDIR" + + chem_dir = f"{boundary_path}/fvInput_nc3" + work_dir = f"/nobackup/{os.environ.get('LOGNAME')}" + gwdrs_dir = 
f"{boundary_path}/GWD_RIDGE" + + # Coupled Ocean/Atmos Forcing + if self.ocean.name == "MIT": + coupled_dir = "/nobackupp2/estrobac/geos5/GRIDDIR" + else: + coupled_dir = f"{boundary_path}/aogcm" + + + elif envdict['site'] == "NCCS": + batch_cmd = "sbatch" + batch_group = "SBATCH --account=" + batch_time = "SBATCH --time=" + batch_jobname = "SBATCH --job-name=" + batch_outputname = "SBATCH --output=" + batch_joinouterr = "DELETE" + run_FT = "06:00:00" + run_T = "12:00:00" + post_T = "8:00:00" + plot_T = "12:00:00" + archive_T = "2:00:00" + run_Q = f"SBATCH --constraint={answerdict['processor']}" + run_P = f"SBATCH --nodes={self.nodes} --ntasks-per-node={envdict['n_CPUs']}" + run_FP = f"SBATCH --nodes={self.nodes} --ntasks-per-node={envdict['n_CPUs']}" + post_Q = f"SBATCH --constraint={answerdict['processor']}" + plot_Q = f"SBATCH --constraint={answerdict['processor']}" + move_Q = "SBATCH --partition=datamove" + archive_Q = "SBATCH --partition=datamove" + post_P = f"SBATCH --nodes={NPCUS} --ntasks-per-node={envdict['n_CPUs']}" + plot_P = f"SBATCH --nodes=4 --ntasks=4" + archive_P = "SBATCH --ntasks=1" + move_P = "SBATCH --ntasks=1" + boundary_path = "/discover/nobackup/projects/gmao" + bcs_dir = f"{boundary_path}bcs_shared/fvInput/ExtData/esm/tiles/{self.land.bcs}" + replay_ana_expID = "x0039" + replay_ana_location = f"{boundary_path}/g6dev/ltakacs/x0039" + M2_replay_ana_location = f"{boundary_path}/merra2/data" + + + # define location of SST Boundary Conditions + oceanres = f"{self.ocean.IM}x{self.ocean.JM}" + if oceanres == "1440x720": + sst_dir = f"{os.environ.get('SHARE')}/gmao_ops/fvInput/g5gcm/bcs/SST/{self.ocean.IM}x{self.ocean.JM}" + else: + sst_dir = f"{os.environ.get('SHARE')}/gmao_ops/fvInput/g5gcm/bcs/realtime/{self.ocean.sst_name}/{self.ocean.IM}x{self.ocean.JM}" + if self.ocean.gridtype_abrv == "LL": + sst_dir = "/discover/nobackup/estrobac/geos5/SSTDIR" + + chem_dir = f"{os.environ.get('SHARE')}/gmao_ops/fvInput_nc3" + work_dir = f"/discover/nobackup/{os.environ.get('LOGNAME')}" + gwdrs_dir = f"{boundary_path}/osse2/stage/BCS_FILES/GWD_RIDGE" + + # Coupled Ocean/Atmos Forcing + if self.ocean.name == "MIT": + coupled_dir = "/gpfsm/dnb32/estrobac/geos5/GRIDDIR" + else: + coupled_dir = f"{boundary_path}/bcs_shared/make_bcs_inputs/ocean" + + + elif envdict['site'] == "AWS" or envdict['SITE'] == "Azure": + batch_cmd = "sbatch" + batch_group = "#DELETE" + batch_time = "SBATCH --time=" + batch_jobname = "SBATCH --job-name=" + batch_outputname = "SBATCH --output=" + batch_joinouterr = "DELETE" + run_FT = "06:00:00" + run_T = "12:00:00" + post_T = "8:00:00" + plot_T = "12:00:00" + archive_T = "1:00:00" + run_Q = f"SBATCH --constraint={answerdict['processor']}" + run_P = f"SBATCH --nodes={self.nodes} --ntasks-per-node={envdict['n_CPUs']}" + run_FP = f"SBATCH --nodes={self.nodes} --ntasks-per-node={envdict['n_CPUs']}" + post_Q = "NULL" + plot_Q = "NULL" + move_Q = "NULL" + archive_Q = "NULL" + post_P = f"SBATCH --ntasks={post_npes}" + plot_P = f"SBATCH --nodes=4 --ntasks=4" + archive_P = "SBATCH --ntasks=1" + move_P = "SBATCH --ntasks=1" + boundary_path = "/ford1/share/gmao_SIteam/ModelData" + bcs_dir = f"{boundary_path}/bcs/{self.land.bcs}_{self.ocean.tag}" + replay_ana_expID = "REPLAY_UNSUPPORTED" + replay_ana_location = "REPLAY_UNSUPPORTED" + M2_replay_ana_location = "REPLAY_UNSUPPORTED" + sst_dir = f"{boundary_path}/{self.ocean.sst_name}/{self.ocean.IM}x{self.ocean.JM}" + chem_dir = f"{boundary_path}/fvInput_nc3" + work_dir = os.environ.get('HOME') + gwdrs_dir = 
f"{boundary_path}/GWD_RIDGE" + coupled_dir = f"{boundary_path}/aogcm" + + else: + # These are defaults for the desktop + batch_cmd = "sbatch" + batch_group = "SBATCH --account=" + batch_time = "SBATCH --time=" + batch_jobname = "SBATCH --job-name=" + batch_outputname = "SBATCH --output=" + batch_joinouterr = "DELETE" + run_FT = "06:00:00" + run_T = "12:00:00" + post_T = "8:00:00" + plot_T = "12:00:00" + archive_T = "1:00:00" + run_Q = "NULL" + run_P = "NULL" + run_FP = "NULL" + post_Q = "NULL" + plot_Q = "NULL" + move_Q = "NULL" + archive_Q = "NULL" + post_P = "NULL" + plot_P = "NULL" + archive_P = "NULL" + move_P = "NULL" + boundary_path = "/ford1/share/gmao_SIteam/ModelData" + bcs_dir = f"{boundary_path}/bcs/{self.land.bcs} /{self.land.bcs}_{self.ocean.tag}" + replay_ana_expID = "REPLAY_UNSUPPORTED" + replay_ana_location = "REPLAY_UNSUPPORTED" + M2_replay_ana_location = "REPLAY_UNSUPPORTED" + sst_dir = f"{boundary_path}/{self.ocean.sst_name}/{self.ocean.IM}x{self.ocean.JM}" + chem_dir = f"{boundary_path}/fvInput_nc3" + work_dir = os.environ.get('HOME') + gwdrs_dir = f"{boundary_path}/GWD_RIDGE" + coupled_dir = f"{boundary_path}/aogcm" + + # By default on desktop, just ignore IOSERVER for now + self.atmos.NX = 1 + self.atmos.NY = 6 + answerdict["io_server"] = False + self.n_oserver_nodes = 0 + self.n_backend_pes = 0 + + ''' + def set_hist_temp(self): + tmphist_d, tmphist_path = tempfile.mkstemp() + print(self.ocean.history_template) + shutil.copy(self.ocean.history_template, tmphist_path) + return tmphist_d, tmphist_path + ''' + + ''' + mainly used to create .{*}root files and/or populate them + ''' + def create_dotfile(self, path, content): + try: + path = Path(path) + path.parent.mkdir(parents=True, exist_ok=True) + path.touch() + with open(path, 'w') as file: + file.write(os.path.dirname(content)) + except Exception as e: + print(f"An error occurred while creating directory: {str(e)}") + exit(1) + + + ####################################################################### + # Copy Model Executable and RC Files to Experiment Directory + ####################################################################### + def RC_setup(self): + + # Make the experiment directory and the RC directory inside of it + RC_dir = os.path.join(self.exp_dir, 'RC') + + # Delete the destination directory if it exists + if os.path.exists(RC_dir): + shutil.rmtree(RC_dir) + + # Copy over all files and subdirs in install/etc, keeping symlinks, and ignoring *.tmpl files + shutil.copytree(pathdict['etc'], RC_dir, symlinks=True, ignore=shutil.ignore_patterns('*.tmpl')) + + # Copy or symlink GEOSgcm.x (((IGNORE SINGULARITY/NATIVE BUILDS FOR NOW!!))) + geosgcmx_path = os.path.join(pathdict['bin'], 'GEOSgcm.x') + if linkx == True: + os.symlink(geosgcmx_path, os.path.join(self.exp_dir, 'GEOSgcm.x')) + else: + shutil.copy(geosgcmx_path, self.exp_dir) + + ####################################################################### + # Set Recommended MPI Stack Settings + ####################################################################### + def mpistacksettings(self): + + # load mpi config from YAML + with open('mpi_config.yaml') as file: + mpidict = yaml.load(file, Loader=yaml.FullLoader) + + # retrieve config from correlating mpi setting being used + mpi_config = mpidict.get(envdict['mpi']) + + print(mpi_config) + + + + + +mymodel = model() +mymodel.config_models() +#mymodel.print_all_vars() +mymodel.set_nodes() +mymodel.set_stuff() +mymodel.create_dotfile(f"{os.environ.get('HOME')}/.HOMDIRroot", 
answerdict['home_dir'].q_answer) +mymodel.create_dotfile(f"{os.environ.get('HOME')}/.EXPDIRroot", answerdict['exp_dir'].q_answer) +mymodel.create_dotfile(f"{os.environ.get('HOME')}/.GROUProot", answerdict['group_root'].q_answer) +mymodel.RC_setup() +mymodel.mpistacksettings() diff --git a/gcmpy/scripts/ocean.py b/gcmpy/scripts/ocean.py new file mode 100644 index 00000000..bf53153f --- /dev/null +++ b/gcmpy/scripts/ocean.py @@ -0,0 +1,257 @@ +from env import answerdict +from utility import color +from datetime import date + +class ocean: + def __init__(self): + self.name = answerdict["OM_name"].q_answer + self.coupled = answerdict["OM_coupled"].q_answer + self.gridtype = "" + self.gridtype_abrv = "" + self.gridname = "" + self.data = "" + self.preload = "" + self.history_template = answerdict["history_template"].q_answer + self.IM = None + self.JM = None + self.LM = answerdict["OM_vertical_res"].q_answer + self.IMO = None + self.JMO = None + self.res = "" + self.tag = "Reynolds" + self.sst_name = "" + self.sst_file = "" + self.ice_file = "" + self.kpar_file = "" + self.ostia = "" + self.out = "" + self.NX = None + self.NY = None + self.NF = None + self.latlon = "" + self.cube = "" + self.n_procs = None + self.MOM5 = "" + self.MOM6 = "" + self.MIT = "" + self.mpt_shepherd = "" + + # for debugging purposes + def print_vars(self): + all_vars = vars(self) + for var_name, var_value in all_vars.items(): + print(f"{color.CYAN}{var_name}: {var_value}{color.RESET}") + + def set_IMO(self): + self.IMO = f"{str(self.IM):04}" + + def set_JMO(self): + self.JMO = f"{str(self.JM):04}" + + def set_res(self): + hres = answerdict["OM_horizontal_res"].q_answer + if self.coupled == False and hres == "CS": + self.res = f"{self.gridtype_abrv}{self.IMO}x6C" + elif self.coupled == False: + self.res = f"{self.gridtype_abrv}{self.IMO}xPE{self.JMO}" + elif self.coupled == True: + self.res = f"{self.gridtype_abrv}{self.IMO}x{self.gridtype_abrv}{self.JMO}" + # Testing at NAS shows that coupled runs *require* MPI_SHEPHERD=true + # to run. We believe this is due to LD_PRELOAD. For now we only set + # this for coupled runs. + self.mpt_shepherd = "setenv MPI_SHEPHERD true" + + def set_gridname(self): + if self.gridtype_abrv == "CF": + self.gridname = f"OC{self.IM}x{self.JM}-{self.gridtype_abrv}" + elif self.name == "MIT": + self.gridname = f"{self.gridtype_abrv}{self.IM}x{self.JM}-{self.gridtype_abrv}" + else: + self.gridname = f"PE{self.IM}x{self.JM}-{self.gridtype_abrv}" + + def set_kpar_file(self): + self.kpar_file = f"SEAWIFS_KPAR_mon_clim.{self.IM}x{self.JM}" + + def coupled_hres(self): + match self.name: + case "MOM5": + self.name = "MOM" + self.preload = "env LD_PRELOAD=$GEOSDIR/lib/libmom.dylib" + mom5_warning = ( + ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n" + "You have chosen to set up a coupled model experiment with MOM5.\n" + "Be aware that such a set up is _known_ to have problems. See following for more details:\n" + "https://github.com/GEOS-ESM/MOM5/issues/19\n" + "If your intent is to help _fix_ above issue, your help is much appreciated. 
Thank you and good luck!\n" + "Otherwise, until this above issue is _fixed_ you are on your own with above choice.\n" + "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<" + ) + print(color.GREEN + mom5_warning + color.RESET) + case "MOM6": + self.preload = "env LD_PRELOAD=$GEOSDIR/lib/libmom6.dylib" + case "MIT": + self.gridtype_abrv = "llc" + + match self.name: + case "MIT": + match answerdict["OM_MIT_horizontal_res"].q_answer: + case "cs32": + self.JM = 32 + self.IM = self.JM * 6 + self.gridtype_abrv = "CM" + case "llc90": + self.gridtype_abrv = "LL" + if answerdict["AM_horizontal_res"].q_answer == "c48": + self.JM = 30 + self.IM = self.JM * 96 + else: + self.JM = 15 + self.IM = self.JM * 360 + case "llc1080": + self.gridtype_abrv = "LL" + self.JM = 60 + self.IM = self.JM * 2880 + case "llc2160": + self.gridtype_abrv = "LL" + self.JM = 72 + self.IM = self.JM * 7776 + case "MOM", "MOM6": + temp = answerdict["OM_MOM_horizontal_res"].q_answer.split() + self.IM = int(temp[0]) + self.JM = int(temp[1]) + self.gridtype = "TM" + + + def coupled_vres(self): + if answerdict["AM_horizontal_res"].q_answer == "c12": + self.NX = 3 + self.NY = 2 + else: + self.NX = 36 + self.NY = 10 + + self.n_procs = self.NX*self.NY + + match self.name: + case "MOM", "MOM6": + self.gridtype = "Tripolar" + case "MIT": + if self.gridtype_abrv == "CM": + self.NX = 6 + self.NY = 1 + else: + match answerdict["AM_horizontal_res"].q_answer: + case "c48": + self.NX = 96 + self.NY = 1 + case "c90": + self.NX = 360 + self.NY = 1 + case "c720": + self.NX = 2880 + self.NY = 1 + case "c1440": + self.NX = 7776 + self.NY = 1 + + def uncoupled_hres(self): + todays_date = date.today() + match answerdict["OM_horizontal_res"].q_answer: + case "o1": + temp_res = "360 180" + self.IM, self.JM = map(int, temp_res.split()) + self.gridtype = "LatLon" + self.NF = 1 + self.tag = "Reynolds" + self.sst_name = "SST" + self.out = "c" + self.sst_file = f"dataoceanfile_MERRA_sst_1971-current.{self.IM}x{self.JM}.LE" + self.ice_file = f"dataoceanfile_MERRA_fraci_1971-current.{self.IM}x{self.JM}.LE" + self.set_kpar_file() + self.gridtype_abrv = "DE" + self.latlon = "" + self.cube = "#DELETE" + self.ostia = "#DELETE" + self.data = "" + case "o2": + temp_res = "1440 720" + self.IM, self.JM = map(int, temp_res.split()) + self.gridtype = "LatLon" + self.NF = 1 + self.tag = "MERRA-2" + self.sst_name = "MERRA2" + self.out = "e" + self.sst_file = f"dataoceanfile_MERRA2_SST.{self.IM}x{self.JM}.{todays_date.year}.data" + self.ice_file = f"dataoceanfile_MERRA2_ICE.{self.IM}x{self.JM}.{todays_date.year}.data" + self.set_kpar_file() + self.gridtype_abrv = "DE" + self.latlon = "" + self.cube = "#DELETE" + self.ostia = "" + self.data = "" + case "o3": + temp_res = "2880 1440" + self.IM, self.JM = map(int, temp_res.split()) + self.gridtype = "LatLon" + self.NF = 1 + self.tag = "Ostia" + self.sst_name = "OSTIA_REYNOLDS" + self.out = "f" + self.sst_file = f"dataoceanfile_OSTIA_REYNOLDS_SST.{OGCM_IM}x{OGCM_JM}.{todays_date.year}.data" + self.ice_file = f"dataoceanfile_OSTIA_REYNOLDS_ICE.{OGCM_IM}x{OGCM_JM}.{todays_date.year}.data" + self.set_kpar_file() + self.gridtype_abrv = "DE" + self.latlon = "" + self.cube = "#DELETE" + self.ostia = "" + self.data = "" + case "CS": + if int(answerdict["AM_horizontal_res"].q_answer[1:]) >= 90: + self.IM = int(answerdict["AM_horizontal_res"].q_answer[1:]) + self.JM = self.IM * 6 + self.gridtype = "Cubed-Sphere" + self.NF = 6 + self.tag = "Ostia" + self.sst_name = "OSTIA_REYNOLDS" + self.out = "f" + self.sst_file = 
f"dataoceanfile_OSTIA_REYNOLDS_SST.{self.IM}x{self.JM}.{todays_date.year}.data" + self.ice_file = f"dataoceanfile_OSTIA_REYNOLDS_ICE.{self.IM}x{self.JM}.{todays_date.year}.data" + self.set_kpar_file() + self.gridtype_abrv = "CF" + self.latlon = "#DELETE" + self.cube = "" + self.ostia = "" + self.data = "#DELETE" + else: + print(color.RED + "ERROR: Cubed-Sphere Ocean with " + color.BLUE + \ + answerdict["AM_horizontal_res"].q_answer + color.RED + \ + " is not currently supported. Must be c90 or higher!") + exit(1) + + self.set_IMO() + self.set_JMO() + self.set_res() + self.LM = 34 + self.model = f"Data Ocean ({answerdict['AM_horizontal_res'].q_answer})" + self.coupled = "#DELETE" + self.MOM5 = "#DELETE" + self.MOM6 = "#DELETE" + self.MIT = "#DELETE" + + + + # ocean model driver + def config(self): + match answerdict["OM_coupled"].q_answer: + case True: + self.coupled_hres() + self.coupled_vres() + case False: + self.uncoupled_hres() + + + + + + + diff --git a/gcmpy/scripts/process_questions.py b/gcmpy/scripts/process_questions.py new file mode 100644 index 00000000..00411572 --- /dev/null +++ b/gcmpy/scripts/process_questions.py @@ -0,0 +1,232 @@ +import yaml, re, os +from generate_question import generateQuestion +from utility import envdict, pathdict, color + +""" +This class handles special cases where a question"s properties need to be checked/dynamically +changed at runtime based on certain conditions (e.g. input validation) +""" +class handle: + + @staticmethod + def select_type(answerdict, i): + if answerdict[i].q_type == "select" and answerdict[i].q_answer != None: + # as of right now, we only want the first word of every select-type question. + # If that changes it's probably best to delete this function. + answerdict[i].q_answer = answerdict[i].q_answer.split(None, 1)[0] + + @staticmethod + def experiment_desc(answerdict, i): + if i == "experiment_description" and answerdict[i].q_answer != None: + while answerdict["experiment_description"].q_answer == answerdict["experiment_id"].q_answer: + print(color.RED + "The experiment description must be different from the ID!") + answerdict[i].load_question(answerdict) + + @staticmethod + def processor_choices(answerdict, i): + if i == "processor": + if envdict["site"] == "NCCS": + answerdict[i].q_choices = ["Skylake", "Cascade Lake"] + elif envdict["site"] == "NAS": + answerdict[i].q_choices = ["Skylake", "Haswell", "Broadwell", "Cascade Lake", "AMD Rome"] + else: + exit(1) + + print(color.GREEN + "NOTE Due to how FV3 is compiled by default, Sandy Bridge\n" + \ + "and Ivy Bridge are not supported by current GEOS" + color.RESET) + + @staticmethod + def MIT_hres_choices(answerdict, i): + # This is a dumb case, but these are the only known ocean resolutions that work + # with these atmosphere resolutions, so we will only give the users these choices + if i == "OM_MIT_horizontal_res": + if answerdict["AM_horizontal_res"].q_answer == "c720": + answerdict[i].q_choices = ["llc1080 (1/12-deg, Lat-Lon-Cube)"] + elif answerdict["AM_horizontal_res"].q_answer == "c1440": + answerdict[i].q_choices = ["llc2160 (1/24-deg, Lat-Lon-Cube)"] + + @staticmethod + def MOM_hres_default(answerdict, i): + if i == "OM_MOM_horizontal_res" and \ + answerdict["OM_name"].q_answer == "MOM6" and \ + answerdict["AM_horizontal_res"].q_answer == "c12": + answerdict[i].q_default = "72 36" + + @staticmethod + def OM_hres_valid(answerdict, i): + if i == "OM_MOM_horizontal_res" and answerdict[i].q_answer != None: + #input validation using regex + while not 
re.match(r"^\d+\s\d+$", answerdict[i].q_answer): + print(color.RED + "please enter exactly 2 numbers separated by a space! (int int)\n") + answerdict[i].load_question(answerdict) + + @staticmethod + def heartbeat_default(answerdict, i): + if i == "heartbeat": + ''' + Default heartbeat is determined by atmospheric resolution. + Of course, this just the recommended value. The user can + enter whatever value they like + ''' + heartbeat = "" + match answerdict["AM_horizontal_res"].q_answer: + case "c12" | "c24" | "c48" | "c90": + heartbeat = "1800" + case "c180": + heartbeat = "900" + case "c360": + heartbeat = "450" + case "c720": + heartbeat = "225" + case "c1440": + heartbeat = "75" + case "c5760": + heartbeat = "30" + case "c270" | "c540": + heartbeat = "600" + case "c1080": + heartbeat = "150" + case "c1536": + heartbeat = "75" + case "c2160": + heartbeat = "60" + + + # Per W. Putman recommendation, set heartbeat to 450s anytime BACM_1M is selected + # ((IMPORTANT: default must be type string)) + if answerdict["AM_microphysics"].q_answer == "BACM_1M": + heartbeat = "450" + + answerdict[i].q_default = heartbeat + + + @staticmethod + def heartbeat_valid(answerdict, i): + if i == "heartbeat": + # input validation using regex + while not re.match(r"^\d+$", answerdict[i].q_answer): + print(f"{color.RED}please enter exactly 1 number!{color.RESET}") + answerdict[i].load_question(answerdict) + + @staticmethod + def history_template_default(answerdict, i): + if i == "history_template": + + match answerdict["OM_name"]: + case "MOM5": + answerdict[i].q_default = f"{pathdict['etc']}/HISTORY.AOGCM-MOM5.rc.tmpl" + case "MOM6": + answerdict[i].q_default = f"{pathdict['etc']}/HISTORY.AOGCM.rc.tmpl" + case "MIT": + answerdict[i].q_default = f"{pathdict['etc']}/HISTORY.AOGCM_MITgcm.rc.tmpl" + case _: + answerdict[i].q_default = f"{pathdict['etc']}/HISTORY.AGCM.rc.tmpl" + + + @staticmethod + def history_template_valid(answerdict, i): + if i == "history_template": + while not os.path.exists(answerdict[i].q_answer): + print(f"Error: Could not find {color.RED}{answerdict[i]}{color.RESET}") + answerdict[i].load_question(answerdict) + + + @staticmethod + def exp_dir_default(answerdict, i): + if i == "home_dir" or i == "exp_dir": + root = f"{os.environ.get('HOME')}/.{i[:3].upper()}DIRroot" + if os.path.exists(root): + try: + print("here") + with open(root, "r") as file: + answerdict[i].q_default = f"{file.read()}/{answerdict['experiment_id'].q_answer}" + except Exception as e: + print(f"An error occurred while reading {color.BLUE}.HOMDIRroot{color.RESET}: {str(e)}") + elif envdict['site'] in ['NAS','NCCS']: + answerdict[i].q_default = f"/{'discover/' if envdict['site'] == 'NCCS' else ''}nobackup/{os.environ.get('LOGNAME')}/{answerdict['experiment_id'].q_answer}" + else: + answerdict[i].q_default = f"{os.environ.get('HOME')}/{answerdict['experiment_id']}" + + + @staticmethod + def exp_dir_valid(answerdict, i): + if i == "home_dir" or i == "exp_dir": + while os.path.basename(answerdict[i].q_answer) != answerdict['experiment_id'].q_answer: + print(f"{color.RED}This directory MUST point to the experiment ID: {color.BLUE}{answerdict['experiment_id'].q_answer}{color.RED}!{color.RESET}") + answerdict[i].load_question(answerdict) + + + @staticmethod + def group_root_default(answerdict, i): + if i == "group_root": + groups = subprocess.check_output('groups', shell=True).decode('utf-8').strip() + answerdict[i].q_default = groups.split()[0] + + + + + +# open yaml file and create dictionary from it's contents +def 
load_yamls(): + + # list of question files (*MAKE SURE THEY ARE IN THE ORDER YOU WANT THEM TO BE ASKED*) + file_list = ["exp_setup.yaml", \ + "atmospheric_model.yaml", \ + "ocean_model.yaml", \ + "land_model.yaml", \ + "gocart.yaml", \ + "directory_setup.yaml"] + all_yaml_questions = {} + + for filename in file_list: + try: + with open(filename, "r") as file: + yaml_questions = yaml.safe_load(file) + all_yaml_questions.update(yaml_questions) + except IOError: + print(f"{color.RED}YAML file '{filename}' could not be located. Exiting.") + exit(1) + + return all_yaml_questions + +# actual driver for questionary questions +def process(): + answerdict = {} + yaml_questions = load_yamls() + + # creates a dictionary of question:answer pairs + for i in yaml_questions: + temp = generateQuestion(i, yaml_questions[i]["type"], \ + yaml_questions[i]["prompt"], \ + yaml_questions[i]["choices"], \ + yaml_questions[i]["default_answer"],\ + yaml_questions[i]["follows_up"]) + + answerdict[i] = temp + + + # if the question properties need to dynamically change at + # runtime call handle function BEFORE load_question() + handle.processor_choices(answerdict,i) + handle.MIT_hres_choices(answerdict, i) + handle.MOM_hres_default(answerdict, i) + handle.heartbeat_default(answerdict, i) + handle.history_template_default(answerdict, i) + handle.exp_dir_default(answerdict, i) + + # prompts the user with the question + answerdict[i].load_question(answerdict) + + # input validation and other post processing goes here, + # AFTER load_question() call + handle.experiment_desc(answerdict, i) + handle.OM_hres_valid(answerdict, i) + handle.heartbeat_valid(answerdict, i) + handle.history_template_valid(answerdict, i) + handle.exp_dir_valid(answerdict, i) + + # strips the first word from every select type question + handle.select_type(answerdict, i) + + return answerdict + diff --git a/gcmpy/scripts/test_generate_question.py b/gcmpy/scripts/test_generate_question.py new file mode 100644 index 00000000..b4a4219d --- /dev/null +++ b/gcmpy/scripts/test_generate_question.py @@ -0,0 +1,9 @@ +import unittest +import generate_question as gq + +class testGenereateQuestion(unittest.TestCase): + def test_should_ask(self): + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/gcmpy/scripts/utility.py b/gcmpy/scripts/utility.py new file mode 100644 index 00000000..9ce7295d --- /dev/null +++ b/gcmpy/scripts/utility.py @@ -0,0 +1,66 @@ +import os, sys, platform + +# pretty font +class color: + PURPLE = '\033[95m' + CYAN = '\033[96m' + DARKCYAN = '\033[36m' + BLUE = '\033[94m' + GREEN = '\033[92m' + YELLOW = '\033[93m' + RED = '\033[91m' + BOLD = '\033[1m' + UNDERLINE = '\033[4m' + END = '\033[0m' + RESET = '\033[0m' + + # accepts any string and styles it + def color_path(path): + return color.BLUE + color.BOLD + path + color.RESET + + def color_file(file): + return color.GREEN + color.BOLD + file + color.RESET + +# raises expections +class exceptions: + # Displays usage information to user + def print_usage(): + print(color.GREEN + pathdict['SCRIPTNAME'] + ", a setup script for the GEOS GCM\n\n" + \ + "\tUsage: " + pathdict['SCRIPTNAME'] + " [optional flag]\n\n" + \ + "\t --link Link GEOSgcm.x into experiment directory\n" + \ + "\t --singularity Set up Singularity experiment\n" + \ + "\t -h --help Show usage" + \ + "\n\nIf invoked alone, the script runs as normal." 
+ \ + "\nFor more information, please contact Matt Thompson, Scott Rabenhorst, or Shayon Shakoorzadeh.\n") + exit(1) + + # Display misconfiguration messages to the user + def raise_user_exception(msg): + print(msg) + exit(1) + + def raise_fatal_exception(msg): + exceptions.raiseuserexception(msg) + exit(1) + + # This function will clean output files if script is interrupted + def cleanup(): + pass + + + +####################################################################### +# Directory and Environment Variable Locations Dictionaries +####################################################################### +# set up envirnoment dictionary for later +envdict = {} +pathdict = {} # Start empty - cannot reference self before initialization +pathdict['cwd'] = os.getcwd() +pathdict['bin'] = os.path.dirname(pathdict['cwd']) +pathdict['install'] = os.path.dirname(pathdict['bin']) +pathdict['etc'] = os.path.join(pathdict['install'], 'etc') +pathdict['GEOSgcm'] = os.path.dirname(pathdict['install']) +pathdict['build'] = os.path.join(pathdict['GEOSgcm'], 'build') +#pathdict['SCRIPT'] = os.path.realpath(__file__) +#pathdict['SCRIPTNAME'] = os.path.split(pathdict['SCRIPT'])[-1] +#pathdict['PY_METHOD'] = os.path.join(pathdict['BIN'], 'py_method') From 6361d50d09d4ad40858b97a0590645498a257c94 Mon Sep 17 00:00:00 2001 From: Shayon Shakoorzadeh Date: Tue, 9 Apr 2024 17:49:53 -0400 Subject: [PATCH 04/11] Add yaml files via upload --- gcmpy/yaml/atmospheric_model.yaml | 50 ++++++++++++++++++ gcmpy/yaml/directory_setup.yaml | 30 +++++++++++ gcmpy/yaml/exp_setup.yaml | 32 ++++++++++++ gcmpy/yaml/gocart.yaml | 13 +++++ gcmpy/yaml/land_model.yaml | 14 +++++ gcmpy/yaml/mpi_config.yaml | 85 +++++++++++++++++++++++++++++++ gcmpy/yaml/ocean_model.yaml | 66 ++++++++++++++++++++++++ 7 files changed, 290 insertions(+) create mode 100644 gcmpy/yaml/atmospheric_model.yaml create mode 100644 gcmpy/yaml/directory_setup.yaml create mode 100644 gcmpy/yaml/exp_setup.yaml create mode 100644 gcmpy/yaml/gocart.yaml create mode 100644 gcmpy/yaml/land_model.yaml create mode 100644 gcmpy/yaml/mpi_config.yaml create mode 100644 gcmpy/yaml/ocean_model.yaml diff --git a/gcmpy/yaml/atmospheric_model.yaml b/gcmpy/yaml/atmospheric_model.yaml new file mode 100644 index 00000000..9769d35f --- /dev/null +++ b/gcmpy/yaml/atmospheric_model.yaml @@ -0,0 +1,50 @@ +AM_horizontal_res: + type: 'select' + prompt: 'Select the atmospheric models HORIZONTAL resolution:' + choices: ['c12 -- 8 deg (750.0 km)', 'c24 -- 4 deg (375.0 km)', 'c48 -- 2 deg (187.5 km)', 'c90 -- 1 deg (100.0 km)', 'c180 -- 1/2 deg ( 50.0 km)', 'c360 -- 1/4 deg ( 25.0 km)', 'c720 -- 1/8 deg ( 12.5 km)', 'c1440 - 1/16 deg ( 6.25 km)', 'c2880 - 1/32 deg ( 3.125 km)', 'c5760 - 1/64 deg ( 1.5625 km)','c270 -- (16.0 -100 km)', 'c540 -- ( 8.0 - 50 km)', 'c1080 - ( 4.0 - 25 km)', 'c1536 - ( 2.0 - 20 km)', 'c2160 - ( 2.0 - 12 km)'] + default_answer: '' + follows_up: '' + +AM_vertical_res: + type: 'select' + prompt: 'Enter the atmospheric models VERTICAL resolution:' + choices: ['72', '91', '137', '181'] + default_answer: '' + follows_up: '' + +AM_microphysics: + type: 'select' + prompt: 'Select the Atmospheric Model Microphysics:' + choices: ['BACM_1M -- 3-phase 1-moment Bacmeister et al', 'GFDL_1M -- 6-phase 1-moment Geophysical Fluid Dynamics Laboratory', 'MGB2_2M -- 5 or 6-phase 2-moment Morrison & Gettleman'] + default_answer: '' + follows_up: '' + +heartbeat: + type: 'text' + prompt: 'Enter a number (in seconds) for HEARTBEAT_DT (press ENTER for recommended value):' + choices: '' + 
default_answer: '' + follows_up: '' + +use_hydrostatic: + type: 'confirm' + prompt: 'Use Hydrostatic Amtosphere?' + choices: '' + default_answer: True + follows_up: '' + +io_server: + type: 'confirm' + prompt: 'Would you like to IOSERVER?' + choices: '' + default_answer: 'True' + follows_up: '' + + +#combine these two questions and just change "choices:" based on site +processor: + type: 'select' + prompt: 'Enter the Processor Type you wish to run on:' + choices: '' + default_answer: '' + follows_up: '' diff --git a/gcmpy/yaml/directory_setup.yaml b/gcmpy/yaml/directory_setup.yaml new file mode 100644 index 00000000..c4e7456e --- /dev/null +++ b/gcmpy/yaml/directory_setup.yaml @@ -0,0 +1,30 @@ +history_template: + type: 'path' + prompt: 'Enter the location of HISTORY template to use (press ENTER for recommended template):' + choices: '' + default_answer: '' + follows_up: '' + +home_dir: + type: 'path' + prompt: 'Enter Desired Location for the HOME Directory (to contain scripts and RC files):' + choices: '' + default_answer: '' + follows_up: '' + +exp_dir: + type: 'path' + prompt: 'Enter Desired Location for the EXPERIMENT Directory (to contain model output and restart files):' + choices: '' + default_answer: '' + follows_up: '' + +group_root: + type: 'text' + prompt: 'Enter your GROUP ID for Current EXP:' + choices: '' + default_answer: '' + follows_up: '' + + + diff --git a/gcmpy/yaml/exp_setup.yaml b/gcmpy/yaml/exp_setup.yaml new file mode 100644 index 00000000..82404a5d --- /dev/null +++ b/gcmpy/yaml/exp_setup.yaml @@ -0,0 +1,32 @@ +experiment_id: + type: 'text' + prompt: 'Enter the experiment ID:' + choices: '' + default_answer: '' + follows_up: '' + +experiment_description: + type: 'text' + prompt: 'Enter a 1-line Experiment Description:' + choices: '' + default_answer: '' + follows_up: '' + +clone_experiment: + type: 'confirm' + prompt: 'Would you like to CLONE an old experiment?' + choices: '' + default_answer: False + follows_up: '' + +clone_experiment_path: + type: 'path' + prompt: 'Enter the location of the experiment you wish to clone (where gcm_run.j is located):' + choices: '' + default_answer: '' + follows_up: + - ['clone_experiment', [True]] + + + + diff --git a/gcmpy/yaml/gocart.yaml b/gcmpy/yaml/gocart.yaml new file mode 100644 index 00000000..f7c681ff --- /dev/null +++ b/gcmpy/yaml/gocart.yaml @@ -0,0 +1,13 @@ +gocart_aerosols: + type: 'select' + prompt: 'Do you wish to run GOCART with Actual or Climatological Aersols?' 
+ choices: ['Actual', 'Climatological'] + default_answer: '' + follows_up: '' + +gocart_emission: + type: 'select' + prompt: 'Select the GOCART Emission Files to use:' + choices: ['AMIP', 'OPS'] + default_answer: '' + follows_up: '' diff --git a/gcmpy/yaml/land_model.yaml b/gcmpy/yaml/land_model.yaml new file mode 100644 index 00000000..86353a87 --- /dev/null +++ b/gcmpy/yaml/land_model.yaml @@ -0,0 +1,14 @@ +LS_boundary_conditions: + type: 'select' + prompt: 'Select the Land Surface Boundary Conditions:' + choices: ['Icarus', 'Icarus-NLv3'] + default_answer: '' + follows_up: '' + +LS_model: + type: 'select' + prompt: 'Select the Land Surface Model:' + choices: ['Catchment', 'CatchmentCN-CLM4.0 (CN_CLM40)', 'CatchmentCN-CLM4.5 (CN_CLM45)'] + default_answer: '' + follows_up: + - ['LS_boundary_conditions', 'Icarus-NLv3'] diff --git a/gcmpy/yaml/mpi_config.yaml b/gcmpy/yaml/mpi_config.yaml new file mode 100644 index 00000000..df537070 --- /dev/null +++ b/gcmpy/yaml/mpi_config.yaml @@ -0,0 +1,85 @@ +openmpi: | + # Turn off warning about TMPDIR on NFS + setenv OMPI_MCA_shmem_mmap_enable_nfs_warning 0 + setenv OMPI_MCA_mpi_preconnect_all 1 + setenv OMPI_MCA_coll_tuned_bcast_algorithm 7 + setenv OMPI_MCA_coll_tuned_scatter_algorithm 2 + setenv OMPI_MCA_coll_tuned_reduce_scatter_algorithm 3 + setenv OMPI_MCA_coll_tuned_allreduce_algorithm 3 + setenv OMPI_MCA_coll_tuned_allgather_algorithm 4 + setenv OMPI_MCA_coll_tuned_allgatherv_algorithm 3 + setenv OMPI_MCA_coll_tuned_gather_algorithm 1 + setenv OMPI_MCA_coll_tuned_barrier_algorithm 0 + # required for a tuned flag to be effective + setenv OMPI_MCA_coll_tuned_use_dynamic_rules 1 + # disable file locks + setenv OMPI_MCA_sharedfp "^lockedfile,individual" + +mvapich: | + # MVAPICH and GEOS has issues with restart writing. Having the + # oserver write them seems to...work + setenv MV2_ENABLE_AFFINITY 0 + setenv SLURM_DISTRIBUTION block + setenv MV2_MPIRUN_TIMEOUT 100 + setenv MV2_GATHERV_SSEND_THRESHOLD 256 + +mpt: | + setenv MPI_COLL_REPRODUCIBLE + setenv SLURM_DISTRIBUTION block + #setenv MPI_DISPLAY_SETTINGS 1 + #setenv MPI_VERBOSE 1 + setenv MPI_MEMMAP_OFF + unsetenv MPI_NUM_MEMORY_REGIONS + setenv MPI_XPMEM_ENABLED yes + unsetenv SUPPRESS_XPMEM_TRIM_THRESH + setenv MPI_LAUNCH_TIMEOUT 40 + setenv MPI_COMM_MAX 1024 + setenv MPI_GROUP_MAX 1024 + setenv MPI_BUFS_PER_PROC 256 + # For some reason, PMI_RANK is randomly set and interferes + # with binarytile.x and other executables. + unsetenv PMI_RANK + # Often when debugging on MPT, the traceback from Intel Fortran + # is "absorbed" and only MPT's errors are displayed. To allow the + # compiler's traceback to be displayed, uncomment this environment + # variable + #setenv FOR_IGNORE_EXCEPTIONS false + +intelmpi: | + setenv I_MPI_ADJUST_ALLREDUCE 12 + setenv I_MPI_ADJUST_GATHERV 3 + # This flag prints out the Intel MPI state. Uncomment if needed + #setenv I_MPI_DEBUG 9 + +NCCS: | + # These are options determined to be useful at NCCS + # Not setting generally as they are more fabric/cluster + # specific compared to the above adjustments + setenv I_MPI_SHM_HEAP_VSIZE 512 + setenv PSM2_MEMORY large + +BUILT_ON_SLE15: | + # Testing by Bill Putman found these to be + # useful flags with Intel MPI on SLES15 on the + # Milan nodes. + # Note 1: Testing by NCCS shows the PSM3 provider + # runs on the Infiniband fabric. Tests show it runs + # up to C720. 
+ # Note 2: When the Cascade Lakes are moved to + # SLES15, these will need to be Milan-only flags + # as Intel MPI will probably work just fine with + # Intel chips. + setenv I_MPI_FALLBACK 0 + setenv I_MPI_FABRICS ofi + setenv I_MPI_OFI_PROVIDER psm3 + setenv I_MPI_ADJUST_SCATTER 2 + setenv I_MPI_ADJUST_SCATTERV 2 + setenv I_MPI_ADJUST_GATHER 2 + setenv I_MPI_ADJUST_GATHERV 3 + setenv I_MPI_ADJUST_ALLGATHER 3 + setenv I_MPI_ADJUST_ALLGATHERV 3 + setenv I_MPI_ADJUST_ALLREDUCE 12 + setenv I_MPI_ADJUST_REDUCE 10 + setenv I_MPI_ADJUST_BCAST 11 + setenv I_MPI_ADJUST_REDUCE_SCATTER 4 + setenv I_MPI_ADJUST_BARRIER 9 diff --git a/gcmpy/yaml/ocean_model.yaml b/gcmpy/yaml/ocean_model.yaml new file mode 100644 index 00000000..57e321dc --- /dev/null +++ b/gcmpy/yaml/ocean_model.yaml @@ -0,0 +1,66 @@ +# format - loads the questionary API +# +# question name: +# question type: 'text/confirm/select/path' +# question prompt: 'ask your question' +# choices: ['list', 'of', 'choices'] <----------(only for select type, otherwise leave blank) +# default answer: 'default' <----------(optional/can be left blank) +# follows_up: <----------(optional/can be left blank. If you want to +# -['prev question', ['answer1', 'answer2']] conditionally skip a question, add a list of +# -['or other prev question', ['any']] tuples containing the question(s) it will follow up +# and the answer the user has to give in order for +# this question to be asked. Multiple answers can be +# an ask condition, or you can set the answer to 'any' +# which will allow any answer given to show the question.) + +OM_coupled: + type: 'confirm' + prompt: 'Do you wish to run the COUPLED Ocean/Sea-Ice Model?' + choices: '' + default_answer: False + follows_up: '' + +# ------------------------------------- coupled == False -------------------------------------- +OM_horizontal_res: + type: 'select' + prompt: 'Select the Data_Ocean HORIZONTAL resolution:' + choices: ['o1 (1 -deg, 360x180 Reynolds)', 'o2 (1/4-deg, 1440x720 MERRA-2)', 'o3 (1/8-deg, 2880x1440 OSTIA)', 'CS (Cubed-Sphere OSTIA)'] + default_answer: '' + follows_up: + - ['OM_coupled', [False]] +# --------------------------------------------------------------------------------------------- + +# ------------------------------------- coupled == True --------------------------------------- +OM_name: + type: 'select' + prompt: 'Choose an Ocean Model:' + choices: ['MOM5', 'MOM6', 'MIT'] + default_answer: '' + follows_up: + - ['OM_coupled', [True]] + +OM_MOM_horizontal_res: + type: 'text' + prompt: 'Enter the Ocean Lat/Lon Horizontal Resolution (IM JM):' + choices: '' + default_answer: '360 200' + follows_up: + - ['OM_name', ['MOM5', 'MOM6']] + +OM_MIT_horizontal_res: + type: 'select' + prompt: 'Select the Data_Ocean HORIZONTAL resolution:' + choices: ['llc90 (1-deg, Lat-Lon-Cube)', 'llc270 (1/3-deg, Lat-Lon-Cube)', 'cs32 (5-deg, cubed-sphere)'] + default_answer: '' + follows_up: + - ['OM_name', ['MIT']] + +OM_vertical_res: + type: 'text' + prompt: 'Enter the Ocean Model VERTICAL resolution (LM):' + choices: '' + default_answer: '50' + follows_up: + - ['OM_MOM_horizontal_res', ['any']] + - ['OM_MIT_horizontal_res', ['any']] +# ---------------------------------------------------------------------------------------------- \ No newline at end of file From e13a1f5b1facfc44014af5aa1b59d9b46035f006 Mon Sep 17 00:00:00 2001 From: Shayon Shakoorzadeh Date: Wed, 10 Apr 2024 11:54:12 -0400 Subject: [PATCH 05/11] Update gcmpy/CMakeLists.txt Co-authored-by: Matthew Thompson --- gcmpy/CMakeLists.txt | 2 +- 1 
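The question files added above all follow the schema documented in the comment block at the top of ocean_model.yaml (type / prompt / choices / default_answer / follows_up). Purely as an illustration of that schema, the sketch below shows how one of these entries could be loaded and asked with the questionary package, and how a follows_up entry could gate a question on earlier answers. The repository's actual loader lives in generate_question.py and process_questions.py (not reproduced in full in this series), so the helper names and the relative '../yaml/' path here are assumptions, matching how process_questions.py loads these files.

    # Illustrative only -- not the repo's generate_question.py.
    import yaml
    import questionary

    def should_ask(entry, answers):
        # follows_up is '' for unconditional questions, otherwise a list of
        # [previous_question, accepted_answers] pairs; 'any' matches anything.
        conditions = entry.get('follows_up') or []
        for prev, accepted in conditions:
            accepted = accepted if isinstance(accepted, list) else [accepted]
            if accepted != ['any'] and answers.get(prev) not in accepted:
                return False
        return True

    def ask_one(name, entry, answers):
        builders = {'text': questionary.text, 'confirm': questionary.confirm,
                    'select': questionary.select, 'path': questionary.path}
        kwargs = {'choices': entry['choices']} if entry['type'] == 'select' else {}
        if entry.get('default_answer') not in ('', None):
            kwargs['default'] = entry['default_answer']
        answers[name] = builders[entry['type']](entry['prompt'], **kwargs).ask()

    answers = {}
    with open('../yaml/ocean_model.yaml') as f:     # assumes running from gcmpy/scripts
        for name, entry in yaml.safe_load(f).items():
            if should_ask(entry, answers):
                ask_one(name, entry, answers)

With this kind of gating, OM_horizontal_res is only asked when OM_coupled was answered False, and OM_name only when it was answered True, which is the behavior the schema comment describes.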
file changed, 1 insertion(+), 1 deletion(-) diff --git a/gcmpy/CMakeLists.txt b/gcmpy/CMakeLists.txt index 8846312e..f90cd199 100644 --- a/gcmpy/CMakeLists.txt +++ b/gcmpy/CMakeLists.txt @@ -1,6 +1,6 @@ # cp (makes exe) add_subdirectory(scripts) -add_subdirectory(yamls) +add_subdirectory(yaml) set (programs) From 6f84181fe02a73622e4621669478f160fc78c4b5 Mon Sep 17 00:00:00 2001 From: Shayon Shakoorzadeh Date: Wed, 10 Apr 2024 15:17:31 -0400 Subject: [PATCH 06/11] CMakeLists changes --- gcmpy/CMakeLists.txt | 2 +- gcmpy/scripts/CMakeLists.txt | 10 +++++++++- gcmpy/yaml/CMakeLists.txt | 8 +++++++- 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/gcmpy/CMakeLists.txt b/gcmpy/CMakeLists.txt index 8846312e..f90cd199 100644 --- a/gcmpy/CMakeLists.txt +++ b/gcmpy/CMakeLists.txt @@ -1,6 +1,6 @@ # cp (makes exe) add_subdirectory(scripts) -add_subdirectory(yamls) +add_subdirectory(yaml) set (programs) diff --git a/gcmpy/scripts/CMakeLists.txt b/gcmpy/scripts/CMakeLists.txt index 43be2a33..440db4ac 100644 --- a/gcmpy/scripts/CMakeLists.txt +++ b/gcmpy/scripts/CMakeLists.txt @@ -1,5 +1,13 @@ set (programs - #fill + atmosphere.py + env.py + generate_question.py + gocart.py + land.py + model.py + ocean.py + process_questions.py + utility.py ) install ( diff --git a/gcmpy/yaml/CMakeLists.txt b/gcmpy/yaml/CMakeLists.txt index 36e3ed48..92e417d7 100644 --- a/gcmpy/yaml/CMakeLists.txt +++ b/gcmpy/yaml/CMakeLists.txt @@ -1,5 +1,11 @@ set (programs - #fill + atmospheric_model.yaml + directory_setup.yaml + exp_setup.yaml + gocart.yaml + land_model.yaml + mpi_config.yaml + ocean_model.yaml ) install ( From bd64abf0f74aa70e78c7bbb84f6ba024486be874 Mon Sep 17 00:00:00 2001 From: Shayon Shakoorzadeh Date: Wed, 24 Apr 2024 12:11:26 -0400 Subject: [PATCH 07/11] storing to switch branches --- gcmpy/scripts/model.py | 25 +++++++++++++++++++++---- gcmpy/scripts/process_questions.py | 12 ++++++------ gcmpy/scripts/utility.py | 19 +++++++++---------- gcmpy/sync.sh | 8 ++++++++ 4 files changed, 44 insertions(+), 20 deletions(-) create mode 100755 gcmpy/sync.sh diff --git a/gcmpy/scripts/model.py b/gcmpy/scripts/model.py index 8c3209c8..86970dec 100644 --- a/gcmpy/scripts/model.py +++ b/gcmpy/scripts/model.py @@ -369,7 +369,7 @@ def RC_setup(self): shutil.rmtree(RC_dir) # Copy over all files and subdirs in install/etc, keeping symlinks, and ignoring *.tmpl files - shutil.copytree(pathdict['etc'], RC_dir, symlinks=True, ignore=shutil.ignore_patterns('*.tmpl')) + shutil.copytree(pathdict['etc'], RC_dir, symlinks=True, ignore=shutil.ignore_patterns('*.tmpl', 'fvcore.layout.rc')) # Copy or symlink GEOSgcm.x (((IGNORE SINGULARITY/NATIVE BUILDS FOR NOW!!))) geosgcmx_path = os.path.join(pathdict['bin'], 'GEOSgcm.x') @@ -384,7 +384,7 @@ def RC_setup(self): def mpistacksettings(self): # load mpi config from YAML - with open('mpi_config.yaml') as file: + with open('../yaml/mpi_config.yaml') as file: mpidict = yaml.load(file, Loader=yaml.FullLoader) # retrieve config from correlating mpi setting being used @@ -392,8 +392,25 @@ def mpistacksettings(self): print(mpi_config) - - + ####################################################################### + # Create directories and copy files over + ####################################################################### + def copy_files_into_exp(self): + file_list = ['gcm_run.j', \ + 'gcm_post.j', \ + 'gcm_archive.j', \ + 'gcm_regress.j', \ + 'gcm_plot.tmpl', \ + 'gcm_quickplot.csh', \ + 'gcm_moveplot.j', \ + 'gcm_forecast.tmpl', \ + 'gcm_forecast.setup', \ + 'gcm_emip.setup', 
\ + 'CAP.rc.tmpl', \ + 'AGCM.rc.tmpl', \ + 'HISTORY.rc.tmpl', \ + 'logging.yaml', \ + 'fvcore_layout.rc'] mymodel = model() diff --git a/gcmpy/scripts/process_questions.py b/gcmpy/scripts/process_questions.py index 00411572..0e09cb97 100644 --- a/gcmpy/scripts/process_questions.py +++ b/gcmpy/scripts/process_questions.py @@ -170,12 +170,12 @@ def group_root_default(answerdict, i): def load_yamls(): # list of question files (*MAKE SURE THEY ARE IN THE ORDER YOU WANT THEM TO BE ASKED*) - file_list = ["exp_setup.yaml", \ - "atmospheric_model.yaml", \ - "ocean_model.yaml", \ - "land_model.yaml", \ - "gocart.yaml", \ - "directory_setup.yaml"] + file_list = ["../yaml/exp_setup.yaml", \ + "../yaml/atmospheric_model.yaml", \ + "../yaml/ocean_model.yaml", \ + "../yaml/land_model.yaml", \ + "../yaml/gocart.yaml", \ + "../yaml/directory_setup.yaml"] all_yaml_questions = {} for filename in file_list: diff --git a/gcmpy/scripts/utility.py b/gcmpy/scripts/utility.py index 9ce7295d..7e2fc1c5 100644 --- a/gcmpy/scripts/utility.py +++ b/gcmpy/scripts/utility.py @@ -54,13 +54,12 @@ def cleanup(): ####################################################################### # set up envirnoment dictionary for later envdict = {} -pathdict = {} # Start empty - cannot reference self before initialization -pathdict['cwd'] = os.getcwd() -pathdict['bin'] = os.path.dirname(pathdict['cwd']) -pathdict['install'] = os.path.dirname(pathdict['bin']) -pathdict['etc'] = os.path.join(pathdict['install'], 'etc') -pathdict['GEOSgcm'] = os.path.dirname(pathdict['install']) -pathdict['build'] = os.path.join(pathdict['GEOSgcm'], 'build') -#pathdict['SCRIPT'] = os.path.realpath(__file__) -#pathdict['SCRIPTNAME'] = os.path.split(pathdict['SCRIPT'])[-1] -#pathdict['PY_METHOD'] = os.path.join(pathdict['BIN'], 'py_method') +pathdict = {} # Start empty - cannot reference self before initialization (<--what are you yapping about??) 
+pathdict['scripts'] = os.getcwd() +pathdict['gcmpy'] = os.path.dirname(pathdict['scripts']) +pathdict['yaml'] = os.path.join(pathdict['gcmpy'], 'yaml') +pathdict['bin'] = os.path.dirname(pathdict['gcmpy']) +pathdict['install'] = os.path.dirname(pathdict['bin']) +pathdict['etc'] = os.path.join(pathdict['install'], 'etc') +pathdict['GEOSgcm'] = os.path.dirname(pathdict['install']) +pathdict['build'] = os.path.join(pathdict['GEOSgcm'], 'build') diff --git a/gcmpy/sync.sh b/gcmpy/sync.sh new file mode 100755 index 00000000..5225d453 --- /dev/null +++ b/gcmpy/sync.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +# Source and destination directories +source_dir="/discover/nobackup/sshakoor/GEOSgcm/src/Applications/@GEOSgcm_App/gcmpy" +destination_dir="/discover/nobackup/sshakoor/GEOSgcm/install/bin/gcmpy" + +# Synchronize scripts +rsync -av --exclude='sync.sh' --exclude='**/CMakeLists.txt' "$source_dir/" "$destination_dir/" From 51e200226075f6abc864251548ceebd14fc84950 Mon Sep 17 00:00:00 2001 From: Shayon Shakoorzadeh Date: Wed, 24 Apr 2024 15:30:18 -0400 Subject: [PATCH 08/11] commit to switch branches --- gcmpy/scripts/model.py | 43 ++++++++++++++++++++++++---------------- gcmpy/scripts/utility.py | 17 ++++++++-------- 2 files changed, 35 insertions(+), 25 deletions(-) diff --git a/gcmpy/scripts/model.py b/gcmpy/scripts/model.py index 86970dec..bfa2fa3f 100644 --- a/gcmpy/scripts/model.py +++ b/gcmpy/scripts/model.py @@ -6,6 +6,7 @@ from utility import envdict, pathdict import math, os, shutil, tempfile, yaml from pathlib import Path +from jinja2 import Environment, FileSystemLoader, StrictUndefined # combines all models (atmos, ocean, land, gocart) into one big one @@ -395,23 +396,12 @@ def mpistacksettings(self): ####################################################################### # Create directories and copy files over ####################################################################### - def copy_files_into_exp(self): - file_list = ['gcm_run.j', \ - 'gcm_post.j', \ - 'gcm_archive.j', \ - 'gcm_regress.j', \ - 'gcm_plot.tmpl', \ - 'gcm_quickplot.csh', \ - 'gcm_moveplot.j', \ - 'gcm_forecast.tmpl', \ - 'gcm_forecast.setup', \ - 'gcm_emip.setup', \ - 'CAP.rc.tmpl', \ - 'AGCM.rc.tmpl', \ - 'HISTORY.rc.tmpl', \ - 'logging.yaml', \ - 'fvcore_layout.rc'] - + def copy_files_into_exp(self, file_list[]): + for file in file_list: + if file[-5:] == '.tmpl': + shutil.copy(f"{pathdict['GEOSgcm_App']}/{file}", f"{self.exp_dir}/{file[:-5]}") + else: + shutil.copy(f"{pathdict['GEOSgcm_App']}/{file}", f"{self.exp_dir}/{file}") mymodel = model() mymodel.config_models() @@ -423,3 +413,22 @@ def copy_files_into_exp(self): mymodel.create_dotfile(f"{os.environ.get('HOME')}/.GROUProot", answerdict['group_root'].q_answer) mymodel.RC_setup() mymodel.mpistacksettings() +file_list['gcm_run.j', \ + 'gcm_post.j', \ + 'gcm_archive.j', \ + 'gcm_regress.j', \ + 'gcm_plot.tmpl', \ + 'gcm_quickplot.csh', \ + 'gcm_moveplot.j', \ + 'gcm_forecast.tmpl', \ + 'gcm_forecast.setup', \ + 'gcm_emip.setup', \ + 'CAP.rc.tmpl', \ + 'AGCM.rc.tmpl', \ + 'HISTORY.rc.tmpl', \ + 'logging.yaml', \ + 'fvcore_layout.rc'] + +mymodel.copy_files_into_exp(file_list) + + diff --git a/gcmpy/scripts/utility.py b/gcmpy/scripts/utility.py index 7e2fc1c5..9d60c84a 100644 --- a/gcmpy/scripts/utility.py +++ b/gcmpy/scripts/utility.py @@ -55,11 +55,12 @@ def cleanup(): # set up envirnoment dictionary for later envdict = {} pathdict = {} # Start empty - cannot reference self before initialization (<--what are you yapping about??) 
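One caveat with the pathdict block above: os.getcwd() only resolves to the scripts directory when gcmpy is launched from there, which is also what the '../yaml/...' lookups in process_questions.py assume. As an illustration only (this is not code from the patch), the same layout could be derived from the script's own location so the lookups work from any working directory:

    import os

    pathdict = {}
    pathdict['scripts'] = os.path.dirname(os.path.abspath(__file__))  # instead of os.getcwd()
    pathdict['gcmpy']   = os.path.dirname(pathdict['scripts'])
    pathdict['yaml']    = os.path.join(pathdict['gcmpy'], 'yaml')
    pathdict['bin']     = os.path.dirname(pathdict['gcmpy'])
    pathdict['install'] = os.path.dirname(pathdict['bin'])
    pathdict['etc']     = os.path.join(pathdict['install'], 'etc')
    pathdict['GEOSgcm'] = os.path.dirname(pathdict['install'])
    pathdict['build']   = os.path.join(pathdict['GEOSgcm'], 'build')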
-pathdict['scripts'] = os.getcwd() -pathdict['gcmpy'] = os.path.dirname(pathdict['scripts']) -pathdict['yaml'] = os.path.join(pathdict['gcmpy'], 'yaml') -pathdict['bin'] = os.path.dirname(pathdict['gcmpy']) -pathdict['install'] = os.path.dirname(pathdict['bin']) -pathdict['etc'] = os.path.join(pathdict['install'], 'etc') -pathdict['GEOSgcm'] = os.path.dirname(pathdict['install']) -pathdict['build'] = os.path.join(pathdict['GEOSgcm'], 'build') +pathdict['scripts'] = os.getcwd() +pathdict['gcmpy'] = os.path.dirname(pathdict['scripts']) +pathdict['yaml'] = os.path.join(pathdict['gcmpy'], 'yaml') +pathdict['bin'] = os.path.dirname(pathdict['gcmpy']) +pathdict['install'] = os.path.dirname(pathdict['bin']) +pathdict['etc'] = os.path.join(pathdict['install'], 'etc') +pathdict['GEOSgcm'] = os.path.dirname(pathdict['install']) +pathdict['build'] = os.path.join(pathdict['GEOSgcm'], 'build') +pathdict['GEOSgcm_App'] = os.path.join(pathdict['GEOSgcm'], 'src/Applications/@GEOSgcm_App') From 4a95c497731f0ccd8defbbbe54ceb7616565e691 Mon Sep 17 00:00:00 2001 From: Shayon Shakoorzadeh Date: Wed, 12 Jun 2024 13:17:01 -0400 Subject: [PATCH 09/11] dir setup --- gcmpy/scripts/.model.py.swp | Bin 0 -> 20480 bytes .../__pycache__/atmosphere.cpython-312.pyc | Bin 0 -> 11467 bytes gcmpy/scripts/__pycache__/env.cpython-312.pyc | Bin 0 -> 4668 bytes .../generate_question.cpython-312.pyc | Bin 0 -> 2995 bytes .../__pycache__/gocart.cpython-312.pyc | Bin 0 -> 2472 bytes .../scripts/__pycache__/land.cpython-312.pyc | Bin 0 -> 3150 bytes .../scripts/__pycache__/ocean.cpython-312.pyc | Bin 0 -> 12294 bytes .../process_questions.cpython-312.pyc | Bin 0 -> 12394 bytes .../__pycache__/utility.cpython-312.pyc | Bin 0 -> 3732 bytes gcmpy/scripts/model.py | 106 ++++++++++++++---- gcmpy/scripts/ocean.py | 1 + gcmpy/yaml/directory_setup.yaml | 7 -- gcmpy/yaml/ocean_model.yaml | 10 +- 13 files changed, 94 insertions(+), 30 deletions(-) create mode 100644 gcmpy/scripts/.model.py.swp create mode 100644 gcmpy/scripts/__pycache__/atmosphere.cpython-312.pyc create mode 100644 gcmpy/scripts/__pycache__/env.cpython-312.pyc create mode 100644 gcmpy/scripts/__pycache__/generate_question.cpython-312.pyc create mode 100644 gcmpy/scripts/__pycache__/gocart.cpython-312.pyc create mode 100644 gcmpy/scripts/__pycache__/land.cpython-312.pyc create mode 100644 gcmpy/scripts/__pycache__/ocean.cpython-312.pyc create mode 100644 gcmpy/scripts/__pycache__/process_questions.cpython-312.pyc create mode 100644 gcmpy/scripts/__pycache__/utility.cpython-312.pyc diff --git a/gcmpy/scripts/.model.py.swp b/gcmpy/scripts/.model.py.swp new file mode 100644 index 0000000000000000000000000000000000000000..9828119ed456ce9298323498d949b7ee20b6c8d8 GIT binary patch literal 20480 zcmeHPU5p!76`oSk(9!~G_z{2YjjiHc>e#zU6E%yr>elh@x?QhVwv$b?O{e4W*q&rO z`TdApt56s4e9?cjkJ= zv$LBtN_l`8>9cq4J#+54XYM)oo^$WIF?FO=W(RXq60Ubi(o_FfYNdaC>CR7nP?8*{ zqnxxXo2s_En!2M}>zX~icRMOc%C~zhr<-q@jeYy_f_Z+SSgEzuZeBMXS22uyLpSrd z?+x;fYU@4M$#<=$X5@NRloc?UjQM;Tm5pu|L#crrGEX5{`W(XdcQdNN;HsYAkjdg zfkXp|1`-V<8b~ydXdux*qJcyM|Cbt2nv!$?JwHS7GWP#P{Qu%RCFwEXo4{v*lfX^D zdx6*9AxVz_4+9SYeV_xZ03QKfy-Jdv0UiME1=fHYfUAH%UMWdG1ik_^fhpi};OyJM z6W9P&fZKpufh=$(@YgFO=}F)T;0*8}@CD!)a1*c_V8FG21f0EGlD-3c2Dlrj0q+JL zf14z&10MrsfP=t?fJ=d=cS_RFfzJWQfTO^Nf#)uhq-TN81FOIh;1b|>J0Js~0}H@T z-~}Wqz7O<(n}J=xKarex8Tb-V1FFC^z%xj?`~tWGxEk02(0ll)pSyTt1CoWVH0$;a zS8v-^zsGFtuD)(-4pUraXo}-9cf(>0MLnsRP1e&KDfA>8yD?>-Oa>X7WnIPX;Vh&Z|duMQ){xuAVTan%6Ov~2B9k=QKSn(TIyV$Y36!USKYOgDHm1_JE`MQ 
z4AH(CFT;O<{rdy7{IZsuqK;rmlD*(RQ6ulwnZKfP{d83&mDPDH}b-cyhy#~g&lb-^B95L zkr#Ox@!DnZHBUyv;ph$CHy88R z@v`U8qL7t)o{@m<18f>#s{r2tz{Ui&9?rZl3{*>6@2sqie6l70-w&-Jv_=maS@;Tt2b7PpoNQ zy&%>c->hkoiHl;*36&TVYfdUeqN7i&=~t-LOJYs;Gn3ZdOWx4}rj=FRF)y~W8ggmm(jmKq+^aO|OV5HfxD+1gSB>5X+*HdEG quLsZ#vea2v_N$6!aA7^Jcg$k>i7q#}`pUpRmiwA|-e`tfmVqJ-t}G>pNj->>(^%g_B% zMpnE4;>V1qgL&i-4@Y}?h!77l*Nzu_aEJKl9R>B_iz51edx##$gFnPW?>u;#N6eQ; z><}Nl^PoO_dBhL#&^wAajUwTTBJnGI^!a=wiH|0e?*D;2!a{{$yS1J@+RK6DKAawM zLdAg0h(S3ZhUB2g${{f2*_H_j=6^4PEyT3}F0PtL;Ot!fN@=N5%bK{7ovGI~wIW%np_|z^ zTbeRRvtUtgX3Yv#>z0{qSgNL4%}l-N^0c63vX9+Q$C-V}AZ%=o0IZ>>$s?cIckKuH zjd$1Ib&{tZ9dVKqU$I}uzKl7^^G~?*dtios8&$=m-tD@wbH#kVfH@H1D3L=|7gbI1 zs)}PEY8QcsyNf_1gigL9e0=xg2MZhGy67ZFAHCuvGhZG3dg#j`CprCuo5l$+E~NWi zu3Xlono=&ie7RgR+8!KU zdz(Oxz&o8$zN47k9vmXtp|wi{=rsQ<%&^mY02e&}#A9!&zXBRR)q%J`c0iyzAP^of z++@>XmseIRN}Zn7E?+FT2*PjgbLz|L5Y_6CahU{gpjjP~$qc#b2*Kn4#UQ0)FMWvW zS&Ctb5sDneD8crn%OHy!AKGMoU)n_JSi$8>v&BnSuN7u4=k1YkK@v>GYSaafaKfU2 z1*@tEgptDB>}8&}x#)r^Eh*;&dr-J;&@xDxu&7B(e*x`GNNssOj@MQ ztO}}b8G^D}SFl=BbW4y`tW+!mH+g%qwcJ$orG_S9;JP4`6ia}RNHq1O=$kYiV4lYl>tlLdDQ6sbUG2CCd`7RgGHRH1u(yR54&)lx`||)xf4TE`XC# zRcabK>Bx;|OR`eU@IR1qk!D@Cj-e9XX{V&&$XzC@70V4NtEz>E!ET=xZj9QOy`+xq zpTUezJ_Jk5zXHI$$0hD5pRU|pdGOA|8_w{g(>M9#^bM!)#&-YEZjeb1KRt2k;VtLH z8)R~N`^;t17npzbzqiW1+ZXkeDvjj_pDW+#pY zEM_+mPFl>5C>(7tKlY($;?8@YL}=JucPb$UCC6(pfHzf#9Jl-jl~~XwGya3R87SbF zK%NfZm!ZRitecD*teH#x+wcgMR8xTpDY)FJkNvyO(9HBQI0ep3@_9&WcBgWWf6B%G zM-n+O?|R*K0tZE_btOjb2HLt4!=oTf2k{B$+(8;0iM|st%@n^1T7RBLy)+gjOVWIa z0;k&dQY+B|7GuQEaR8F(;6J#N4tMfl<$G?dD`C)gHTu*-Ai9s5h`yn`lX>8HMk>G$ zy-~LZePOg@Sy*k760D(ZE=~_eAsxZwEse=%BR)dlBmoj5o*+OL9pC`169sZnn#TZc zqh}$G?SFPG0Kft4@eG^rXhW+SX?;7mEd7*lLrtlsxxt&N?nYGIgc8C`u_|t)bwj!A z&4#MU_KSypn7A?DS4ukO1`N{;noZN?>Y8McYr|u<7lWdINQjL`AYwPX1lxTiWAb;& zjlk_kUaM|s*--TZ`TopJd#~V&_)Rb-oH0iLcY+MV{1ZLDg`WQ@Jjh(wMF2mI9cL1| z2w>+TiY3+}+W_2lYUJ-PZ=~1LpZ$J~yYqsBp4(0hZ;Y;wZe-W9k6!)atTT3DD|Hcc z#~n1#){U=^KV1DH;*9?Bn^Rk<8PFbcP=8zNtvut5o!?4LgYM|xlHID)o7t_@xdZ!o vKBLa)xo`4YskeHq+YiPcsBhuiRw@Sze;F!q`(*n1R_X?*U;GXorzhaQ4@@11 literal 0 HcmV?d00001 diff --git a/gcmpy/scripts/model.py b/gcmpy/scripts/model.py index bfa2fa3f..7f373228 100644 --- a/gcmpy/scripts/model.py +++ b/gcmpy/scripts/model.py @@ -3,7 +3,7 @@ from land import land from gocart import gocart from env import answerdict, linkx -from utility import envdict, pathdict +from utility import envdict, pathdict, color import math, os, shutil, tempfile, yaml from pathlib import Path from jinja2 import Environment, FileSystemLoader, StrictUndefined @@ -391,44 +391,106 @@ def mpistacksettings(self): # retrieve config from correlating mpi setting being used mpi_config = mpidict.get(envdict['mpi']) - print(mpi_config) - ####################################################################### # Create directories and copy files over ####################################################################### - def copy_files_into_exp(self, file_list[]): + # A little helper function for copying files and displaying the info to the user + def copy_helper(self, src, destination, filename): + shutil.copy(src, destination) + print(f"Creating {color.RED}{filename}{color.RESET} for Experiment: {answerdict['experiment_id'].q_answer}") + + def copy_files_into_exp(self, file_list): + print("\n\n\n") + for file in file_list: if file[-5:] == '.tmpl': - shutil.copy(f"{pathdict['GEOSgcm_App']}/{file}", f"{self.exp_dir}/{file[:-5]}") + self.copy_helper(f"{pathdict['GEOSgcm_App']}/{file}", f"{self.exp_dir}/{file[:-5]}", file) else: - 
shutil.copy(f"{pathdict['GEOSgcm_App']}/{file}", f"{self.exp_dir}/{file}") + self.copy_helper(f"{pathdict['GEOSgcm_App']}/{file}", f"{self.exp_dir}/{file}", file) + + # These files will be added if user chose to run coupled, regardless of ocean model selected. + if self.ocean.coupled == True: + self.copy_helper(f"{pathdict['install']}/coupled_diagnostics/g5lib/plotcon.j", f"{self.exp_dir}/plotcon.j", "plotcon.j") + self.copy_helper(f"{pathdict['install']}/coupled_diagnostics/g5lib/confon.py", f"{self.exp_dir}/__init__.py", "confon.py") + + if self.ocean.name == 'MOM5': + self.copy_helper(f"{pathdict['etc']}/MOM5/geos5/{self.ocean.IM}x{self.ocean.JM}/input.nml", f"{self.exp_dir}/input.nml", "input.nml") + self.copy_helper(f"{pathdict['etc']}/MOM5/geos5/{self.ocean.IM}x{self.ocean.JM}/diag_table", f"{self.exp_dir}/diag_table.nml", "diag_table") + self.copy_helper(f"{pathdict['etc']}/MOM5/geos5/{self.ocean.IM}x{self.ocean.JM}/field_table", f"{self.exp_dir}/field_table.nml", "field_table") + elif self.ocean.name == 'MOM6': + self.copy_helper(f"{pathdict['etc']}/MOM6/mom6_app/{self.ocean.IM}x{self.ocean.JM}/MOM_input", f"{self.exp_dir}/MOM_input", "MOM_input") + self.copy_helper(f"{pathdict['etc']}/MOM6/mom6_app/{self.ocean.IM}x{self.ocean.JM}/MOM_override", f"{self.exp_dir}/MOM_override", "MOM_override") + self.copy_helper(f"{pathdict['etc']}/MOM6/mom6_app/{self.ocean.IM}x{self.ocean.JM}/input.nml", f"{self.exp_dir}/input.nml", "input.nml") + self.copy_helper(f"{pathdict['etc']}/MOM6/mom6_app/{self.ocean.IM}x{self.ocean.JM}/diag_table", f"{self.exp_dir}/diag_table", "diag_table") + self.copy_helper(f"{pathdict['etc']}/MOM6/mom6_app/{self.ocean.IM}x{self.ocean.JM}/field_table", f"{self.exp_dir}/field_table", "field_table") + + if self.ocean.seaice_model == 'CICE6': + self.copy_helper(f"{pathdict['ect']}/CICE6/cice6_app/{self.ocean.IM}x{self.ocean.JM}/ice_in", f"{self.exp_dir}/ice_in") + + print(f"{color.GREEN}Done!{color.RESET}\n") + + + ####################################################################### + # Produce Final script and .rc files + ####################################################################### + + # THIS WHOLE SECTION IS WILDLY OUT OF DATE, HOWEVER I KEPT IT AS IT WAS + # IN THE ORIGINAL SCRIPT FOR NOW + def restarts(self): + # comment or un-comment restarts based on exp configuration + # --------------------------------------------------------- + rsnames = {'H2O': False, + 'MAM': False, + 'CARMA': False, + 'GMICHEM': False, + 'STRATCHEM': False} + rstypes = ['INTERNAL','IMPORT'] + + # Load Jinja2 template + with open(f"{answerdict['exp_dir'].q_answer}/AGCM.rc", "r") as file: + file_content = file.read() + #file = Template(file_content) + + # Template in a "#" if restart is set to false + for rst in rsnames: + for typ in rstypes: + rst_string = f"{rst}_{typ}" + comment = "#" if not rsnames[rst] else "" + file_content = file_content.replace(rst_string, f"{comment}{rst_string}") + + with open(f"{answerdict['exp_dir'].q_answer}/AGCM.rc", "w") as file: + file.write(file_content) + + + mymodel = model() mymodel.config_models() #mymodel.print_all_vars() mymodel.set_nodes() mymodel.set_stuff() -mymodel.create_dotfile(f"{os.environ.get('HOME')}/.HOMDIRroot", answerdict['home_dir'].q_answer) mymodel.create_dotfile(f"{os.environ.get('HOME')}/.EXPDIRroot", answerdict['exp_dir'].q_answer) mymodel.create_dotfile(f"{os.environ.get('HOME')}/.GROUProot", answerdict['group_root'].q_answer) mymodel.RC_setup() mymodel.mpistacksettings() -file_list['gcm_run.j', \ - 'gcm_post.j', \ - 
'gcm_archive.j', \ - 'gcm_regress.j', \ - 'gcm_plot.tmpl', \ - 'gcm_quickplot.csh', \ - 'gcm_moveplot.j', \ - 'gcm_forecast.tmpl', \ - 'gcm_forecast.setup', \ - 'gcm_emip.setup', \ - 'CAP.rc.tmpl', \ - 'AGCM.rc.tmpl', \ - 'HISTORY.rc.tmpl', \ - 'logging.yaml', \ - 'fvcore_layout.rc'] +file_list = ['gcm_run.j', + 'gcm_post.j', + 'gcm_archive.j', + 'gcm_regress.j', + 'gcm_plot.tmpl', + 'gcm_quickplot.csh', + 'gcm_moveplot.j', + 'gcm_forecast.tmpl', + 'gcm_forecast.setup', + 'gcm_emip.setup', + 'CAP.rc.tmpl', + 'AGCM.rc.tmpl', + 'HISTORY.rc.tmpl', + 'logging.yaml', + 'fvcore_layout.rc'] mymodel.copy_files_into_exp(file_list) +mymodel.restarts() + diff --git a/gcmpy/scripts/ocean.py b/gcmpy/scripts/ocean.py index bf53153f..2f832bce 100644 --- a/gcmpy/scripts/ocean.py +++ b/gcmpy/scripts/ocean.py @@ -6,6 +6,7 @@ class ocean: def __init__(self): self.name = answerdict["OM_name"].q_answer self.coupled = answerdict["OM_coupled"].q_answer + self.seaice_model = answerdict["OM_seaice_model"].q_answer self.gridtype = "" self.gridtype_abrv = "" self.gridname = "" diff --git a/gcmpy/yaml/directory_setup.yaml b/gcmpy/yaml/directory_setup.yaml index c4e7456e..4edbcfe2 100644 --- a/gcmpy/yaml/directory_setup.yaml +++ b/gcmpy/yaml/directory_setup.yaml @@ -5,13 +5,6 @@ history_template: default_answer: '' follows_up: '' -home_dir: - type: 'path' - prompt: 'Enter Desired Location for the HOME Directory (to contain scripts and RC files):' - choices: '' - default_answer: '' - follows_up: '' - exp_dir: type: 'path' prompt: 'Enter Desired Location for the EXPERIMENT Directory (to contain model output and restart files):' diff --git a/gcmpy/yaml/ocean_model.yaml b/gcmpy/yaml/ocean_model.yaml index 57e321dc..91c69f32 100644 --- a/gcmpy/yaml/ocean_model.yaml +++ b/gcmpy/yaml/ocean_model.yaml @@ -63,4 +63,12 @@ OM_vertical_res: follows_up: - ['OM_MOM_horizontal_res', ['any']] - ['OM_MIT_horizontal_res', ['any']] -# ---------------------------------------------------------------------------------------------- \ No newline at end of file + +OM_seaice_model: + type: 'select' + prompt: 'Choose a seaice model:' + choices: ['CICE4', 'CICE6'] + default_answer: '' + follows_up: + - ['OM_coupled', ['True']] +# ---------------------------------------------------------------------------------------------- From 230641bc44e92aac8dc6a160674d251c476412e9 Mon Sep 17 00:00:00 2001 From: Shayon Shakoorzadeh Date: Tue, 9 Jul 2024 10:55:04 -0400 Subject: [PATCH 10/11] stashing for a moment --- AGCM.rc.tmpl | 388 +++++++++++++------------- CAP.rc.tmpl | 12 +- HISTORY.rc.tmpl | 136 +++++----- fvcore_layout.rc | 14 +- gcm_archive.j | 22 +- gcm_emip.setup | 2 +- gcm_forecast.setup | 12 +- gcm_forecast.tmpl | 92 +++---- gcm_moveplot.j | 30 +-- gcm_plot.tmpl | 32 +-- gcm_post.j | 34 +-- gcm_regress.j | 232 ++++++++-------- gcm_run.j | 658 ++++++++++++++++++++++----------------------- gcmpy/jinjafy.py | 250 +++++++++++++++++ gcmpy/sync.sh | 5 + 15 files changed, 1087 insertions(+), 832 deletions(-) create mode 100755 gcmpy/jinjafy.py diff --git a/AGCM.rc.tmpl b/AGCM.rc.tmpl index 162aa713..49e1a345 100644 --- a/AGCM.rc.tmpl +++ b/AGCM.rc.tmpl @@ -1,54 +1,54 @@ # Atmospheric Model Configuration Parameters # ------------------------------------------ -NX: @NX -NY: @NY +NX: {{ NX }} +NY: {{ NY }} -IOSERVER_NODES: @NUM_OSERVER_NODES -NUM_BACKEND_PES: @NUM_BACKEND_PES +IOSERVER_NODES: {{ NUM_OSERVER_NODES }} +NUM_BACKEND_PES: {{ NUM_BACKEND_PES }} -AGCM_IM: @AGCM_IM -AGCM_JM: @AGCM_JM -AGCM_LM: @AGCM_LM -AGCM_GRIDNAME: @AGCM_GRIDNAME +AGCM_IM: {{ 
AGCM_IM }} +AGCM_JM: {{ AGCM_JM }} +AGCM_LM: {{ AGCM_LM }} +AGCM_GRIDNAME: {{ AGCM_GRIDNAME }} -AGCM.GRID_TYPE: @GRID_TYPE -AGCM.GRIDNAME: @AGCM_GRIDNAME -@CUBE_AGCM AGCM.NF: @AGCM_NF -AGCM.LM: @AGCM_LM -AGCM.IM_WORLD: @AGCM_IM -@LATLON_AGCM AGCM.JM_WORLD: @AGCM_JM +AGCM.GRID_TYPE: {{ GRID_TYPE }} +AGCM.GRIDNAME: {{ AGCM_GRIDNAME }} +{{ CUBE_AGCM }} AGCM.NF: {{ AGCM_NF }} +AGCM.LM: {{ AGCM_LM }} +AGCM.IM_WORLD: {{ AGCM_IM }} +{{ LATLON_AGCM }} AGCM.JM_WORLD: {{ AGCM_JM }} -@CONUS AGCM.STRETCH_FACTOR: @STRETCH_FACTOR -@CONUS AGCM.TARGET_LAT: 39.5 -@CONUS AGCM.TARGET_LON: -98.35 +{{ CONUS }} AGCM.STRETCH_FACTOR: {{ STRETCH_FACTOR }} +{{ CONUS }} AGCM.TARGET_LAT: 39.5 +{{ CONUS }} AGCM.TARGET_LON: -98.35 ########################################################### # long physics DT options # ---------------------------------------- ADJUST_DT: .FALSE. CHEMISTRY_RUN_AT_INTERVAL_START: .TRUE. -CHEMISTRY_DT: @LONG_DT -GOCART_DT: @LONG_DT -HEMCO_DT: @LONG_DT -GF_DT: @LONG_DT -UW_DT: @LONG_DT +CHEMISTRY_DT: {{ LONG_DT }} +GOCART_DT: {{ LONG_DT }} +HEMCO_DT: {{ LONG_DT }} +GF_DT: {{ LONG_DT }} +UW_DT: {{ LONG_DT }} ########################################################### ########################################################### # dynamics options # ---------------------------------------- -DYCORE: @DYCORE -@FV_HWT FV3_CONFIG: HWT +DYCORE: {{ DYCORE }} +{{ FV_HWT }} FV3_CONFIG: HWT AdvCore_Advection: 0 ########################################################### ########################################################### # radiation options # ---------------------------------------- - SOLAR_DT: @SOLAR_DT - IRRAD_DT: @IRRAD_DT -SATSIM_DT: @IRRAD_DT + SOLAR_DT: {{ SOLAR_DT }} + IRRAD_DT: {{ IRRAD_DT }} +SATSIM_DT: {{ IRRAD_DT }} SOLARAvrg: 0 IRRADAvrg: 0 EOT: .TRUE. @@ -58,21 +58,21 @@ ORBIT_ANAL2B: .TRUE. 
########################################################### # BACM_1M microphysics options # ---------------------------------------- -@BACM_1M_CLDMICR_OPTION: BACM_1M +{{ BACM_1M_ }}CLDMICR_OPTION: BACM_1M ########################################################### ########################################################### # GFDL_1M microphysics options # ---------------------------------------- -@GFDL_1M_CLDMICR_OPTION: GFDL_1M -@GFDL_1M_HYDROSTATIC: @GFDL_HYDRO +{{ GFDL_1M_ }}CLDMICR_OPTION: GFDL_1M +{{ GFDL_1M_ }}HYDROSTATIC: {{ GFDL_HYDRO }} ########################################################### ########################################################### # MGB2_2M microphysics options # ---------------------------------------- -@MGB2_2M_CLDMICR_OPTION: MGB2_2M -@MGB2_2M_MGVERSION: @MGVERSION +{{ MGB2_2M_ }}CLDMICR_OPTION: MGB2_2M +{{ MGB2_2M_ }}MGVERSION: @MGVERSION ########################################################### ########################################################### @@ -84,7 +84,7 @@ SHALLOW_OPTION: UW ########################################################### # convection scheme options # ---------------------------------------- -CONVPAR_OPTION: @CONVPAR_OPTION +CONVPAR_OPTION: {{ CONVPAR_OPTION }} #USE_GF2020: 1 # Convective plumes to be activated (1 true, 0 false) DEEP: 1 @@ -102,42 +102,42 @@ CLOSURE_CONGESTUS: 3 ########################################################### # TRB/GWD Parameterization # ------------------------ -@FV_HWT RAYLEIGH_TAU1: 0.0 +{{ FV_HWT }} RAYLEIGH_TAU1: 0.0 ########################################################### -@HIST_GOCART########################################################### -@HIST_GOCART# Enable wet scavenging -@HIST_GOCART# NOTE: this section should be commented out when running gocart.data -@HIST_GOCARTMCHEMTRI_increments:: -@HIST_GOCARTDU::DU default -@HIST_GOCARTSS::SS default -@HIST_GOCARTSU::SO4 default -@HIST_GOCARTCA.bc::CA.bcphilic default -@HIST_GOCARTCA.br::CA.brphilic default -@HIST_GOCARTCA.oc::CA.ocphilic default -@HIST_GOCARTNI::NO3an1 "NI::NO3an2,NI::NO3an3" -@HIST_GOCARTPCHEM::OX default -@HIST_GOCART:: -@HIST_GOCART########################################################### + {{ HIST_GOCART }}########################################################### + {{ HIST_GOCART }}# Enable wet scavenging + {{ HIST_GOCART }}# NOTE: this section should be commented out when running gocart.data + {{ HIST_GOCART }}MCHEMTRI_increments:: + {{ HIST_GOCART }}DU::DU default + {{ HIST_GOCART }}SS::SS default + {{ HIST_GOCART }}SU::SO4 default + {{ HIST_GOCART }}CA.bc::CA.bcphilic default + {{ HIST_GOCART }}CA.br::CA.brphilic default + {{ HIST_GOCART }}CA.oc::CA.ocphilic default + {{ HIST_GOCART }}NI::NO3an1 "NI::NO3an2,NI::NO3an3" + {{ HIST_GOCART }}PCHEM::OX default + {{ HIST_GOCART }}:: + {{ HIST_GOCART }}########################################################### ########################################################### # Ocean Model Configuration Parameters # ------------------------------------ -OGCM.GRID_TYPE: @OGCM_GRID_TYPE -OGCM.GRIDNAME: @OGCM_GRIDNAME -OGCM.IM_WORLD: @OGCM_IM -OGCM.JM_WORLD: @OGCM_JM -OGCM.LM: @OGCM_LM -@COUPLED OGCM.NX: @OGCM_NX -@COUPLED OGCM.NY: @OGCM_NY -@MOM5 OGCM.GRIDSPEC: MAPL_Tripolar.nc -@MOM6 OGCM.GRIDSPEC: MAPL_Tripolar.nc -@MIT OGCM.GRIDSPEC: mit.ascii -@MIT OCEAN_DIR: mitocean_run -@DATAOCEAN @LATLON_OGCM OGCM.POLE: 'PE' -@DATAOCEAN @LATLON_OGCM OGCM.DATELINE: 'DE' -@DATAOCEAN @CUBE_OGCM OGCM.NF: @OGCM_NF -OGCM_RUN_DT: @OCEAN_DT +OGCM.GRID_TYPE: {{ OGCM_GRID_TYPE }} 
+OGCM.GRIDNAME: {{ OGCM_GRIDNAME }} +OGCM.IM_WORLD: {{ OGCM_IM }} +OGCM.JM_WORLD: {{ OGCM_JM }} +OGCM.LM: {{ OGCM_LM }} +{{ COUPLED }} OGCM.NX: {{ OGCM_NX }} +{{ COUPLED }} OGCM.NY: {{ OGCM_NY }} +{{ MOM5 }} OGCM.GRIDSPEC: MAPL_Tripolar.nc +{{ MOM6 }} OGCM.GRIDSPEC: MAPL_Tripolar.nc +{{ MIT }} OGCM.GRIDSPEC: mit.ascii +{{ MIT }} OCEAN_DIR: mitocean_run +{{ DATAOCEAN }} {{ LATLON_OGCM }} OGCM.POLE: 'PE' +{{ DATAOCEAN }} {{ LATLON_OGCM }} OGCM.DATELINE: 'DE' +{{ DATAOCEAN }} {{ CUBE_OGCM }} OGCM.NF: {{ OGCM_NF }} +OGCM_RUN_DT: {{ OCEAN_DT }} ########################################################### ############################################################################################## #### #### @@ -165,7 +165,7 @@ PRINTRC: 1 #--------------------------------------------------------------- PARALLEL_READFORCING: 0 # The num_readers should be set to multiple of 6 if READ_RESTART_BY_FACE is set to YES -NUM_READERS: @NUM_READERS +NUM_READERS: {{ NUM_READERS }} # The num_writers should be set to multiple of 6 if WRITE_RESTART_BY_FACE is set to YES NUM_WRITERS: 1 @@ -176,16 +176,16 @@ READ_RESTART_BY_FACE: NO WRITE_RESTART_BY_FACE: NO # Write restarts by oserver (Default: NO) -WRITE_RESTART_BY_OSERVER: @RESTART_BY_OSERVER +WRITE_RESTART_BY_OSERVER: {{ RESTART_BY_OSERVER }} # Active observer when desired # ---------------------------- -BKG_FREQUENCY: @OBSERVER_FRQ +BKG_FREQUENCY: {{ OBSERVER_FRQ }} # Resolution of state passed to atmos analysis # -------------------------------------------- -ANA_GRIDNAME: PC@AGCM_IMx@AGCM_JM-DC -ANA_LM: @AGCM_LM +ANA_GRIDNAME: PC{{ AGCM_IM }}x{{ AGCM_JM }}-DC +ANA_LM: {{ AGCM_LM }} # Restart Record Parameters # ------------------------- @@ -317,7 +317,7 @@ RECORD_REF_TIME: >>>REFTIME<<< >>>FCSTIME<<< # Typical MERRA-2 Regular REPLAY Configuration # -------------------------------------------- #M2 REPLAY_ANA_EXPID: MERRA-2 -#M2 REPLAY_ANA_LOCATION: @M2_REPLAY_ANA_LOCATION +#M2 REPLAY_ANA_LOCATION: {{ M2_REPLAY_ANA_LOCATION }} #M2 REPLAY_MODE: Regular #M2 REPLAY_FILE: ana/MERRA2_all/Y%y4/M%m2/MERRA2.ana.eta.%y4%m2%d2_%h2z.nc4 # ----------------------------------------------------------------- @@ -345,23 +345,23 @@ RECORD_REF_TIME: >>>REFTIME<<< >>>FCSTIME<<< # Typical Exact REPLAY Configuration # ---------------------------------- # REPLAY_MODE: Exact -# REPLAY_FILE: rs/Y%y4/M%m2/@REPLAY_ANA_EXPID.agcm_import_rst.%y4%m2%d2_%h2z.bin -# REPLAY_FILE09: rs/Y%y4/M%m2/@REPLAY_ANA_EXPID.agcm09_import_rst.%y4%m2%d2_%h2z.bin +# REPLAY_FILE: rs/Y%y4/M%m2/{{ REPLAY_ANA_EXPID }}.agcm_import_rst.%y4%m2%d2_%h2z.bin +# REPLAY_FILE09: rs/Y%y4/M%m2/{{ REPLAY_ANA_EXPID }}.agcm09_import_rst.%y4%m2%d2_%h2z.bin # REPLAY_FILE_FREQUENCY: 10800 # REPLAY_FILE_REFERENCE_TIME: 000000 # Typical Regular REPLAY Configuration # ------------------------------------ # REPLAY_MODE: Regular -# REPLAY_FILE: ana/Y%y4/M%m2/@REPLAY_ANA_EXPID.ana.eta.%y4%m2%d2_%h2z.nc4 -# REPLAY_FILE09: ana/Y%y4/M%m2/@REPLAY_ANA_EXPID.ana09.eta.%y4%m2%d2_%h2z.nc4 +# REPLAY_FILE: ana/Y%y4/M%m2/{{ REPLAY_ANA_EXPID }}.ana.eta.%y4%m2%d2_%h2z.nc4 +# REPLAY_FILE09: ana/Y%y4/M%m2/{{ REPLAY_ANA_EXPID }}.ana09.eta.%y4%m2%d2_%h2z.nc4 # REPLAY_FILE_FREQUENCY: 10800 # REPLAY_FILE_REFERENCE_TIME: 000000 # 4DIAU (recreate analysis tendency on the fly) # --------------------------------------------- -#>>>4DIAUDAS<<< AINC_FILE: @EXPID.ana.eta.%y4%m2%d2_%h2%n2z.nc4 +#>>>4DIAUDAS<<< AINC_FILE: {{ EXPID }}.ana.eta.%y4%m2%d2_%h2%n2z.nc4 #>>>4DIAUDAS<<< NUDGE_STATE: YES ##>>>4DIAUDAS<<< TAUANL: 3600 #>>>4DIAUDAS<<< 
AGCM_IMPORT_RESTART_FILE: agcm_import_rst.>>>ANA0YYYYMMDDHHMN<<>>REFTIME<<< >>>FCSTIME<<< # 1 : Catchment # 2 : CatchmentCNCLM40 # ------------------------------------------------------------ -LSM_CHOICE: @LSM_CHOICE +LSM_CHOICE: {{ LSM_CHOICE }} # Apply increments from LDAS? # 0 : no (default) @@ -570,227 +570,227 @@ LSM_CHOICE: @LSM_CHOICE >>>FORCEGCM<<>>FORCEGCM<<>>FORCEGCM<<>>FORCEGCM<<>>FORCEGCM<<>>FORCEGCM<<>>NCSUFFIX<<<' , inst3_3d_asm_Np-.mode: 'instantaneous' , - inst3_3d_asm_Np-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst3_3d_asm_Np-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst3_3d_asm_Np-.frequency: 030000 , inst3_3d_asm_Np-.duration: 030000 , inst3_3d_asm_Np-.end_date: >>>IOEDATE<<< , @@ -209,7 +209,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' inst3_3d_asm_Nv-.regrid_method: 'BILINEAR_MONOTONIC' , inst3_3d_asm_Nv-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , inst3_3d_asm_Nv-.mode: 'instantaneous' , - inst3_3d_asm_Nv-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst3_3d_asm_Nv-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst3_3d_asm_Nv-.frequency: 030000 , inst3_3d_asm_Nv-.duration: 030000 , inst3_3d_asm_Nv-.end_date: >>>IOEDATE<<< , @@ -240,7 +240,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' inst1_2d_asm_Nx-.regrid_method: 'BILINEAR_MONOTONIC' , inst1_2d_asm_Nx-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , inst1_2d_asm_Nx-.mode: 'instantaneous' , - inst1_2d_asm_Nx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst1_2d_asm_Nx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst1_2d_asm_Nx-.frequency: 010000 , inst1_2d_asm_Nx-.duration: 010000 , inst1_2d_asm_Nx-.end_date: >>>IOEDATE<<< , @@ -283,7 +283,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_cld_Nv-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_3d_cld_Nv-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_3d_cld_Nv-.mode: 'time-averaged' , - tavg3_3d_cld_Nv-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_cld_Nv-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_cld_Nv-.frequency: 030000 , tavg3_3d_cld_Nv-.duration: 030000 , tavg3_3d_cld_Nv-.end_date: >>>IOEDATE<<< , @@ -340,7 +340,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_mst_Ne-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_3d_mst_Ne-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_3d_mst_Ne-.mode: 'time-averaged' , - tavg3_3d_mst_Ne-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_mst_Ne-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_mst_Ne-.frequency: 030000 , tavg3_3d_mst_Ne-.duration: 030000 , tavg3_3d_mst_Ne-.end_date: >>>IOEDATE<<< , @@ -361,7 +361,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_mst_Nv-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_3d_mst_Nv-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_3d_mst_Nv-.mode: 'time-averaged' , - tavg3_3d_mst_Nv-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_mst_Nv-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_mst_Nv-.frequency: 030000 , tavg3_3d_mst_Nv-.duration: 030000 , tavg3_3d_mst_Nv-.end_date: >>>IOEDATE<<< , @@ -419,7 +419,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_rad_Nv-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_3d_rad_Nv-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_3d_rad_Nv-.mode: 'time-averaged' , - tavg3_3d_rad_Nv-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_rad_Nv-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_rad_Nv-.frequency: 030000 , tavg3_3d_rad_Nv-.duration: 030000 , tavg3_3d_rad_Nv-.end_date: >>>IOEDATE<<< , @@ -463,7 +463,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_trb_Ne-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_3d_trb_Ne-.template: 
'%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_3d_trb_Ne-.mode: 'time-averaged' , - tavg3_3d_trb_Ne-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_trb_Ne-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_trb_Ne-.frequency: 030000 , tavg3_3d_trb_Ne-.duration: 030000 , tavg3_3d_trb_Ne-.end_date: >>>IOEDATE<<< , @@ -519,7 +519,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_tdt_Nv-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_3d_tdt_Nv-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_3d_tdt_Nv-.mode: 'time-averaged' , - tavg3_3d_tdt_Nv-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_tdt_Nv-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_tdt_Nv-.frequency: 030000 , tavg3_3d_tdt_Nv-.duration: 030000 , tavg3_3d_tdt_Nv-.end_date: >>>IOEDATE<<< , @@ -573,7 +573,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_udt_Nv-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_3d_udt_Nv-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_3d_udt_Nv-.mode: 'time-averaged' , - tavg3_3d_udt_Nv-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_udt_Nv-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_udt_Nv-.frequency: 030000 , tavg3_3d_udt_Nv-.duration: 030000 , tavg3_3d_udt_Nv-.end_date: >>>IOEDATE<<< , @@ -621,7 +621,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_qdt_Nv-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_3d_qdt_Nv-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_3d_qdt_Nv-.mode: 'time-averaged' , - tavg3_3d_qdt_Nv-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_qdt_Nv-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_qdt_Nv-.frequency: 030000 , tavg3_3d_qdt_Nv-.duration: 030000 , tavg3_3d_qdt_Nv-.end_date: >>>IOEDATE<<< , @@ -679,7 +679,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_odt_Nv-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_3d_odt_Nv-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_3d_odt_Nv-.mode: 'time-averaged' , - tavg3_3d_odt_Nv-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_odt_Nv-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_odt_Nv-.frequency: 030000 , tavg3_3d_odt_Nv-.duration: 030000 , tavg3_3d_odt_Nv-.end_date: >>>IOEDATE<<< , @@ -725,7 +725,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_lsf_Nv-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_3d_lsf_Nv-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_3d_lsf_Nv-.mode: 'time-averaged' , - tavg3_3d_lsf_Nv-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_lsf_Nv-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_lsf_Nv-.frequency: 030000 , tavg3_3d_lsf_Nv-.duration: 030000 , tavg3_3d_lsf_Nv-.end_date: >>>IOEDATE<<< , @@ -743,7 +743,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_lsf_Ne-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_3d_lsf_Ne-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_3d_lsf_Ne-.mode: 'time-averaged' , - tavg3_3d_lsf_Ne-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_lsf_Ne-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_lsf_Ne-.frequency: 030000 , tavg3_3d_lsf_Ne-.duration: 030000 , tavg3_3d_lsf_Ne-.end_date: >>>IOEDATE<<< , @@ -759,7 +759,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg1_2d_slv_Nx-.regrid_method: 'BILINEAR_MONOTONIC' , tavg1_2d_slv_Nx-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg1_2d_slv_Nx-.mode: 'time-averaged' , - tavg1_2d_slv_Nx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg1_2d_slv_Nx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg1_2d_slv_Nx-.frequency: 010000 , tavg1_2d_slv_Nx-.duration: 010000 , tavg1_2d_slv_Nx-.end_date: >>>IOEDATE<<< , @@ -811,7 +811,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg1_2d_flx_Nx-.regrid_method: 
'BILINEAR_MONOTONIC' , tavg1_2d_flx_Nx-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg1_2d_flx_Nx-.mode: 'time-averaged' , - tavg1_2d_flx_Nx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg1_2d_flx_Nx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg1_2d_flx_Nx-.frequency: 010000 , tavg1_2d_flx_Nx-.duration: 010000 , tavg1_2d_flx_Nx-.end_date: >>>IOEDATE<<< , @@ -869,7 +869,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg1_2d_rad_Nx-.regrid_method: 'BILINEAR_MONOTONIC' , tavg1_2d_rad_Nx-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg1_2d_rad_Nx-.mode: 'time-averaged' , - tavg1_2d_rad_Nx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg1_2d_rad_Nx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg1_2d_rad_Nx-.frequency: 010000 , tavg1_2d_rad_Nx-.duration: 010000 , tavg1_2d_rad_Nx-.end_date: >>>IOEDATE<<< , @@ -918,7 +918,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg1_2d_lnd_Nx-.descr: '2d,1-Hourly,Time-Averaged,Single-Level,Assimilation,Land Surface Diagnostics' , tavg1_2d_lnd_Nx-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg1_2d_lnd_Nx-.mode: 'time-averaged' , - tavg1_2d_lnd_Nx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg1_2d_lnd_Nx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg1_2d_lnd_Nx-.frequency: 010000 , tavg1_2d_lnd_Nx-.duration: 010000 , tavg1_2d_lnd_Nx-.end_date: >>>IOEDATE<<< , @@ -980,7 +980,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' # tavg1_2d_lfo_Nx-.nbits: 10, tavg1_2d_lfo_Nx-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg1_2d_lfo_Nx-.mode: 'time-averaged' , - tavg1_2d_lfo_Nx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg1_2d_lfo_Nx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg1_2d_lfo_Nx-.frequency: 010000 , tavg1_2d_lfo_Nx-.duration: 010000 , tavg1_2d_lfo_Nx-.end_date: >>>IOEDATE<<< , @@ -1000,7 +1000,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' # inst1_2d_lfo_Nx-.nbits: 10, inst1_2d_lfo_Nx-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , inst1_2d_lfo_Nx-.mode: 'instantaneous' , - inst1_2d_lfo_Nx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst1_2d_lfo_Nx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst1_2d_lfo_Nx-.frequency: 010000 , inst1_2d_lfo_Nx-.duration: 010000 , inst1_2d_lfo_Nx-.end_date: >>>IOEDATE<<< , @@ -1019,7 +1019,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg1_2d_ocn_Nx-.regrid_method: 'BILINEAR_MONOTONIC' , tavg1_2d_ocn_Nx-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg1_2d_ocn_Nx-.mode: 'time-averaged' , - tavg1_2d_ocn_Nx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg1_2d_ocn_Nx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg1_2d_ocn_Nx-.frequency: 010000 , tavg1_2d_ocn_Nx-.duration: 010000 , tavg1_2d_ocn_Nx-.end_date: >>>IOEDATE<<< , @@ -1077,7 +1077,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' inst1_2d_ocn_Nx-.regrid_method: 'BILINEAR_MONOTONIC' , inst1_2d_ocn_Nx-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , inst1_2d_ocn_Nx-.mode: 'instantaneous' , - inst1_2d_ocn_Nx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst1_2d_ocn_Nx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst1_2d_ocn_Nx-.frequency: 010000 , inst1_2d_ocn_Nx-.duration: 010000 , inst1_2d_ocn_Nx-.end_date: >>>IOEDATE<<< , @@ -1298,7 +1298,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' gaas_bkg.sfc.frequency: 030000 , gaas_bkg.sfc.duration: 030000 , gaas_bkg.sfc.mode: 'instantaneous' , - gaas_bkg.sfc.grid_label: PC@HIST_IMx@HIST_JM-DC , + gaas_bkg.sfc.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , gaas_bkg.sfc.fields: 'U10M;V10M' , 'SURFACE' , 'DUEXTTAU' , 'DU' , 'SSEXTTAU' , 'SS' , @@ -1320,7 +1320,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' inst3_2d_gas_Nx-.frequency: 030000 , inst3_2d_gas_Nx-.duration: 030000 
, inst3_2d_gas_Nx-.mode: 'instantaneous' , - inst3_2d_gas_Nx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst3_2d_gas_Nx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst3_2d_gas_Nx-.fields: 'AODANA' , 'GAAS' , 'AODINC' , 'GAAS' , :: @@ -1338,7 +1338,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' inst3_3d_gas_Nv-.frequency: 030000 , inst3_3d_gas_Nv-.duration: 030000 , inst3_3d_gas_Nv-.mode: 'instantaneous' , - inst3_3d_gas_Nv-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst3_3d_gas_Nv-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst3_3d_gas_Nv-.fields: 'DELP' , 'DYN' , 'delp' , 'AIRDENS' , 'CHEMENV' , 'DUINC' , 'GAAS' , @@ -1354,7 +1354,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' inst1_2d_aer_Nx-.descr: '2d,1-Hourly,Instantaneous,Single-Level,Analysis,Aerosol Diagnostics' , inst1_2d_aer_Nx-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , inst1_2d_aer_Nx-.mode: 'instantaneous' , - inst1_2d_aer_Nx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst1_2d_aer_Nx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst1_2d_aer_Nx-.conservative: 1 , inst1_2d_aer_Nx-.splitField: 1 , inst1_2d_aer_Nx-.frequency: 010000 , @@ -1467,7 +1467,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg1_2d_adg_Nx-.descr: '2d,1-Hourly,Time-Averaged,Single-Level,Analysis,Aerosol Diagnostics (extended)' , tavg1_2d_adg_Nx-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg1_2d_adg_Nx-.mode: 'time-averaged' , - tavg1_2d_adg_Nx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg1_2d_adg_Nx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg1_2d_adg_Nx-.frequency: 010000 , tavg1_2d_adg_Nx-.duration: 010000 , tavg1_2d_adg_Nx-.end_date: >>>IOEDATE<<< , @@ -1557,7 +1557,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_2d_chm_Nx-.descr: '2d,3-Hourly,Time-Averaged,Single-Level,Analysis,Constituent Diagnostics' , tavg3_2d_chm_Nx-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_2d_chm_Nx-.mode: 'time-averaged' , - tavg3_2d_chm_Nx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_2d_chm_Nx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_2d_chm_Nx-.frequency: 030000 , tavg3_2d_chm_Nx-.duration: 030000 , tavg3_2d_chm_Nx-.end_date: >>>IOEDATE<<< , @@ -1584,7 +1584,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' inst1_2d_hwl_Nx-.descr: '2d,1-Hourly,Instantaneous,Single-Level,Analysis,Hyperwall' , inst1_2d_hwl_Nx-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , inst1_2d_hwl_Nx-.mode: 'instantaneous' , - inst1_2d_hwl_Nx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst1_2d_hwl_Nx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst1_2d_hwl_Nx-.splitField: 1 , inst1_2d_hwl_Nx-.frequency: 010000 , inst1_2d_hwl_Nx-.duration: 010000 , @@ -1602,7 +1602,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' inst3_3d_aer_Nv-.descr: '3d,3-Hourly,Instantaneous,Model-Level,Analysis,Aerosol Concentrations' , inst3_3d_aer_Nv-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , inst3_3d_aer_Nv-.mode: 'instantaneous' , - inst3_3d_aer_Nv-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst3_3d_aer_Nv-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst3_3d_aer_Nv-.frequency: 030000 , inst3_3d_aer_Nv-.duration: 030000 , inst3_3d_aer_Nv-.ref_time: 000000 , @@ -1655,7 +1655,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' inst3_3d_chm_Nv-.descr: '3d,3-Hourly,Instantaneous,Model-Level,Analysis,Chemistry' , inst3_3d_chm_Nv-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , inst3_3d_chm_Nv-.mode: 'instantaneous' , - inst3_3d_chm_Nv-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst3_3d_chm_Nv-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst3_3d_chm_Nv-.frequency: 030000 , inst3_3d_chm_Nv-.duration: 030000 , inst3_3d_chm_Nv-.ref_time: 000000 , @@ -1675,7 +1675,7 
@@ COLLECTIONS: 'inst3_3d_asm_Np-' inst3_3d_tag_Nv-.descr: '3d,3-Hourly,Instantaneous,Model-Level,Analysis,Tag Tracers' , inst3_3d_tag_Nv-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , inst3_3d_tag_Nv-.mode: 'instantaneous' , - inst3_3d_tag_Nv-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst3_3d_tag_Nv-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst3_3d_tag_Nv-.frequency: 030000 , inst3_3d_tag_Nv-.duration: 030000 , inst3_3d_tag_Nv-.ref_time: 000000 , @@ -1704,7 +1704,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_2d_tag_Nx-.descr: '2d,3-Hourly,Time-Averaged,Single-Level,Analysis,Constituent Diagnostics' , tavg3_2d_tag_Nx-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_2d_tag_Nx-.mode: 'time-averaged' , - tavg3_2d_tag_Nx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_2d_tag_Nx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_2d_tag_Nx-.frequency: 030000 , tavg3_2d_tag_Nx-.duration: 030000 , tavg3_2d_tag_Nx-.end_date: >>>IOEDATE<<< , @@ -1773,7 +1773,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_ctm_Cv-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_3d_ctm_Cv-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_3d_ctm_Cv-.mode: 'time-averaged' , - tavg3_3d_ctm_Cv-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_ctm_Cv-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_ctm_Cv-.frequency: 030000 , tavg3_3d_ctm_Cv-.duration: 030000 , tavg3_3d_ctm_Cv-.end_date: >>>IOEDATE<<< , @@ -1804,7 +1804,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_ctm_Ce-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_3d_ctm_Ce-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_3d_ctm_Ce-.mode: 'time-averaged' , - tavg3_3d_ctm_Ce-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_ctm_Ce-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_ctm_Ce-.frequency: 030000 , tavg3_3d_ctm_Ce-.duration: 030000 , tavg3_3d_ctm_Ce-.end_date: >>>IOEDATE<<< , @@ -1825,7 +1825,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_2d_ctm_Cx-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_2d_ctm_Cx-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_2d_ctm_Cx-.mode: 'time-averaged' , - tavg3_2d_ctm_Cx-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_2d_ctm_Cx-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_2d_ctm_Cx-.frequency: 030000 , tavg3_2d_ctm_Cx-.duration: 030000 , tavg3_2d_ctm_Cx-.end_date: >>>IOEDATE<<< , @@ -1869,7 +1869,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_nav_Nv-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_3d_nav_Nv-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_3d_nav_Nv-.mode: 'time-averaged' , - tavg3_3d_nav_Nv-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_nav_Nv-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_nav_Nv-.frequency: 030000 , tavg3_3d_nav_Nv-.duration: 030000 , tavg3_3d_nav_Nv-.end_date: >>>IOEDATE<<< , @@ -1885,7 +1885,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_nav_Ne-.regrid_method: 'BILINEAR_MONOTONIC' , tavg3_3d_nav_Ne-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg3_3d_nav_Ne-.mode: 'time-averaged' , - tavg3_3d_nav_Ne-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_nav_Ne-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_nav_Ne-.frequency: 030000 , tavg3_3d_nav_Ne-.duration: 030000 , tavg3_3d_nav_Ne-.end_date: >>>IOEDATE<<< , @@ -1900,7 +1900,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_trc_Np-.frequency: 030000 , tavg3_3d_trc_Np-.duration: 030000 , tavg3_3d_trc_Np-.mode: 'time-averaged' , - tavg3_3d_trc_Np-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_trc_Np-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_trc_Np-.vscale: 100.0 , 
tavg3_3d_trc_Np-.vunit: 'hPa' , tavg3_3d_trc_Np-.vvars: 'log(PLE)' , 'DYN' , @@ -1926,7 +1926,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' vtx.mix.end_date: >>>IOEBKGD<<< , vtx.mix.end_time: >>>IOEBKGT<<< , vtx.mix.mode: 'instantaneous' , - vtx.mix.grid_label: PC@HIST_IMx@HIST_JM-DC , + vtx.mix.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , vtx.mix.fields: 'PHIS' , 'AGCM' , 'SLP' , 'DYN' , 'PS' , 'DYN' , @@ -1953,7 +1953,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' vtx.prs.end_date: >>>IOEBKGD<<< , vtx.prs.end_time: >>>IOEBKGT<<< , vtx.prs.mode: 'instantaneous' , - vtx.prs.grid_label: PC@HIST_IMx@HIST_JM-DC , + vtx.prs.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , vtx.prs.vscale: 100.0 , vtx.prs.vunit: 'hPa' , vtx.prs.vvars: 'log(PLE)' , 'DYN' , @@ -2032,7 +2032,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg2_3d_aiau_Np+-.regrid_method: 'BILINEAR_MONOTONIC' , tavg2_3d_aiau_Np+-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg2_3d_aiau_Np+-.mode: 'time-averaged' , - tavg2_3d_aiau_Np+-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg2_3d_aiau_Np+-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg2_3d_aiau_Np+-.frequency: 020000 , tavg2_3d_aiau_Np+-.duration: 020000 , tavg2_3d_aiau_Np+-.reftime: 010000 , @@ -2069,7 +2069,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' inst3_3d_asm_Np+-.regrid_method: 'BILINEAR_MONOTONIC' , inst3_3d_asm_Np+-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , inst3_3d_asm_Np+-.mode: 'instantaneous' , - inst3_3d_asm_Np+-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst3_3d_asm_Np+-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst3_3d_asm_Np+-.frequency: 030000 , inst3_3d_asm_Np+-.duration: 030000 , inst3_3d_asm_Np+-.ref_date: >>>IOBBKGD<<< , @@ -2104,7 +2104,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' inst3_3d_asm_Nv+-.regrid_method: 'BILINEAR_MONOTONIC' , inst3_3d_asm_Nv+-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , inst3_3d_asm_Nv+-.mode: 'instantaneous' , - inst3_3d_asm_Nv+-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst3_3d_asm_Nv+-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst3_3d_asm_Nv+-.frequency: 030000 , inst3_3d_asm_Nv+-.duration: 030000 , inst3_3d_asm_Nv+-.ref_date: >>>IOBBKGD<<< , @@ -2135,7 +2135,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg1_2d_slv_Nx+-.regrid_method: 'BILINEAR_MONOTONIC' , tavg1_2d_slv_Nx+-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg1_2d_slv_Nx+-.mode: 'time-averaged' , - tavg1_2d_slv_Nx+-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg1_2d_slv_Nx+-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg1_2d_slv_Nx+-.frequency: 010000 , tavg1_2d_slv_Nx+-.duration: 010000 , tavg1_2d_slv_Nx+-.ref_date: >>>IOBBKGD<<< , @@ -2185,7 +2185,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg1_2d_flx_Nx+-.regrid_method: 'BILINEAR_MONOTONIC' , tavg1_2d_flx_Nx+-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg1_2d_flx_Nx+-.mode: 'time-averaged' , - tavg1_2d_flx_Nx+-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg1_2d_flx_Nx+-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg1_2d_flx_Nx+-.frequency: 010000 , tavg1_2d_flx_Nx+-.duration: 010000 , tavg1_2d_flx_Nx+-.ref_date: >>>IOBBKGD<<< , @@ -2235,7 +2235,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg1_2d_lnd_Nx+-.descr: '2d,1-Hourly,Time-Averaged,Single-Level,Forecast,Land Surface Diagnostics' , tavg1_2d_lnd_Nx+-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg1_2d_lnd_Nx+-.mode: 'time-averaged' , - tavg1_2d_lnd_Nx+-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg1_2d_lnd_Nx+-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg1_2d_lnd_Nx+-.frequency: 010000 , tavg1_2d_lnd_Nx+-.duration: 010000 , tavg1_2d_lnd_Nx+-.ref_date: >>>IOBBKGD<<< , @@ 
-2299,7 +2299,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg1_2d_rad_Nx+-.regrid_method: 'BILINEAR_MONOTONIC' , tavg1_2d_rad_Nx+-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , tavg1_2d_rad_Nx+-.mode: 'time-averaged' , - tavg1_2d_rad_Nx+-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg1_2d_rad_Nx+-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg1_2d_rad_Nx+-.frequency: 010000 , tavg1_2d_rad_Nx+-.duration: 010000 , tavg1_2d_rad_Nx+-.ref_date: >>>IOBBKGD<<< , @@ -2351,7 +2351,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' inst3_2d_met_Nx+-.regrid_method: 'BILINEAR_MONOTONIC' , inst3_2d_met_Nx+-.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , inst3_2d_met_Nx+-.mode: 'instantaneous' , - inst3_2d_met_Nx+-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst3_2d_met_Nx+-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst3_2d_met_Nx+-.frequency: 030000 , inst3_2d_met_Nx+-.duration: 030000 , inst3_2d_met_Nx+-.ref_date: >>>IOBBKGD<<< , @@ -2492,7 +2492,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_ext_Np+-.frequency: 030000 , tavg3_3d_ext_Np+-.duration: 030000 , tavg3_3d_ext_Np+-.mode: 'time-averaged' , - tavg3_3d_ext_Np+-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_ext_Np+-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_ext_Np+-.splitField: 1 , tavg3_3d_ext_Np+-.conservative: 1 , tavg3_3d_ext_Np+-.vscale: 100.0 , @@ -2527,7 +2527,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_aer_Np+-.frequency: 030000 , tavg3_3d_aer_Np+-.duration: 030000 , tavg3_3d_aer_Np+-.mode: 'time-averaged' , - tavg3_3d_aer_Np+-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_aer_Np+-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_aer_Np+-.splitField: 1 , tavg3_3d_aer_Np+-.conservative: 1 , tavg3_3d_aer_Np+-.vscale: 100.0 , @@ -2552,7 +2552,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_chm_Np+-.frequency: 030000 , tavg3_3d_chm_Np+-.duration: 030000 , tavg3_3d_chm_Np+-.mode: 'time-averaged' , - tavg3_3d_chm_Np+-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_chm_Np+-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_chm_Np+-.vscale: 100.0 , tavg3_3d_chm_Np+-.vunit: 'hPa' , tavg3_3d_chm_Np+-.vvars: 'log(PLE)' , 'DYN' , @@ -2574,7 +2574,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' tavg3_3d_tag_Np+-.frequency: 030000 , tavg3_3d_tag_Np+-.duration: 030000 , tavg3_3d_tag_Np+-.mode: 'time-averaged' , - tavg3_3d_tag_Np+-.grid_label: PC@HIST_IMx@HIST_JM-DC , + tavg3_3d_tag_Np+-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , tavg3_3d_tag_Np+-.vscale: 100.0 , tavg3_3d_tag_Np+-.vunit: 'hPa' , tavg3_3d_tag_Np+-.vvars: 'log(PLE)' , 'DYN' , @@ -2606,7 +2606,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' inst1_2d_hwl_Nx+-.deflate: 2 , inst1_2d_hwl_Nx+-.regrid_method: 'BILINEAR_MONOTONIC' , inst1_2d_hwl_Nx+-.mode: 'instantaneous' , - inst1_2d_hwl_Nx+-.grid_label: PC@HIST_IMx@HIST_JM-DC , + inst1_2d_hwl_Nx+-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , inst1_2d_hwl_Nx+-.splitField: 1 , inst1_2d_hwl_Nx+-.frequency: 010000 , inst1_2d_hwl_Nx+-.duration: 010000 , @@ -2638,7 +2638,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' prog.eta+-.frequency: 060000 , prog.eta+-.duration: 060000 , prog.eta+-.mode: 'instantaneous' , - prog.eta+-.grid_label: PC@HIST_IMx@HIST_JM-DC , + prog.eta+-.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , prog.eta+-.fields: 'PHIS' , 'AGCM' , 'phis' , 'SGH' , 'AGCM' , 'hs_stdv' , 'PS' , 'DYN' , 'ps' , @@ -2666,7 +2666,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' prog.sfc+-.frequency: 060000 , prog.sfc+-.duration: 060000 , prog.sfc+-.mode: 'instantaneous' , - prog.sfc+-.grid_label: PC@HIST_IMx@HIST_JM-DC , + prog.sfc+-.grid_label: PC{{ HIST_IM }}x{{ 
HIST_JM }}-DC , prog.sfc+-.fields: 'PHIS' , 'AGCM' , 'US;VS' , 'DYN' , 'UA;VA' , 'TA' , 'DYN' , @@ -2716,7 +2716,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' const_2d_asm_Nx.format: 'CFIO' , const_2d_asm_Nx.template: '%y4%m2%d2_%h2%n2z.>>>NCSUFFIX<<<' , const_2d_asm_Nx.mode: 'instantaneous' , - const_2d_asm_Nx.grid_label: PC@HIST_IMx@HIST_JM-DC , + const_2d_asm_Nx.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , const_2d_asm_Nx.frequency: 240000 , const_2d_asm_Nx.duration: 240000 , const_2d_asm_Nx.ref_time: >>>IOBBKGT<<< , @@ -2744,7 +2744,7 @@ COLLECTIONS: 'inst3_3d_asm_Np-' asm.eta.frequency: @ASYNFRQ , asm.eta.duration: @ASYNFRQ , asm.eta.mode: 'instantaneous' , - asm.eta.grid_label: PC@HIST_IMx@HIST_JM-DC , + asm.eta.grid_label: PC{{ HIST_IM }}x{{ HIST_JM }}-DC , asm.eta.fields: 'PHIS' , 'AGCM' , 'phis' , 'SGH' , 'AGCM' , 'hs_stdv' , 'TS' , 'SURFACE' , 'ts' , diff --git a/fvcore_layout.rc b/fvcore_layout.rc index fec5a2e9..398dd8b5 100644 --- a/fvcore_layout.rc +++ b/fvcore_layout.rc @@ -1,11 +1,11 @@ &fv_core_nml - @FV_HYDRO + {{ FV_HYDRO }} range_warn = .T. fv_debug = .F. - @FV_SCHMIDT - @FV_STRETCH_FAC - @FV_TARGET_LON - @FV_TARGET_LAT + {{ FV_SCHMIDT }} + {{ FV_STRETCH_FAC }} + {{ FV_TARGET_LON }} + {{ FV_TARGET_LAT }} compute_coords_locally = .false. / @@ -47,8 +47,8 @@ c_pgacs = 0.01 c_paut = 1.0 c_cracw = 1.0 - @GFDL_PROG_CCN - @GFDL_USE_CCN + {{ GFDL_PROG_CCN }} + {{ GFDL_USE_CCN }} ccn_l = 300. ccn_o = 100. z_slope_liq = .true. diff --git a/gcm_archive.j b/gcm_archive.j index a4a8ffcb..d48272dd 100644 --- a/gcm_archive.j +++ b/gcm_archive.j @@ -4,12 +4,12 @@ # Batch Parameters for Archive Job ####################################################################### -#@BATCH_TIME@ARCHIVE_T -#@ARCHIVE_P -#@BATCH_JOBNAME@ARCHIVE_N -#@ARCHIVE_Q -#@BATCH_GROUP -#@BATCH_OUTPUTNAMEOUTPUT +#{{ BATCH_TIME }}{{ ARCHIVE_T }} +#{{ ARCHIVE_P }} +#{{ BATCH_JOBNAME }}{{ ARCHIVE_N }} +#{{ ARCHIVE_Q }} +#{{ BATCH_GROUP }} +#{{ BATCH_OUTPUTNAME }}OUTPUT ####################################################################### # System Environment Variables @@ -21,7 +21,7 @@ limit stacksize unlimited @SETENVS -@MPT_SHEPHERD +{{ MPT_SHEPHERD }} ####################################################################### # Architecture Specific Environment Variables @@ -29,13 +29,13 @@ limit stacksize unlimited setenv ARCH `uname` -setenv SITE @SITE -setenv GEOSBIN @GEOSBIN +setenv SITE {{ SITE }} +setenv GEOSBIN {{ GEOSBIN }} source $GEOSBIN/g5_modules -setenv @LD_LIBRARY_PATH_CMD ${LD_LIBRARY_PATH} +setenv {{ LD_LIBRARY_PATH_CMD }} ${LD_LIBRARY_PATH} if ( $?BASEDIR ) then - setenv @LD_LIBRARY_PATH_CMD ${@LD_LIBRARY_PATH_CMD}:${BASEDIR}/${ARCH}/lib + setenv {{ LD_LIBRARY_PATH_CMD }} ${{{ LD_LIBRARY_PATH_CMD }}}:${BASEDIR}/${ARCH}/lib endif ####################################################################### diff --git a/gcm_emip.setup b/gcm_emip.setup index 53f75c14..6b5d7275 100755 --- a/gcm_emip.setup +++ b/gcm_emip.setup @@ -30,6 +30,6 @@ sed -f sedfile gcm_run.tmp > gcm_run.j$RSTDATE /bin/rm -f sedfile /bin/rm -f gcm_run.tmp -echo @BATCH_CMD gcm_run.j$RSTDATE +echo {{ BATCH_CMD }} gcm_run.j$RSTDATE end diff --git a/gcm_forecast.setup b/gcm_forecast.setup index bce27a19..88417308 100755 --- a/gcm_forecast.setup +++ b/gcm_forecast.setup @@ -5,12 +5,12 @@ ####################################################################### setenv ARCH `uname` -setenv SITE @SITE +setenv SITE {{ SITE }} -setenv EXPID @EXPID -setenv EXPDIR @EXPDIR -setenv HOMDIR @HOMDIR -setenv GEOSUTIL @GEOSUTIL +setenv EXPID {{ EXPID }} 
+setenv EXPDIR {{ EXPDIR }} +setenv HOMDIR {{ HOMDIR }} +setenv GEOSUTIL {{ GEOSUTIL }} ####################################################################### # Parameters for Forecasts @@ -107,7 +107,7 @@ s?&FHRS?$FHRS?g EOF sed -f sedfile $EXPDIR/forecasts/gcm_forecast.tmp > $EXPDIR/forecasts/gcm_forecast_${FCST_LABL}.j${date}-${nymde} -if( $QSUB != 'FALSE' ) @BATCH_CMD $EXPDIR/forecasts/gcm_forecast_${FCST_LABL}.j${date}-${nymde} +if( $QSUB != 'FALSE' ) {{ BATCH_CMD }} $EXPDIR/forecasts/gcm_forecast_${FCST_LABL}.j${date}-${nymde} set date = $test end diff --git a/gcm_forecast.tmpl b/gcm_forecast.tmpl index fd958308..86552531 100755 --- a/gcm_forecast.tmpl +++ b/gcm_forecast.tmpl @@ -4,12 +4,12 @@ # Batch Parameters for Run Job ####################################################################### -#@BATCH_TIME@RUN_FT -#@RUN_FP -#@BATCH_JOBNAME@RUN_FN_&DATE -#@RUN_Q -#@BATCH_GROUP -#@BATCH_OUTPUTNAMEFCSTOUT +#{{ BATCH_TIME }}{{ RUN_FT }} +#{{ RUN_FP }} +#{{ BATCH_JOBNAME }}{{ RUN_FN }}_&DATE +#{{ RUN_Q }} +#{{ BATCH_GROUP }} +#{{ BATCH_OUTPUTNAME }}FCSTOUT ####################################################################### # System Settings @@ -25,19 +25,19 @@ limit stacksize unlimited setenv ARCH `uname` -setenv SITE @SITE -setenv GEOSDIR @GEOSDIR -setenv GEOSBIN @GEOSBIN -setenv GEOSETC @GEOSETC -setenv GEOSUTIL @GEOSSRC +setenv SITE {{ SITE }} +setenv GEOSDIR {{ GEOSDIR }} +setenv GEOSBIN {{ GEOSBIN }} +setenv GEOSETC {{ GEOSETC }} +setenv GEOSUTIL {{ GEOSSRC }} source $GEOSBIN/g5_modules -setenv @LD_LIBRARY_PATH_CMD ${LD_LIBRARY_PATH}:${GEOSDIR}/lib +setenv {{ LD_LIBRARY_PATH_CMD }} ${LD_LIBRARY_PATH}:${GEOSDIR}/lib if ( $?BASEDIR ) then - setenv @LD_LIBRARY_PATH_CMD ${@LD_LIBRARY_PATH_CMD}:${BASEDIR}/${ARCH}/lib + setenv {{ LD_LIBRARY_PATH_CMD }} ${{{ LD_LIBRARY_PATH_CMD }}}:${BASEDIR}/${ARCH}/lib endif -setenv RUN_CMD "@RUN_CMD" +setenv RUN_CMD "{{ RUN_CMD }}" setenv GCMVER `cat $GEOSETC/.AGCM_VERSION` echo VERSION: $GCMVER @@ -47,9 +47,9 @@ echo VERSION: $GCMVER ####################################################################### -setenv EXPID @EXPID -setenv EXPDIR @EXPDIR -setenv HOMDIR @HOMDIR +setenv EXPID {{ EXPID }} +setenv EXPDIR {{ EXPDIR }} +setenv HOMDIR {{ HOMDIR }} ####################################################################### # Set Forecast Run Parameters @@ -104,14 +104,14 @@ set AGCM_LM = `grep '^\s*AGCM_LM:' $HOMDIR/AGCM.rc | cut -d: -f2` set OGCM_IM = `grep '^\s*OGCM\.IM_WORLD:' $HOMDIR/AGCM.rc | cut -d: -f2` set OGCM_JM = `grep '^\s*OGCM\.JM_WORLD:' $HOMDIR/AGCM.rc | cut -d: -f2` -@COUPLED set OGCM_LM = `grep '^\s*OGCM\.LM:' $HOMDIR/AGCM.rc | cut -d: -f2` -@COUPLED set NX = `grep '^\s*OGCM\.NX:' $HOMDIR/AGCM.rc | cut -d: -f2` -@COUPLED set NY = `grep '^\s*OGCM\.NY:' $HOMDIR/AGCM.rc | cut -d: -f2` +{{ COUPLED }} set OGCM_LM = `grep '^\s*OGCM\.LM:' $HOMDIR/AGCM.rc | cut -d: -f2` +{{ COUPLED }} set NX = `grep '^\s*OGCM\.NX:' $HOMDIR/AGCM.rc | cut -d: -f2` +{{ COUPLED }} set NY = `grep '^\s*OGCM\.NY:' $HOMDIR/AGCM.rc | cut -d: -f2` # Calculate number of cores/nodes for IOSERVER # -------------------------------------------- -set USE_IOSERVER = @USE_IOSERVER +set USE_IOSERVER = {{ USE_IOSERVER }} set NUM_OSERVER_NODES = `grep '^\s*IOSERVER_NODES:' $HOMDIR/AGCM.rc | cut -d: -f2` set NUM_BACKEND_PES = `grep '^\s*NUM_BACKEND_PES:' $HOMDIR/AGCM.rc | cut -d: -f2` @@ -127,7 +127,7 @@ endif @ MODEL_NPES = $NX * $NY -set NCPUS_PER_NODE = @NCPUS_PER_NODE +set NCPUS_PER_NODE = {{ NCPUS_PER_NODE }} set NUM_MODEL_NODES=`echo "scale=6;($MODEL_NPES / 
$NCPUS_PER_NODE)" | bc | awk 'function ceil(x, y){y=int(x); return(x>y?y+1:y)} {print ceil($1)}'` if ( $NCPUS != NULL ) then @@ -347,16 +347,16 @@ done: # Link Boundary Datasets ####################################################################### -setenv BCSDIR @BCSDIR -@DATAOCEANsetenv SSTDIR @SSTDIR -setenv BCRSLV @ATMOStag_@OCEANtag -@MOM5setenv SSTDIR @COUPLEDIR/SST/MERRA2/${OGCM_IM}x${OGCM_JM}/v1 -@MOM6setenv SSTDIR @COUPLEDIR/SST/MERRA2/${OGCM_IM}x${OGCM_JM}/v1 +setenv BCSDIR {{ BCSDIR }} +{{ DATAOCEAN }}setenv SSTDIR {{ SSTDIR }} +setenv BCRSLV {{ ATMOStag }}_{{ OCEANtag }} +{{ MOM5 }}setenv SSTDIR {{ COUPLEDIR }}/SST/MERRA2/${OGCM_IM}x${OGCM_JM}/v1 +{{ MOM6 }}setenv SSTDIR {{ COUPLEDIR }}/SST/MERRA2/${OGCM_IM}x${OGCM_JM}/v1 #this is hard-wired for NAS for now - should make it more general -@DATAOCEANsetenv BCTAG `basename $BCSDIR` -@COUPLEDsetenv BCTAG `basename @COUPLEDIR/@OCNMODEL/${OGCM_IM}x${OGCM_JM}` -setenv EMISSIONS @EMISSIONS +{{ DATAOCEAN }}setenv BCTAG `basename $BCSDIR` +{{ COUPLED }}setenv BCTAG `basename {{ COUPLEDIR }}/{{ OCNMODEL }}/${OGCM_IM}x${OGCM_JM}` +setenv EMISSIONS {{ EMISSIONS }} chmod +x linkbcs >>>GCMRUN_CATCHCN<< WSUB_ExtData.rc + {{ MP_TURN_OFF_WSUB_EXTDATA }}/bin/mv WSUB_ExtData.rc WSUB_ExtData.tmp + {{ MP_TURN_OFF_WSUB_EXTDATA }}cat WSUB_ExtData.tmp | sed -e '/^WSUB_CLIM/ s#ExtData.*#/dev/null#' > WSUB_ExtData.rc else - @MP_TURN_OFF_WSUB_EXTDATA/bin/mv WSUB_ExtData.yaml WSUB_ExtData.tmp - @MP_TURN_OFF_WSUB_EXTDATAcat WSUB_ExtData.tmp | sed -e '/collection:/ s#WSUB_SWclim.*#/dev/null#' > WSUB_ExtData.yaml + {{ MP_TURN_OFF_WSUB_EXTDATA }}/bin/mv WSUB_ExtData.yaml WSUB_ExtData.tmp + {{ MP_TURN_OFF_WSUB_EXTDATA }}cat WSUB_ExtData.tmp | sed -e '/collection:/ s#WSUB_SWclim.*#/dev/null#' > WSUB_ExtData.yaml endif -@MP_TURN_OFF_WSUB_EXTDATA/bin/rm WSUB_ExtData.tmp +{{ MP_TURN_OFF_WSUB_EXTDATA }}/bin/rm WSUB_ExtData.tmp # Generate the complete ExtData.rc # -------------------------------- @@ -604,7 +604,7 @@ setenv YEAR $yearc if (! -e tile.bin) then $GEOSBIN/binarytile.x tile.data tile.bin -@MOM5 $GEOSBIN/binarytile.x tile_hist.data tile_hist.bin +{{ MOM5 }} $GEOSBIN/binarytile.x tile_hist.data tile_hist.bin endif # If running in dual ocean mode, link sst and fraci data here @@ -634,8 +634,8 @@ else # Run the script # -------------- - @SINGULARITY_BUILD $RUN_CMD 1 $SINGULARITY_RUN $GEOSBIN/SaltIntSplitter tile.data $SCRDIR/saltwater_internal_rst - @NATIVE_BUILD $RUN_CMD 1 $GEOSBIN/SaltIntSplitter tile.data $SCRDIR/saltwater_internal_rst + {{ SINGULARITY_BUILD }} $RUN_CMD 1 $SINGULARITY_RUN $GEOSBIN/SaltIntSplitter tile.data $SCRDIR/saltwater_internal_rst + {{ NATIVE_BUILD }} $RUN_CMD 1 $GEOSBIN/SaltIntSplitter tile.data $SCRDIR/saltwater_internal_rst # Move restarts # ------------- @@ -673,8 +673,8 @@ endif if ( -x $GEOSBIN/rs_numtiles.x ) then set N_OPENW_TILES_EXPECTED = `grep '^\s*0' tile.data | wc -l` - @SINGULARITY_BUILD set N_OPENW_TILES_FOUND = `$RUN_CMD 1 $SINGULARITY_RUN $GEOSBIN/rs_numtiles.x openwater_internal_rst | grep Total | awk '{print $NF}'` - @NATIVE_BUILD set N_OPENW_TILES_FOUND = `$RUN_CMD 1 $GEOSBIN/rs_numtiles.x openwater_internal_rst | grep Total | awk '{print $NF}'` + {{ SINGULARITY_BUILD }} set N_OPENW_TILES_FOUND = `$RUN_CMD 1 $SINGULARITY_RUN $GEOSBIN/rs_numtiles.x openwater_internal_rst | grep Total | awk '{print $NF}'` + {{ NATIVE_BUILD }} set N_OPENW_TILES_FOUND = `$RUN_CMD 1 $GEOSBIN/rs_numtiles.x openwater_internal_rst | grep Total | awk '{print $NF}'` if ( $N_OPENW_TILES_EXPECTED != $N_OPENW_TILES_FOUND ) then echo "Error! 
Found $N_OPENW_TILES_FOUND tiles in openwater. Expect to find $N_OPENW_TILES_EXPECTED tiles." @@ -720,7 +720,7 @@ endif @SETENVS -@MPT_SHEPHERD +{{ MPT_SHEPHERD }} # Run bundleParser.py #--------------------- @@ -765,8 +765,8 @@ else set IOSERVER_EXTRA = "" endif -@SINGULARITY_BUILD @OCEAN_PRELOAD $RUN_CMD $TOTAL_PES $SINGULARITY_RUN $GEOSEXE $IOSERVER_OPTIONS $IOSERVER_EXTRA --logging_config 'logging.yaml' -@NATIVE_BUILD @OCEAN_PRELOAD $RUN_CMD $TOTAL_PES $GEOSEXE $IOSERVER_OPTIONS $IOSERVER_EXTRA --logging_config 'logging.yaml' +{{ SINGULARITY_BUILD }} {{ OCEAN_PRELOAD }} $RUN_CMD $TOTAL_PES $SINGULARITY_RUN $GEOSEXE $IOSERVER_OPTIONS $IOSERVER_EXTRA --logging_config 'logging.yaml' +{{ NATIVE_BUILD }} {{ OCEAN_PRELOAD }} $RUN_CMD $TOTAL_PES $GEOSEXE $IOSERVER_OPTIONS $IOSERVER_EXTRA --logging_config 'logging.yaml' if( $USE_SHMEM == 1 ) $GEOSBIN/RmShmKeys_sshmpi.csh >& /dev/null diff --git a/gcm_moveplot.j b/gcm_moveplot.j index cfe19f21..f473fd45 100755 --- a/gcm_moveplot.j +++ b/gcm_moveplot.j @@ -4,12 +4,12 @@ # Batch Parameters for Plot Job ####################################################################### -#@BATCH_TIME1:00:00 -#@MOVE_P -#@BATCH_JOBNAME@MOVE_N -#@MOVE_Q -#@BATCH_GROUP -#@BATCH_OUTPUTNAMEgcm_moveplot.o +#{{ BATCH_TIME }}1:00:00 +#{{ MOVE_P }} +#{{ BATCH_JOBNAME }}{{ MOVE_N }} +#{{ MOVE_Q }} +#{{ BATCH_GROUP }} +#{{ BATCH_OUTPUTNAME }}gcm_moveplot.o ####################################################################### # System Environment Variables @@ -21,7 +21,7 @@ limit stacksize unlimited @SETENVS -@MPT_SHEPHERD +{{ MPT_SHEPHERD }} ####################################################################### # Architecture Specific Environment Variables @@ -29,23 +29,23 @@ limit stacksize unlimited setenv ARCH `uname` -setenv SITE @SITE -setenv GEOSBIN @GEOSBIN -setenv GEOSUTIL @GEOSSRC +setenv SITE {{ SITE }} +setenv GEOSBIN {{ GEOSBIN }} +setenv GEOSUTIL {{ GEOSSRC }} source $GEOSBIN/g5_modules -setenv @LD_LIBRARY_PATH_CMD ${LD_LIBRARY_PATH} +setenv {{ LD_LIBRARY_PATH_CMD }} ${LD_LIBRARY_PATH} if ( $?BASEDIR ) then - setenv @LD_LIBRARY_PATH_CMD ${@LD_LIBRARY_PATH_CMD}:${BASEDIR}/${ARCH}/lib + setenv {{ LD_LIBRARY_PATH_CMD }} ${{{ LD_LIBRARY_PATH_CMD }}}:${BASEDIR}/${ARCH}/lib endif ####################################################################### # Experiment Specific Environment Variables ####################################################################### -setenv EXPID @EXPID -setenv EXPDIR @EXPDIR -setenv HOMDIR @HOMDIR +setenv EXPID {{ EXPID }} +setenv EXPDIR {{ EXPDIR }} +setenv HOMDIR {{ HOMDIR }} ####################################################################### # MOVE Commands diff --git a/gcm_plot.tmpl b/gcm_plot.tmpl index e202c413..f68ca271 100755 --- a/gcm_plot.tmpl +++ b/gcm_plot.tmpl @@ -4,12 +4,12 @@ # Batch Parameters for Plot Job ####################################################################### -#@BATCH_TIME@PLOT_T -#@PLOT_P -#@BATCH_JOBNAME@NAME -#@PLOT_Q -#@BATCH_GROUP -#@BATCH_OUTPUTNAMEOUTPUT +#{{ BATCH_TIME }}{{ PLOT_T }} +#{{ PLOT_P }} +#{{ BATCH_JOBNAME }}@NAME +#{{ PLOT_Q }} +#{{ BATCH_GROUP }} +#{{ BATCH_OUTPUTNAME }}OUTPUT ####################################################################### # System Environment Variables @@ -21,7 +21,7 @@ limit stacksize unlimited @SETENVS -@MPT_SHEPHERD +{{ MPT_SHEPHERD }} ####################################################################### # Architecture Specific Environment Variables @@ -29,23 +29,23 @@ limit stacksize unlimited setenv ARCH `uname` -setenv SITE @SITE -setenv 
GEOSBIN @GEOSBIN -setenv GEOSUTIL @GEOSSRC +setenv SITE {{ SITE }} +setenv GEOSBIN {{ GEOSBIN }} +setenv GEOSUTIL {{ GEOSSRC }} source $GEOSBIN/g5_modules -setenv @LD_LIBRARY_PATH_CMD ${LD_LIBRARY_PATH} +setenv {{ LD_LIBRARY_PATH_CMD }} ${LD_LIBRARY_PATH} if ( $?BASEDIR ) then - setenv @LD_LIBRARY_PATH_CMD ${@LD_LIBRARY_PATH_CMD}:${BASEDIR}/${ARCH}/lib + setenv {{ LD_LIBRARY_PATH_CMD }} ${{{ LD_LIBRARY_PATH_CMD }}}:${BASEDIR}/${ARCH}/lib endif ####################################################################### # Experiment Specific Environment Variables ####################################################################### -setenv EXPID @EXPID -setenv EXPDIR @EXPDIR -setenv HOMDIR @HOMDIR +setenv EXPID {{ EXPID }} +setenv EXPDIR {{ EXPDIR }} +setenv HOMDIR {{ HOMDIR }} ####################################################################### # Quickplot Commands @@ -64,5 +64,5 @@ endif ####################################################################### set MOVE = `grep PLOT_MOVE: $EXPDIR/plot/plot.rc | cut -d'#' -f1 | cut -d':' -f2` -if( $MOVE[1] == 'ON' ) @BATCH_CMD $EXPDIR/plot/gcm_moveplot.j +if( $MOVE[1] == 'ON' ) {{ BATCH_CMD }} $EXPDIR/plot/gcm_moveplot.j diff --git a/gcm_post.j b/gcm_post.j index 7cc0be91..177996ef 100755 --- a/gcm_post.j +++ b/gcm_post.j @@ -4,13 +4,13 @@ # Batch Parameters for Post-Processing Job ####################################################################### -#@BATCH_TIME@POST_T -#@POST_P -#@BATCH_JOBNAME@POST_N_@COLLECTION.@YYYYMM -#@POST_Q -#@BATCH_GROUP -#@BATCH_OUTPUTNAME@POST_O -#@BATCH_JOINOUTERR +#{{ BATCH_TIME }}{{ POST_T }} +#{{ POST_P }} +#{{ BATCH_JOBNAME }}{{ POST_N }}_@COLLECTION.@YYYYMM +#{{ POST_Q }} +#{{ BATCH_GROUP }} +#{{ BATCH_OUTPUTNAME }}@POST_O +#{{ BATCH_JOINOUTERR }} ####################################################################### # System Environment Variables @@ -22,7 +22,7 @@ limit stacksize unlimited @SETENVS -@MPT_SHEPHERD +{{ MPT_SHEPHERD }} ####################################################################### # Architecture Specific Environment Variables @@ -30,22 +30,22 @@ limit stacksize unlimited setenv ARCH `uname` -setenv SITE @SITE -setenv GEOSBIN @GEOSBIN -setenv GEOSUTIL @GEOSSRC -setenv BATCHNAME "@POST_N" +setenv SITE {{ SITE }} +setenv GEOSBIN {{ GEOSBIN }} +setenv GEOSUTIL {{ GEOSSRC }} +setenv BATCHNAME "{{ POST_N }}" source $GEOSBIN/g5_modules -setenv @LD_LIBRARY_PATH_CMD ${LD_LIBRARY_PATH} +setenv {{ LD_LIBRARY_PATH_CMD }} ${LD_LIBRARY_PATH} if ( $?BASEDIR ) then - setenv @LD_LIBRARY_PATH_CMD ${@LD_LIBRARY_PATH_CMD}:${BASEDIR}/${ARCH}/lib + setenv {{ LD_LIBRARY_PATH_CMD }} ${{{ LD_LIBRARY_PATH_CMD }}}:${BASEDIR}/${ARCH}/lib endif if( $?SLURM_NTASKS ) then - setenv RUN_CMD "@RUN_CMD" + setenv RUN_CMD "{{ RUN_CMD }}" set NCPUS = $SLURM_NTASKS else if( $?PBS_NODEFILE ) then - setenv RUN_CMD "@RUN_CMD" + setenv RUN_CMD "{{ RUN_CMD }}" set NCPUS = `cat $PBS_NODEFILE | wc -l` else set NCPUS = NULL @@ -55,6 +55,6 @@ endif # Perform Post Processing ####################################################################### -$GEOSUTIL/post/gcmpost.script -source @EXPDIR -ncpus $NCPUS -collections @COLLECTION -rec_plt @YYYYMM +$GEOSUTIL/post/gcmpost.script -source {{ EXPDIR }} -ncpus $NCPUS -collections @COLLECTION -rec_plt @YYYYMM exit diff --git a/gcm_regress.j b/gcm_regress.j index ee8a6c4f..5ba8d717 100755 --- a/gcm_regress.j +++ b/gcm_regress.j @@ -4,11 +4,11 @@ # Batch Parameters for Regress Job ####################################################################### -#@BATCH_TIME@RUN_T -#@RUN_P 
-#@BATCH_JOBNAME@REGRESS_N -#@RUN_Q -#@BATCH_GROUP +#{{ BATCH_TIME }}{{ RUN_T }} +#{{ RUN_P }} +#{{ BATCH_JOBNAME }}{{ REGRESS_N }} +#{{ RUN_Q }} +#{{ BATCH_GROUP }} ####################################################################### # System Environment Variables @@ -20,7 +20,7 @@ limit stacksize unlimited @SETENVS -@MPT_SHEPHERD +{{ MPT_SHEPHERD }} # Establish safe default number of OpenMP threads # ----------------------------------------------- @@ -85,26 +85,26 @@ set argv = () setenv ARCH `uname` -setenv SITE @SITE -setenv GEOSDIR @GEOSDIR -setenv GEOSBIN @GEOSBIN +setenv SITE {{ SITE }} +setenv GEOSDIR {{ GEOSDIR }} +setenv GEOSBIN {{ GEOSBIN }} source $GEOSBIN/g5_modules -setenv @LD_LIBRARY_PATH_CMD ${LD_LIBRARY_PATH}:${GEOSDIR}/lib -# We only add BASEDIR to the @LD_LIBRARY_PATH_CMD if BASEDIR is defined (i.e., not running with Spack) +setenv {{ LD_LIBRARY_PATH_CMD }} ${LD_LIBRARY_PATH}:${GEOSDIR}/lib +# We only add BASEDIR to the {{ LD_LIBRARY_PATH_CMD }} if BASEDIR is defined (i.e., not running with Spack) if ( $?BASEDIR ) then - setenv @LD_LIBRARY_PATH_CMD ${@LD_LIBRARY_PATH_CMD}:${BASEDIR}/${ARCH}/lib + setenv {{ LD_LIBRARY_PATH_CMD }} ${{{ LD_LIBRARY_PATH_CMD }}}:${BASEDIR}/${ARCH}/lib endif -setenv RUN_CMD "@RUN_CMD" +setenv RUN_CMD "{{ RUN_CMD }}" ####################################################################### # Experiment Specific Environment Variables ####################################################################### -setenv EXPID @EXPID -setenv EXPDIR @EXPDIR -setenv HOMDIR @HOMDIR +setenv EXPID {{ EXPID }} +setenv EXPDIR {{ EXPDIR }} +setenv HOMDIR {{ HOMDIR }} setenv SCRDIR $EXPDIR/scratch ####################################################################### @@ -130,9 +130,9 @@ cp $EXPDIR/RC/*.yaml $EXPDIR/regress cp $EXPDIR/GEOSgcm.x $EXPDIR/regress cp $EXPDIR/linkbcs $EXPDIR/regress cp $HOMDIR/*.yaml $EXPDIR/regress -@COUPLED cp $HOMDIR/*.nml $EXPDIR/regress -@MOM6cp $HOMDIR/MOM_input $EXPDIR/regress -@MOM6cp $HOMDIR/MOM_override $EXPDIR/regress +{{ COUPLED }} cp $HOMDIR/*.nml $EXPDIR/regress +{{ MOM6 }}cp $HOMDIR/MOM_input $EXPDIR/regress +{{ MOM6 }}cp $HOMDIR/MOM_override $EXPDIR/regress cat fvcore_layout.rc >> input.nml @@ -172,10 +172,10 @@ cp $EXPDIR/cap_restart $EXPDIR/regress # Get proper ridge scheme GWD internal restart # -------------------------------------------- /bin/rm gwd_internal_rst -/bin/cp @GWDRSDIR/gwd_internal_c${IM} gwd_internal_rst +/bin/cp {{ GWDRSDIR }}/gwd_internal_c${IM} gwd_internal_rst -@COUPLED /bin/mkdir INPUT -@COUPLED cp $EXPDIR/RESTART/* INPUT +{{ COUPLED }} /bin/mkdir INPUT +{{ COUPLED }} cp $EXPDIR/RESTART/* INPUT setenv YEAR `cat cap_restart | cut -c1-4` ./linkbcs @@ -228,23 +228,23 @@ COLLECTIONS: test_collection test_collection.format: 'CFIO' , test_collection.deflate: 1 , test_collection.frequency: 060000 , -@DATAOCEAN test_collection.fields: 'PHIS', 'AGCM' , -@DATAOCEAN 'SLP' , 'DYN' , -@DATAOCEAN 'T' , 'DYN' , -@DATAOCEAN 'U;V' , 'DYN' , -@DATAOCEAN 'Q' , 'MOIST', 'QV', -@MOM5 test_collection.fields: 'UW' ,'MOM' , 'US', -@MOM5 'VW' ,'MOM' , 'VS', -@MOM5 'TW' ,'MOM' , 'TS', -@MOM5 'SW' ,'MOM' , 'SS', -@MOM5 'SLV' ,'MOM' , -@MOM5 'QFLUX','OCEAN' , -@MOM6 test_collection.fields: 'UW' ,'MOM6' , 'US', -@MOM6 'VW' ,'MOM6' , 'VS', -@MOM6 'TW' ,'MOM6' , 'TS', -@MOM6 'SW' ,'MOM6' , 'SS', -@MOM6 'SLV' ,'MOM6' , -@MOM6 'QFLUX','OCEAN' , +{{ DATAOCEAN }} test_collection.fields: 'PHIS', 'AGCM' , +{{ DATAOCEAN }} 'SLP' , 'DYN' , +{{ DATAOCEAN }} 'T' , 'DYN' , +{{ DATAOCEAN }} 'U;V' , 'DYN' , +{{ DATAOCEAN }} 'Q' , 'MOIST', 
'QV', +{{ MOM5 }} test_collection.fields: 'UW' ,'MOM' , 'US', +{{ MOM5 }} 'VW' ,'MOM' , 'VS', +{{ MOM5 }} 'TW' ,'MOM' , 'TS', +{{ MOM5 }} 'SW' ,'MOM' , 'SS', +{{ MOM5 }} 'SLV' ,'MOM' , +{{ MOM5 }} 'QFLUX','OCEAN' , +{{ MOM6 }} test_collection.fields: 'UW' ,'MOM6' , 'US', +{{ MOM6 }} 'VW' ,'MOM6' , 'VS', +{{ MOM6 }} 'TW' ,'MOM6' , 'TS', +{{ MOM6 }} 'SW' ,'MOM6' , 'SS', +{{ MOM6 }} 'SLV' ,'MOM6' , +{{ MOM6 }} 'QFLUX','OCEAN' , :: _EOF_ @@ -293,8 +293,8 @@ set EXTDATA2G_TRUE = `grep -i '^\s*USE_EXTDATA2G:\s*\.TRUE\.' CAP.rc | wc -l # Select proper AMIP GOCART Emission RC Files # ------------------------------------------- -setenv EMISSIONS @EMISSIONS -if( @EMISSIONS == AMIP_EMISSIONS ) then +setenv EMISSIONS {{ EMISSIONS }} +if( {{ EMISSIONS }} == AMIP_EMISSIONS ) then if( $EXTDATA2G_TRUE == 0 ) then set AMIP_Transition_Date = 20000301 @@ -321,16 +321,16 @@ if( @EMISSIONS == AMIP_EMISSIONS ) then endif endif -@MP_TURN_OFF_WSUB_EXTDATA# 1MOM and GFDL microphysics do not use WSUB_CLIM -@MP_TURN_OFF_WSUB_EXTDATA# ------------------------------------------------- +{{ MP_TURN_OFF_WSUB_EXTDATA }}# 1MOM and GFDL microphysics do not use WSUB_CLIM +{{ MP_TURN_OFF_WSUB_EXTDATA }}# ------------------------------------------------- if ($EXTDATA2G_TRUE == 0 ) then - @MP_TURN_OFF_WSUB_EXTDATA/bin/mv WSUB_ExtData.rc WSUB_ExtData.tmp - @MP_TURN_OFF_WSUB_EXTDATAcat WSUB_ExtData.tmp | sed -e '/^WSUB_CLIM/ s#ExtData.*#/dev/null#' > WSUB_ExtData.rc + {{ MP_TURN_OFF_WSUB_EXTDATA }}/bin/mv WSUB_ExtData.rc WSUB_ExtData.tmp + {{ MP_TURN_OFF_WSUB_EXTDATA }}cat WSUB_ExtData.tmp | sed -e '/^WSUB_CLIM/ s#ExtData.*#/dev/null#' > WSUB_ExtData.rc else - @MP_TURN_OFF_WSUB_EXTDATA/bin/mv WSUB_ExtData.yaml WSUB_ExtData.tmp - @MP_TURN_OFF_WSUB_EXTDATAcat WSUB_ExtData.tmp | sed -e '/collection:/ s#WSUB_SWclim.*#/dev/null#' > WSUB_ExtData.yaml + {{ MP_TURN_OFF_WSUB_EXTDATA }}/bin/mv WSUB_ExtData.yaml WSUB_ExtData.tmp + {{ MP_TURN_OFF_WSUB_EXTDATA }}cat WSUB_ExtData.tmp | sed -e '/collection:/ s#WSUB_SWclim.*#/dev/null#' > WSUB_ExtData.yaml endif -@MP_TURN_OFF_WSUB_EXTDATA/bin/rm WSUB_ExtData.tmp +{{ MP_TURN_OFF_WSUB_EXTDATA }}/bin/rm WSUB_ExtData.tmp # Generate the complete ExtData.rc # -------------------------------- @@ -451,7 +451,7 @@ if( $RUN_STARTSTOP == TRUE ) then echo "=== Running test of duration ${test_duration_step1} with NX = $NX and NY = $NY starting at $nymd0 $nhms0 ===" - @OCEAN_PRELOAD $RUN_CMD $NPES ./GEOSgcm.x --logging_config 'logging.yaml' + {{ OCEAN_PRELOAD }} $RUN_CMD $NPES ./GEOSgcm.x --logging_config 'logging.yaml' set date = `cat cap_restart` set nymde1 = $date[1] @@ -470,7 +470,7 @@ if( $RUN_STARTSTOP == TRUE ) then /bin/mv -v $chk ${chk}.${nymde1}_${nhmse1}.1 end - @MOM6/bin/mv -v RESTART/MOM.res.nc MOM.res.nc.1 + {{ MOM6 }}/bin/mv -v RESTART/MOM.res.nc MOM.res.nc.1 # Move history as well set hist_file_names = `ls -1 ${EXPID}.test_collection.*.nc4` @@ -499,9 +499,9 @@ cp CAP.rc.orig CAP.rc cp AGCM.rc.orig AGCM.rc cp HISTORY.rc0 HISTORY.rc -@COUPLED /bin/rm -rf INPUT -@COUPLED /bin/mkdir INPUT -@COUPLED cp $EXPDIR/RESTART/* INPUT +{{ COUPLED }} /bin/rm -rf INPUT +{{ COUPLED }} /bin/mkdir INPUT +{{ COUPLED }} cp $EXPDIR/RESTART/* INPUT ./strip CAP.rc set oldstring = `cat CAP.rc | grep JOB_SGMT:` @@ -517,7 +517,7 @@ set NY = `grep "^ *NY": AGCM.rc | cut -d':' -f2` echo "=== Running test of duration ${test_duration_step2} with NX = $NX and NY = $NY starting at $nymd0 $nhms0 ===" -@OCEAN_PRELOAD $RUN_CMD $NPES ./GEOSgcm.x --logging_config 'logging.yaml' +{{ OCEAN_PRELOAD }} $RUN_CMD $NPES ./GEOSgcm.x 
--logging_config 'logging.yaml' set date = `cat cap_restart` set nymde2 = $date[1] @@ -544,7 +544,7 @@ foreach chk ( $replay_chk_file_names ) $MOVE_OR_COPY $chk ${chk}.${nymde1}_${nhmse1}.2 end -@MOM6 $MOVE_OR_COPY RESTART/MOM.res.nc MOM.res.nc.2 +{{ MOM6 }} $MOVE_OR_COPY RESTART/MOM.res.nc MOM.res.nc.2 # *Copy* history as well set hist_file_names = `ls -1 ${EXPID}.test_collection.*.nc4` @@ -585,7 +585,7 @@ while ( $n <= $numchk ) @ n = $n + 1 end -@COUPLED cp RESTART/* INPUT +{{ COUPLED }} cp RESTART/* INPUT ################################################################## ###### @@ -600,10 +600,10 @@ if ($RUN_STARTSTOP == TRUE) then cp HISTORY.rc0 HISTORY.rc - @MOM6# When you restart in MOM6 mode, you must change input_filename - @MOM6# in the input.nml file from 'n' to 'r' - @MOM6 /bin/cp input.nml input.nml.orig - @MOM6 sed -i -e "s/input_filename = 'n'/input_filename = 'r'/g" input.nml + {{ MOM6 }}# When you restart in MOM6 mode, you must change input_filename + {{ MOM6 }}# in the input.nml file from 'n' to 'r' + {{ MOM6 }} /bin/cp input.nml input.nml.orig + {{ MOM6 }} sed -i -e "s/input_filename = 'n'/input_filename = 'r'/g" input.nml ./strip CAP.rc set oldstring = `cat CAP.rc | grep JOB_SGMT:` @@ -623,7 +623,7 @@ if ($RUN_STARTSTOP == TRUE) then echo "=== Running test of duration ${test_duration_step3} with NX = $NX and NY = $NY starting at $nymdb $nhmsb ===" - @OCEAN_PRELOAD $RUN_CMD $NPES ./GEOSgcm.x --logging_config 'logging.yaml' + {{ OCEAN_PRELOAD }} $RUN_CMD $NPES ./GEOSgcm.x --logging_config 'logging.yaml' set date = `cat cap_restart` set nymde3 = $date[1] @@ -639,7 +639,7 @@ if ($RUN_STARTSTOP == TRUE) then foreach chk ( $replay_chk_file_names ) /bin/mv -v $chk ${chk}.${nymde1}_${nhmse1}.3 end - @MOM6/bin/mv -v RESTART/MOM.res.nc MOM.res.nc.3 + {{ MOM6 }}/bin/mv -v RESTART/MOM.res.nc MOM.res.nc.3 # Move history as well set hist_file_names = `ls -1 ${EXPID}.test_collection.*.nc4` @@ -674,14 +674,14 @@ if ( $RUN_LAYOUT == TRUE) then # Get proper ridge scheme GWD internal restart # -------------------------------------------- /bin/rm gwd_internal_rst - /bin/cp @GWDRSDIR/gwd_internal_c${IM} gwd_internal_rst + /bin/cp {{ GWDRSDIR }}/gwd_internal_c${IM} gwd_internal_rst - @COUPLED /bin/rm -rf INPUT - @COUPLED /bin/mkdir INPUT - @COUPLED cp $EXPDIR/RESTART/* INPUT + {{ COUPLED }} /bin/rm -rf INPUT + {{ COUPLED }} /bin/mkdir INPUT + {{ COUPLED }} cp $EXPDIR/RESTART/* INPUT - @COUPLED # restore original input.nml - @COUPLED /bin/mv input.nml.orig input.nml + {{ COUPLED }} # restore original input.nml + {{ COUPLED }} /bin/mv input.nml.orig input.nml /bin/rm cap_restart echo $nymd0 $nhms0 > cap_restart @@ -717,17 +717,17 @@ if ( $RUN_LAYOUT == TRUE) then /bin/mv AGCM.rc AGCM.tmp cat AGCM.tmp | sed -e "s?$oldstring?$newstring?g" > AGCM.rc - @COUPLED set oldstring = `cat AGCM.rc | grep "^ *OGCM.NX:"` - @COUPLED set newstring = "OGCM.NX: ${test_NY}" - @COUPLED /bin/mv AGCM.rc AGCM.tmp - @COUPLED cat AGCM.tmp | sed -e "s?$oldstring?$newstring?g" > AGCM.rc - @COUPLED set oldstring = `cat AGCM.rc | grep "^ *OGCM.NY:"` - @COUPLED set newstring = "OGCM.NY: ${test_NX}" - @COUPLED /bin/mv AGCM.rc AGCM.tmp - @COUPLED cat AGCM.tmp | sed -e "s?$oldstring?$newstring?g" > AGCM.rc + {{ COUPLED }} set oldstring = `cat AGCM.rc | grep "^ *OGCM.NX:"` + {{ COUPLED }} set newstring = "OGCM.NX: ${test_NY}" + {{ COUPLED }} /bin/mv AGCM.rc AGCM.tmp + {{ COUPLED }} cat AGCM.tmp | sed -e "s?$oldstring?$newstring?g" > AGCM.rc + {{ COUPLED }} set oldstring = `cat AGCM.rc | grep "^ *OGCM.NY:"` + {{ COUPLED }} set 
newstring = "OGCM.NY: ${test_NX}" + {{ COUPLED }} /bin/mv AGCM.rc AGCM.tmp + {{ COUPLED }} cat AGCM.tmp | sed -e "s?$oldstring?$newstring?g" > AGCM.rc - @MOM5sed -r -i -e "/^ *layout/ s#= ([0-9]+),*([0-9]+)#= ${test_NY},${test_NX}#" input.nml - @MOM6sed -r -i -e "s/#override LAYOUT = 3, 2/#override LAYOUT = ${test_NY}, ${test_NX}/g" MOM_override + {{ MOM5 }}sed -r -i -e "/^ *layout/ s#= ([0-9]+),*([0-9]+)#= ${test_NY},${test_NX}#" input.nml + {{ MOM6 }}sed -r -i -e "s/#override LAYOUT = 3, 2/#override LAYOUT = ${test_NY}, ${test_NX}/g" MOM_override setenv YEAR `cat cap_restart | cut -c1-4` ./linkbcs @@ -737,7 +737,7 @@ if ( $RUN_LAYOUT == TRUE) then echo "=== Running test of duration ${test_duration_step4} with NX = $test_NX and NY = $test_NY starting at $nymd0 $nhms0 ===" - @OCEAN_PRELOAD $RUN_CMD $NPES ./GEOSgcm.x --logging_config 'logging.yaml' + {{ OCEAN_PRELOAD }} $RUN_CMD $NPES ./GEOSgcm.x --logging_config 'logging.yaml' set date = `cat cap_restart` set nymde4 = $date[1] @@ -754,7 +754,7 @@ if ( $RUN_LAYOUT == TRUE) then /bin/mv -v $chk ${chk}.${nymde1}_${nhmse1}.4 end - @MOM6/bin/mv -v RESTART/MOM.res.nc MOM.res.nc.4 + {{ MOM6 }}/bin/mv -v RESTART/MOM.res.nc MOM.res.nc.4 # Move history as well set hist_file_names = `ls -1 ${EXPID}.test_collection.*.nc4` @@ -809,24 +809,24 @@ if ($RUN_STARTSTOP == TRUE) then endif end - @MOM6# check MOM.res.nc (MOM6 restart) - @MOM6set file1 = MOM.res.nc.1 - @MOM6set file2 = MOM.res.nc.3 - @MOM6if( -e $file1 && -e $file2 ) then - @MOM6 set check = true - @MOM6 if( $check == true ) then - @MOM6 echo Comparing "MOM6 restarts" - @MOM6 cmp $file1 $file2 - @MOM6 if( $status == 0 ) then - @MOM6 echo Start-Stop Success! - @MOM6 echo " " - @MOM6 else - @MOM6 echo Start-Stop Failed! - @MOM6 echo " " - @MOM6 set pass = false - @MOM6 endif - @MOM6 endif - @MOM6endif + {{ MOM6 }}# check MOM.res.nc (MOM6 restart) + {{ MOM6 }}set file1 = MOM.res.nc.1 + {{ MOM6 }}set file2 = MOM.res.nc.3 + {{ MOM6 }}if( -e $file1 && -e $file2 ) then + {{ MOM6 }} set check = true + {{ MOM6 }} if( $check == true ) then + {{ MOM6 }} echo Comparing "MOM6 restarts" + {{ MOM6 }} cmp $file1 $file2 + {{ MOM6 }} if( $status == 0 ) then + {{ MOM6 }} echo Start-Stop Success! + {{ MOM6 }} echo " " + {{ MOM6 }} else + {{ MOM6 }} echo Start-Stop Failed! + {{ MOM6 }} echo " " + {{ MOM6 }} set pass = false + {{ MOM6 }} endif + {{ MOM6 }} endif + {{ MOM6 }}endif echo "=== Comparing replay checkpoint files from ${NX0}x${NY0} run of duration ${test_duration_step1} with restarts from ${test_duration_step2} + ${test_duration_step3} ${NX0}x${NY0} runs ===" @@ -934,24 +934,24 @@ if ($RUN_LAYOUT == TRUE) then endif end - @MOM6# check MOM.res.nc (MOM6 restart) - @MOM6set file1 = MOM.res.nc.2 - @MOM6set file2 = MOM.res.nc.4 - @MOM6if( -e $file1 && -e $file2 ) then - @MOM6 set check = true - @MOM6 if( $check == true ) then - @MOM6 echo Comparing "MOM6 restarts" - @MOM6 cmp $file1 $file2 - @MOM6 if( $status == 0 ) then - @MOM6 echo Layout Success! - @MOM6 echo " " - @MOM6 else - @MOM6 echo Layout Failed! - @MOM6 echo " " - @MOM6 set pass = false - @MOM6 endif - @MOM6 endif - @MOM6endif + {{ MOM6 }}# check MOM.res.nc (MOM6 restart) + {{ MOM6 }}set file1 = MOM.res.nc.2 + {{ MOM6 }}set file2 = MOM.res.nc.4 + {{ MOM6 }}if( -e $file1 && -e $file2 ) then + {{ MOM6 }} set check = true + {{ MOM6 }} if( $check == true ) then + {{ MOM6 }} echo Comparing "MOM6 restarts" + {{ MOM6 }} cmp $file1 $file2 + {{ MOM6 }} if( $status == 0 ) then + {{ MOM6 }} echo Layout Success! 
+ {{ MOM6 }} echo " " + {{ MOM6 }} else + {{ MOM6 }} echo Layout Failed! + {{ MOM6 }} echo " " + {{ MOM6 }} set pass = false + {{ MOM6 }} endif + {{ MOM6 }} endif + {{ MOM6 }}endif echo "=== Comparing replay checkpoint files from 6-hour ${NX0}x${NY0} run with restarts from 6-hour ${test_NX}x${test_NY} run ===" diff --git a/gcm_run.j b/gcm_run.j index d7946e8d..b4df0e73 100755 --- a/gcm_run.j +++ b/gcm_run.j @@ -4,12 +4,12 @@ # Batch Parameters for Run Job ####################################################################### -#@BATCH_TIME@RUN_T -#@RUN_P -#@BATCH_JOBNAME@RUN_N -#@RUN_Q -#@BATCH_GROUP -#@BATCH_JOINOUTERR +#{{ BATCH_TIME }}{{ RUN_T }} +#{{ RUN_P }} +#{{ BATCH_JOBNAME }}{{ RUN_N }} +#{{ RUN_Q }} +#{{ BATCH_GROUP }} +#{{ BATCH_JOINOUTERR }} #@BATCH_NAME -o gcm_run.o@RSTDATE ####################################################################### @@ -26,20 +26,20 @@ limit stacksize unlimited setenv ARCH `uname` -setenv SITE @SITE -setenv GEOSDIR @GEOSDIR -setenv GEOSBIN @GEOSBIN -setenv GEOSETC @GEOSETC -setenv GEOSUTIL @GEOSSRC +setenv SITE {{ SITE }} +setenv GEOSDIR {{ GEOSDIR }} +setenv GEOSBIN {{ GEOSBIN }} +setenv GEOSETC {{ GEOSETC }} +setenv GEOSUTIL {{ GEOSSRC }} source $GEOSBIN/g5_modules -setenv @LD_LIBRARY_PATH_CMD ${LD_LIBRARY_PATH}:${GEOSDIR}/lib -# We only add BASEDIR to the @LD_LIBRARY_PATH_CMD if BASEDIR is defined (i.e., not running with Spack) +setenv {{ LD_LIBRARY_PATH_CMD }} ${LD_LIBRARY_PATH}:${GEOSDIR}/lib +# We only add BASEDIR to the {{ LD_LIBRARY_PATH_CMD }} if BASEDIR is defined (i.e., not running with Spack) if ( $?BASEDIR ) then - setenv @LD_LIBRARY_PATH_CMD ${@LD_LIBRARY_PATH_CMD}:${BASEDIR}/${ARCH}/lib + setenv {{ LD_LIBRARY_PATH_CMD }} ${{{ LD_LIBRARY_PATH_CMD }}}:${BASEDIR}/${ARCH}/lib endif -setenv RUN_CMD "@RUN_CMD" +setenv RUN_CMD "{{ RUN_CMD }}" setenv GCMVER `cat $GEOSETC/.AGCM_VERSION` echo VERSION: $GCMVER @@ -49,9 +49,9 @@ echo VERSION: $GCMVER ####################################################################### -setenv EXPID @EXPID -setenv EXPDIR @EXPDIR -setenv HOMDIR @HOMDIR +setenv EXPID {{ EXPID }} +setenv EXPDIR {{ EXPDIR }} +setenv HOMDIR {{ HOMDIR }} setenv RSTDATE @RSTDATE setenv GCMEMIP @GCMEMIP @@ -87,14 +87,14 @@ set AGCM_LM = `grep '^\s*AGCM_LM:' $HOMDIR/AGCM.rc | cut -d: -f2` set OGCM_IM = `grep '^\s*OGCM\.IM_WORLD:' $HOMDIR/AGCM.rc | cut -d: -f2` set OGCM_JM = `grep '^\s*OGCM\.JM_WORLD:' $HOMDIR/AGCM.rc | cut -d: -f2` -@COUPLED set OGCM_LM = `grep '^\s*OGCM\.LM:' $HOMDIR/AGCM.rc | cut -d: -f2` -@COUPLED set NX = `grep '^\s*OGCM\.NX:' $HOMDIR/AGCM.rc | cut -d: -f2` -@COUPLED set NY = `grep '^\s*OGCM\.NY:' $HOMDIR/AGCM.rc | cut -d: -f2` +{{ COUPLED }} set OGCM_LM = `grep '^\s*OGCM\.LM:' $HOMDIR/AGCM.rc | cut -d: -f2` +{{ COUPLED }} set NX = `grep '^\s*OGCM\.NX:' $HOMDIR/AGCM.rc | cut -d: -f2` +{{ COUPLED }} set NY = `grep '^\s*OGCM\.NY:' $HOMDIR/AGCM.rc | cut -d: -f2` # Calculate number of cores/nodes for IOSERVER # -------------------------------------------- -set USE_IOSERVER = @USE_IOSERVER +set USE_IOSERVER = {{ USE_IOSERVER }} set NUM_OSERVER_NODES = `grep '^\s*IOSERVER_NODES:' $HOMDIR/AGCM.rc | cut -d: -f2` set NUM_BACKEND_PES = `grep '^\s*NUM_BACKEND_PES:' $HOMDIR/AGCM.rc | cut -d: -f2` @@ -110,7 +110,7 @@ endif @ MODEL_NPES = $NX * $NY -set NCPUS_PER_NODE = @NCPUS_PER_NODE +set NCPUS_PER_NODE = {{ NCPUS_PER_NODE }} set NUM_MODEL_NODES=`echo "scale=6;($MODEL_NPES / $NCPUS_PER_NODE)" | bc | awk 'function ceil(x, y){y=int(x); return(x>y?y+1:y)} {print ceil($1)}'` if ( $NCPUS != NULL ) then @@ -180,7 +180,7 @@ set month 
= `echo $RSTDATE | cut -d_ -f1 | cut -b5-6` >>>EMIP_OLDLAND<<<# --------------------- >>>EMIP_NEWLAND<<<# Copy Jason-3_4 REPLAY MERRA-2 NewLand Restarts >>>EMIP_NEWLAND<<<# ---------------------------------------------- -cp /discover/nobackup/projects/gmao/g6dev/ltakacs/@EMIP_MERRA2/restarts/AMIP/M${month}/restarts.${year}${month}.tar . +cp /discover/nobackup/projects/gmao/g6dev/ltakacs/{{ EMIP_MERRA2 }}/restarts/AMIP/M${month}/restarts.${year}${month}.tar . tar xf restarts.${year}${month}.tar /bin/rm restarts.${year}${month}.tar >>>EMIP_OLDLAND<<>>EMIP_NEWLAND<<<# ------------------------------------------------ set RSTID = `/bin/ls *catch* | cut -d. -f1` set day = `/bin/ls *catch* | cut -d. -f3 | awk 'match($0,/[0-9]{8}/) {print substr($0,RSTART+6,2)}'` -$GEOSBIN/remap_restarts.py command_line -np -ymdh ${year}${month}${day}21 -grout C${AGCM_IM} -levsout ${AGCM_LM} -out_dir . -rst_dir . -expid $RSTID -bcvin @EMIP_BCS_IN -oceanin 1440x720 -nobkg -lbl -nolcv -bcvout @LSMBCS -rs 3 -oceanout @OCEANOUT -in_bc_base @BC_BASE -out_bc_base @BC_BASE +$GEOSBIN/remap_restarts.py command_line -np -ymdh ${year}${month}${day}21 -grout C${AGCM_IM} -levsout ${AGCM_LM} -out_dir . -rst_dir . -expid $RSTID -bcvin {{ EMIP_BCS_IN }} -oceanin 1440x720 -nobkg -lbl -nolcv -bcvout {{ LSMBCS }} -rs 3 -oceanout {{ OCEANOUT }} -in_bc_base @BC_BASE -out_bc_base @BC_BASE >>>EMIP_OLDLAND<<>>EMIP_OLDLAND<<>>EMIP_OLDLAND<<<$GEOSBIN/gogo.x -s $RSTID.Chem_Registry.rc.${year}${month}${day}_21z -t $EXPDIR/RC/Chem_Registry.rc -i gocart_internal_rst.merra2 -o gocart_internal_rst -r C${AGCM_IM} -l ${AGCM_LM} @@ -269,9 +269,9 @@ if (-z input.nml) then exit 0 endif -@MOM6cp -f $HOMDIR/MOM_input . -@MOM6cp -f $HOMDIR/MOM_override . -@CICE6cp -f $HOMDIR/ice_in . +{{ MOM6 }}cp -f $HOMDIR/MOM_input . +{{ MOM6 }}cp -f $HOMDIR/MOM_override . +{{ CICE6 }}cp -f $HOMDIR/ice_in . if( $GCMEMIP == TRUE ) then cp -f $EXPDIR/restarts/$RSTDATE/cap_restart . @@ -315,16 +315,16 @@ done: ####################################################################### # Link Boundary Datasets ####################################################################### -setenv BCSDIR @BCSDIR -@DATAOCEANsetenv SSTDIR @SSTDIR -setenv BCRSLV @ATMOStag_@OCEANtag -@MOM5setenv SSTDIR @COUPLEDIR/SST/MERRA2/${OGCM_IM}x${OGCM_JM}/v1 -@MOM6setenv SSTDIR @COUPLEDIR/SST/MERRA2/${OGCM_IM}x${OGCM_JM}/v1 +setenv BCSDIR {{ BCSDIR }} +{{ DATAOCEAN }}setenv SSTDIR {{ SSTDIR }} +setenv BCRSLV {{ ATMOStag }}_{{ OCEANtag }} +{{ MOM5 }}setenv SSTDIR {{ COUPLEDIR }}/SST/MERRA2/${OGCM_IM}x${OGCM_JM}/v1 +{{ MOM6 }}setenv SSTDIR {{ COUPLEDIR }}/SST/MERRA2/${OGCM_IM}x${OGCM_JM}/v1 #this is hard-wired for NAS for now - should make it more general -@DATAOCEANsetenv BCTAG `basename $BCSDIR` -@COUPLEDsetenv BCTAG `basename @COUPLEDIR/@OCNMODEL/${OGCM_IM}x${OGCM_JM}` -setenv EMISSIONS @EMISSIONS +{{ DATAOCEAN }}setenv BCTAG `basename $BCSDIR` +{{ COUPLED }}setenv BCTAG `basename {{ COUPLEDIR }}/{{ OCNMODEL }}/${OGCM_IM}x${OGCM_JM}` +setenv EMISSIONS {{ EMISSIONS }} chmod +x linkbcs >>>GCMRUN_CATCHCN<< $yearc ) then @ yearf = $yearc + 1 @ nymdf = $yearf * 10000 + 0101 @@ -773,16 +773,16 @@ if ( $DNA_TRUE == 0 && -e DNA_ExtData.rc ) /bin/mv set ACHEM_TRUE = `grep -i '^\s*ENABLE_ACHEM:\s*\.TRUE\.' 
GEOS_ChemGridComp.rc | wc -l` if ( $ACHEM_TRUE == 0 && -e GEOSachem_ExtData.rc ) /bin/mv GEOSachem_ExtData.rc GEOSachem_ExtData.rc.NOT_USED -@MP_TURN_OFF_WSUB_EXTDATA# 1MOM and GFDL microphysics do not use WSUB_CLIM -@MP_TURN_OFF_WSUB_EXTDATA# ------------------------------------------------- +{{ MP_TURN_OFF_WSUB_EXTDATA }}# 1MOM and GFDL microphysics do not use WSUB_CLIM +{{ MP_TURN_OFF_WSUB_EXTDATA }}# ------------------------------------------------- if ($EXTDATA2G_TRUE == 0 ) then - @MP_TURN_OFF_WSUB_EXTDATA/bin/mv WSUB_ExtData.rc WSUB_ExtData.tmp - @MP_TURN_OFF_WSUB_EXTDATAcat WSUB_ExtData.tmp | sed -e '/^WSUB_CLIM/ s#ExtData.*#/dev/null#' > WSUB_ExtData.rc + {{ MP_TURN_OFF_WSUB_EXTDATA }}/bin/mv WSUB_ExtData.rc WSUB_ExtData.tmp + {{ MP_TURN_OFF_WSUB_EXTDATA }}cat WSUB_ExtData.tmp | sed -e '/^WSUB_CLIM/ s#ExtData.*#/dev/null#' > WSUB_ExtData.rc else - @MP_TURN_OFF_WSUB_EXTDATA/bin/mv WSUB_ExtData.yaml WSUB_ExtData.tmp - @MP_TURN_OFF_WSUB_EXTDATAcat WSUB_ExtData.tmp | sed -e '/collection:/ s#WSUB_SWclim.*#/dev/null#' > WSUB_ExtData.yaml + {{ MP_TURN_OFF_WSUB_EXTDATA }}/bin/mv WSUB_ExtData.yaml WSUB_ExtData.tmp + {{ MP_TURN_OFF_WSUB_EXTDATA }}cat WSUB_ExtData.tmp | sed -e '/collection:/ s#WSUB_SWclim.*#/dev/null#' > WSUB_ExtData.yaml endif -@MP_TURN_OFF_WSUB_EXTDATA/bin/rm WSUB_ExtData.tmp +{{ MP_TURN_OFF_WSUB_EXTDATA }}/bin/rm WSUB_ExtData.tmp # Generate the complete ExtData.rc # -------------------------------- @@ -853,8 +853,8 @@ else # Run the script # -------------- - @SINGULARITY_BUILD $RUN_CMD 1 $SINGULARITY_RUN $GEOSBIN/SaltIntSplitter tile.data $SCRDIR/saltwater_internal_rst - @NATIVE_BUILD $RUN_CMD 1 $GEOSBIN/SaltIntSplitter tile.data $SCRDIR/saltwater_internal_rst + {{ SINGULARITY_BUILD }} $RUN_CMD 1 $SINGULARITY_RUN $GEOSBIN/SaltIntSplitter tile.data $SCRDIR/saltwater_internal_rst + {{ NATIVE_BUILD }} $RUN_CMD 1 $GEOSBIN/SaltIntSplitter tile.data $SCRDIR/saltwater_internal_rst # Move restarts # ------------- @@ -892,8 +892,8 @@ endif if ( -x $GEOSBIN/rs_numtiles.x ) then set N_OPENW_TILES_EXPECTED = `grep '^\s*0' tile.data | wc -l` - @SINGULARITY_BUILD set N_OPENW_TILES_FOUND = `$RUN_CMD 1 $SINGULARITY_RUN $GEOSBIN/rs_numtiles.x openwater_internal_rst | grep Total | awk '{print $NF}'` - @NATIVE_BUILD set N_OPENW_TILES_FOUND = `$RUN_CMD 1 $GEOSBIN/rs_numtiles.x openwater_internal_rst | grep Total | awk '{print $NF}'` + {{ SINGULARITY_BUILD }} set N_OPENW_TILES_FOUND = `$RUN_CMD 1 $SINGULARITY_RUN $GEOSBIN/rs_numtiles.x openwater_internal_rst | grep Total | awk '{print $NF}'` + {{ NATIVE_BUILD }} set N_OPENW_TILES_FOUND = `$RUN_CMD 1 $GEOSBIN/rs_numtiles.x openwater_internal_rst | grep Total | awk '{print $NF}'` if ( $N_OPENW_TILES_EXPECTED != $N_OPENW_TILES_FOUND ) then echo "Error! Found $N_OPENW_TILES_FOUND tiles in openwater. Expect to find $N_OPENW_TILES_EXPECTED tiles." 
@@ -933,7 +933,7 @@ endif @SETENVS -@MPT_SHEPHERD +{{ MPT_SHEPHERD }} # Run bundleParser.py #--------------------- @@ -968,75 +968,75 @@ endif # Establish safe default number of OpenMP threads # ----------------------------------------------- -@MIT # --------------------------------------------------- -@MIT # For MITgcm restarts - before running GEOSgcm.x -@MIT # --------------------------------------------------- -@MIT -@MIT # set time interval for segment in seconds -@MIT -@MIT set yearc = `echo $nymdc | cut -c1-4` -@MIT set monthc = `echo $nymdc | cut -c5-6` -@MIT set dayc = `echo $nymdc | cut -c7-8` -@MIT set hourc = `echo $nhmsc | cut -c1-2` -@MIT set minutec = `echo $nhmsc | cut -c3-4` -@MIT set secondc = `echo $nhmsc | cut -c5-6` -@MIT -@MIT set yearf = `echo $nymdf | cut -c1-4` -@MIT set monthf = `echo $nymdf | cut -c5-6` -@MIT set dayf = `echo $nymdf | cut -c7-8` -@MIT set hourf = `echo $nhmsf | cut -c1-2` -@MIT set minutef = `echo $nhmsf | cut -c3-4` -@MIT set secondf = `echo $nhmsf | cut -c5-6` -@MIT -@MIT set yearf = `echo $nymdf | cut -c1-4` -@MIT -@MIT set time1 = `date -u -d "${yearc}-${monthc}-${dayc}T${hourc}:${minutec}:${secondc}" "+%s"` -@MIT set time2 = `date -u -d "${yearf}-${monthf}-${dayf}T${hourf}:${minutef}:${secondf}" "+%s"` -@MIT -@MIT @ mitdt = $time2 - $time1 -@MIT echo "Segment time: $mitdt" -@MIT -@MIT -@MIT # Set-up MITgcm run directory -@MIT if (! -e mitocean_run) mkdir -p mitocean_run -@MIT cd mitocean_run -@MIT -@MIT # link mit configuration and initialization files -@MIT ln -sf $EXPDIR/mit_input/* . -@MIT # link mitgcm restarts if exist -@MIT /bin/ln -sf $EXPDIR/restarts/pic* . -@MIT # make an archive folder for mitgcm run -@MIT mkdir $EXPDIR/mit_output -@MIT -@MIT # Calculate segment time steps -@MIT set mit_nTimeSteps = `cat ${SCRDIR}/AGCM.rc | grep OGCM_RUN_DT: | cut -d: -f2 | tr -s " " | cut -d" " -f2` -@MIT @ mit_nTimeSteps = ${mitdt} / $mit_nTimeSteps -@MIT -@MIT #change namelist variables in data - nTimeSteps, chkptFreq and monitorFreq -@MIT sed -i "s/nTimeSteps.*/nTimeSteps = ${mit_nTimeSteps},/" data -@MIT sed -i "s/chkptFreq.*/chkptFreq = ${mitdt}.0,/" data -@MIT sed -i "s/pChkptFreq.*/pChkptFreq = ${mitdt}.0,/" data -@MIT # get nIter0 -@MIT -@MIT if (! -e ${EXPDIR}/restarts/MITgcm_restart_dates.txt ) then -@MIT set nIter0 = `grep nIter0 data | tr -s " " | cut -d"=" -f2 | cut -d"," -f1 | awk '{$1=$1;print}'` -@MIT else -@MIT set nIter0 = `grep "$nymdc $nhmsc" ${EXPDIR}/restarts/MITgcm_restart_dates.txt | cut -d" " -f5` -@MIT if ( $nIter0 == "" ) then -@MIT echo "No ocean restart file for $nymdc $nhmsc, exiting" -@MIT echo "If this is a new initialized experiment, delete:" -@MIT echo "${EXPDIR}/restarts/MITgcm_restart_dates.txt" -@MIT echo "and restart" -@MIT exit -@MIT else -@MIT sed -i "s/nIter0.*/ nIter0 = ${nIter0},/" data -@MIT endif -@MIT endif -@MIT -@MIT cd .. 
-@MIT # --------------------------------------------------- -@MIT # End MITgcm restarts - before running GEOSgcm.x -@MIT # --------------------------------------------------- +{{ MIT }} # --------------------------------------------------- +{{ MIT }} # For MITgcm restarts - before running GEOSgcm.x +{{ MIT }} # --------------------------------------------------- +{{ MIT }} +{{ MIT }} # set time interval for segment in seconds +{{ MIT }} +{{ MIT }} set yearc = `echo $nymdc | cut -c1-4` +{{ MIT }} set monthc = `echo $nymdc | cut -c5-6` +{{ MIT }} set dayc = `echo $nymdc | cut -c7-8` +{{ MIT }} set hourc = `echo $nhmsc | cut -c1-2` +{{ MIT }} set minutec = `echo $nhmsc | cut -c3-4` +{{ MIT }} set secondc = `echo $nhmsc | cut -c5-6` +{{ MIT }} +{{ MIT }} set yearf = `echo $nymdf | cut -c1-4` +{{ MIT }} set monthf = `echo $nymdf | cut -c5-6` +{{ MIT }} set dayf = `echo $nymdf | cut -c7-8` +{{ MIT }} set hourf = `echo $nhmsf | cut -c1-2` +{{ MIT }} set minutef = `echo $nhmsf | cut -c3-4` +{{ MIT }} set secondf = `echo $nhmsf | cut -c5-6` +{{ MIT }} +{{ MIT }} set yearf = `echo $nymdf | cut -c1-4` +{{ MIT }} +{{ MIT }} set time1 = `date -u -d "${yearc}-${monthc}-${dayc}T${hourc}:${minutec}:${secondc}" "+%s"` +{{ MIT }} set time2 = `date -u -d "${yearf}-${monthf}-${dayf}T${hourf}:${minutef}:${secondf}" "+%s"` +{{ MIT }} +{{ MIT }} @ mitdt = $time2 - $time1 +{{ MIT }} echo "Segment time: $mitdt" +{{ MIT }} +{{ MIT }} +{{ MIT }} # Set-up MITgcm run directory +{{ MIT }} if (! -e mitocean_run) mkdir -p mitocean_run +{{ MIT }} cd mitocean_run +{{ MIT }} +{{ MIT }} # link mit configuration and initialization files +{{ MIT }} ln -sf $EXPDIR/mit_input/* . +{{ MIT }} # link mitgcm restarts if exist +{{ MIT }} /bin/ln -sf $EXPDIR/restarts/pic* . +{{ MIT }} # make an archive folder for mitgcm run +{{ MIT }} mkdir $EXPDIR/mit_output +{{ MIT }} +{{ MIT }} # Calculate segment time steps +{{ MIT }} set mit_nTimeSteps = `cat ${SCRDIR}/AGCM.rc | grep OGCM_RUN_DT: | cut -d: -f2 | tr -s " " | cut -d" " -f2` +{{ MIT }} @ mit_nTimeSteps = ${mitdt} / $mit_nTimeSteps +{{ MIT }} +{{ MIT }} #change namelist variables in data - nTimeSteps, chkptFreq and monitorFreq +{{ MIT }} sed -i "s/nTimeSteps.*/nTimeSteps = ${mit_nTimeSteps},/" data +{{ MIT }} sed -i "s/chkptFreq.*/chkptFreq = ${mitdt}.0,/" data +{{ MIT }} sed -i "s/pChkptFreq.*/pChkptFreq = ${mitdt}.0,/" data +{{ MIT }} # get nIter0 +{{ MIT }} +{{ MIT }} if (! -e ${EXPDIR}/restarts/MITgcm_restart_dates.txt ) then +{{ MIT }} set nIter0 = `grep nIter0 data | tr -s " " | cut -d"=" -f2 | cut -d"," -f1 | awk '{$1=$1;print}'` +{{ MIT }} else +{{ MIT }} set nIter0 = `grep "$nymdc $nhmsc" ${EXPDIR}/restarts/MITgcm_restart_dates.txt | cut -d" " -f5` +{{ MIT }} if ( $nIter0 == "" ) then +{{ MIT }} echo "No ocean restart file for $nymdc $nhmsc, exiting" +{{ MIT }} echo "If this is a new initialized experiment, delete:" +{{ MIT }} echo "${EXPDIR}/restarts/MITgcm_restart_dates.txt" +{{ MIT }} echo "and restart" +{{ MIT }} exit +{{ MIT }} else +{{ MIT }} sed -i "s/nIter0.*/ nIter0 = ${nIter0},/" data +{{ MIT }} endif +{{ MIT }} endif +{{ MIT }} +{{ MIT }} cd .. 
+{{ MIT }} # --------------------------------------------------- +{{ MIT }} # End MITgcm restarts - before running GEOSgcm.x +{{ MIT }} # --------------------------------------------------- # Set OMP_NUM_THREADS # ------------------- @@ -1054,8 +1054,8 @@ else set IOSERVER_EXTRA = "" endif -@SINGULARITY_BUILD @OCEAN_PRELOAD $RUN_CMD $TOTAL_PES $SINGULARITY_RUN $GEOSEXE $IOSERVER_OPTIONS $IOSERVER_EXTRA --logging_config 'logging.yaml' -@NATIVE_BUILD @OCEAN_PRELOAD $RUN_CMD $TOTAL_PES $GEOSEXE $IOSERVER_OPTIONS $IOSERVER_EXTRA --logging_config 'logging.yaml' +{{ SINGULARITY_BUILD }} {{ OCEAN_PRELOAD }} $RUN_CMD $TOTAL_PES $SINGULARITY_RUN $GEOSEXE $IOSERVER_OPTIONS $IOSERVER_EXTRA --logging_config 'logging.yaml' +{{ NATIVE_BUILD }} {{ OCEAN_PRELOAD }} $RUN_CMD $TOTAL_PES $GEOSEXE $IOSERVER_OPTIONS $IOSERVER_EXTRA --logging_config 'logging.yaml' if( $USE_SHMEM == 1 ) $GEOSBIN/RmShmKeys_sshmpi.csh >& /dev/null @@ -1066,86 +1066,86 @@ else endif echo GEOSgcm Run Status: $rc -@MIT # --------------------------------------------------- -@MIT # For MITgcm restarts - after running GEOSgcm.x -@MIT # --------------------------------------------------- -@MIT -@MIT set STEADY_STATE_OCEAN=`grep STEADY_STATE_OCEAN AGCM.rc | cut -d':' -f2 | tr -d " "` -@MIT -@MIT # update ocean only if activated. Otherwize use the same pickups (passive ocean). -@MIT if ( ${STEADY_STATE_OCEAN} != 0 ) then -@MIT -@MIT if ( ${rc} == 0 ) then -@MIT -@MIT # Update nIter0 for next segment -@MIT set znIter00 = `echo $nIter0 | awk '{printf("%010d",$1)}'` -@MIT @ nIter0 = $nIter0 + $mit_nTimeSteps -@MIT set znIter0 = `echo $nIter0 | awk '{printf("%010d",$1)}'` -@MIT -@MIT # to update MITgcm restart list file -@MIT sed -i "/${nIter0}/d" ${EXPDIR}/restarts/MITgcm_restart_dates.txt -@MIT echo "Date_GEOS5 $nymdf $nhmsf NITER0_MITgcm ${nIter0}" >> ${EXPDIR}/restarts/MITgcm_restart_dates.txt -@MIT -@MIT /bin/mv $SCRDIR/mitocean_run/STDOUT.0000 $EXPDIR/mit_output/STDOUT.${znIter00} -@MIT -@MIT endif -@MIT -@MIT cd $SCRDIR/mitocean_run -@MIT -@MIT # Check existance of roling pickups -@MIT set nonomatch rp = ( pickup*ckptA* ) -@MIT echo $rp -@MIT # Rename and move them if exist -@MIT if ( -e $rp[1] ) then -@MIT set timeStepNumber=`cat pickup.ckptA.meta | grep timeStepNumber | tr -s " " | cut -d" " -f5 | awk '{printf("%010d",$1)}'` -@MIT foreach fname ( pickup*ckptA* ) -@MIT set bname = `echo ${fname} | cut -d "." -f1 | cut -d "/" -f2` -@MIT set aname = `echo ${fname} | cut -d "." -f3` -@MIT echo $EXPDIR/restarts/${bname}.${timeStepNumber}.${aname} -@MIT /bin/mv ${fname} $EXPDIR/restarts/${bname}.${timeStepNumber}.${aname} -@MIT end -@MIT endif -@MIT -@MIT # Check existance of permanent pickups -@MIT set nonomatch pp = ( pickup* ) -@MIT echo $pp -@MIT # Move them if exist -@MIT if ( -e $pp[1] ) then -@MIT foreach fname ( pickup* ) -@MIT if ( ! 
-e $EXPDIR/restarts/${fname} ) /bin/mv ${fname} $EXPDIR/restarts/${fname} -@MIT end -@MIT endif -@MIT -@MIT /bin/mv T.* $EXPDIR/mit_output/ -@MIT /bin/mv S.* $EXPDIR/mit_output/ -@MIT /bin/mv U.* $EXPDIR/mit_output/ -@MIT /bin/mv V.* $EXPDIR/mit_output/ -@MIT /bin/mv W.* $EXPDIR/mit_output/ -@MIT /bin/mv PH* $EXPDIR/mit_output/ -@MIT /bin/mv Eta.* $EXPDIR/mit_output/ -@MIT -@MIT /bin/mv AREA.* $EXPDIR/mit_output/ -@MIT /bin/mv HEFF.* $EXPDIR/mit_output/ -@MIT /bin/mv HSNOW.* $EXPDIR/mit_output/ -@MIT /bin/mv UICE.* $EXPDIR/mit_output/ -@MIT /bin/mv VICE.* $EXPDIR/mit_output/ -@MIT -@MIT #copy mit output to mit_output -@MIT foreach i (`grep -i filename data.diagnostics | grep "^ " | cut -d"=" -f2 | cut -d"'" -f2 | awk '{$1=$1;print}'`) -@MIT /bin/mv ${i}* $EXPDIR/mit_output/ -@MIT end -@MIT -@MIT foreach i (`grep -i stat_fName data.diagnostics | grep "^ " | cut -d"=" -f2 | cut -d"'" -f2 | awk '{$1=$1;print}'`) -@MIT /bin/mv ${i}* $EXPDIR/mit_output/ -@MIT end -@MIT -@MIT cd $SCRDIR -@MIT -@MIT endif -@MIT -@MIT # --------------------------------------------------- -@MIT # End MITgcm restarts - after running GEOSgcm.x -@MIT # --------------------------------------------------- +{{ MIT }} # --------------------------------------------------- +{{ MIT }} # For MITgcm restarts - after running GEOSgcm.x +{{ MIT }} # --------------------------------------------------- +{{ MIT }} +{{ MIT }} set STEADY_STATE_OCEAN=`grep STEADY_STATE_OCEAN AGCM.rc | cut -d':' -f2 | tr -d " "` +{{ MIT }} +{{ MIT }} # update ocean only if activated. Otherwize use the same pickups (passive ocean). +{{ MIT }} if ( ${STEADY_STATE_OCEAN} != 0 ) then +{{ MIT }} +{{ MIT }} if ( ${rc} == 0 ) then +{{ MIT }} +{{ MIT }} # Update nIter0 for next segment +{{ MIT }} set znIter00 = `echo $nIter0 | awk '{printf("%010d",$1)}'` +{{ MIT }} @ nIter0 = $nIter0 + $mit_nTimeSteps +{{ MIT }} set znIter0 = `echo $nIter0 | awk '{printf("%010d",$1)}'` +{{ MIT }} +{{ MIT }} # to update MITgcm restart list file +{{ MIT }} sed -i "/${nIter0}/d" ${EXPDIR}/restarts/MITgcm_restart_dates.txt +{{ MIT }} echo "Date_GEOS5 $nymdf $nhmsf NITER0_MITgcm ${nIter0}" >> ${EXPDIR}/restarts/MITgcm_restart_dates.txt +{{ MIT }} +{{ MIT }} /bin/mv $SCRDIR/mitocean_run/STDOUT.0000 $EXPDIR/mit_output/STDOUT.${znIter00} +{{ MIT }} +{{ MIT }} endif +{{ MIT }} +{{ MIT }} cd $SCRDIR/mitocean_run +{{ MIT }} +{{ MIT }} # Check existance of roling pickups +{{ MIT }} set nonomatch rp = ( pickup*ckptA* ) +{{ MIT }} echo $rp +{{ MIT }} # Rename and move them if exist +{{ MIT }} if ( -e $rp[1] ) then +{{ MIT }} set timeStepNumber=`cat pickup.ckptA.meta | grep timeStepNumber | tr -s " " | cut -d" " -f5 | awk '{printf("%010d",$1)}'` +{{ MIT }} foreach fname ( pickup*ckptA* ) +{{ MIT }} set bname = `echo ${fname} | cut -d "." -f1 | cut -d "/" -f2` +{{ MIT }} set aname = `echo ${fname} | cut -d "." -f3` +{{ MIT }} echo $EXPDIR/restarts/${bname}.${timeStepNumber}.${aname} +{{ MIT }} /bin/mv ${fname} $EXPDIR/restarts/${bname}.${timeStepNumber}.${aname} +{{ MIT }} end +{{ MIT }} endif +{{ MIT }} +{{ MIT }} # Check existance of permanent pickups +{{ MIT }} set nonomatch pp = ( pickup* ) +{{ MIT }} echo $pp +{{ MIT }} # Move them if exist +{{ MIT }} if ( -e $pp[1] ) then +{{ MIT }} foreach fname ( pickup* ) +{{ MIT }} if ( ! 
-e $EXPDIR/restarts/${fname} ) /bin/mv ${fname} $EXPDIR/restarts/${fname} +{{ MIT }} end +{{ MIT }} endif +{{ MIT }} +{{ MIT }} /bin/mv T.* $EXPDIR/mit_output/ +{{ MIT }} /bin/mv S.* $EXPDIR/mit_output/ +{{ MIT }} /bin/mv U.* $EXPDIR/mit_output/ +{{ MIT }} /bin/mv V.* $EXPDIR/mit_output/ +{{ MIT }} /bin/mv W.* $EXPDIR/mit_output/ +{{ MIT }} /bin/mv PH* $EXPDIR/mit_output/ +{{ MIT }} /bin/mv Eta.* $EXPDIR/mit_output/ +{{ MIT }} +{{ MIT }} /bin/mv AREA.* $EXPDIR/mit_output/ +{{ MIT }} /bin/mv HEFF.* $EXPDIR/mit_output/ +{{ MIT }} /bin/mv HSNOW.* $EXPDIR/mit_output/ +{{ MIT }} /bin/mv UICE.* $EXPDIR/mit_output/ +{{ MIT }} /bin/mv VICE.* $EXPDIR/mit_output/ +{{ MIT }} +{{ MIT }} #copy mit output to mit_output +{{ MIT }} foreach i (`grep -i filename data.diagnostics | grep "^ " | cut -d"=" -f2 | cut -d"'" -f2 | awk '{$1=$1;print}'`) +{{ MIT }} /bin/mv ${i}* $EXPDIR/mit_output/ +{{ MIT }} end +{{ MIT }} +{{ MIT }} foreach i (`grep -i stat_fName data.diagnostics | grep "^ " | cut -d"=" -f2 | cut -d"'" -f2 | awk '{$1=$1;print}'`) +{{ MIT }} /bin/mv ${i}* $EXPDIR/mit_output/ +{{ MIT }} end +{{ MIT }} +{{ MIT }} cd $SCRDIR +{{ MIT }} +{{ MIT }} endif +{{ MIT }} +{{ MIT }} # --------------------------------------------------- +{{ MIT }} # End MITgcm restarts - after running GEOSgcm.x +{{ MIT }} # --------------------------------------------------- ####################################################################### @@ -1155,8 +1155,8 @@ echo GEOSgcm Run Status: $rc set edate = e`awk '{print $1}' cap_restart`_`awk '{print $2}' cap_restart | cut -c1-2`z -@COUPLED cp -r RESTART ${EXPDIR}/restarts/RESTART.${edate} -@COUPLED cp RESTART/* INPUT +{{ COUPLED }} cp -r RESTART ${EXPDIR}/restarts/RESTART.${edate} +{{ COUPLED }} cp RESTART/* INPUT # Move Intermediate Checkpoints to RESTARTS directory # --------------------------------------------------- @@ -1224,10 +1224,10 @@ endif # --------------------- cd $EXPDIR/restarts if( $FSEGMENT == 00000000 ) then - @DATAOCEAN tar cf restarts.${edate}.tar $EXPID.*.${edate}.${GCMVER}.${BCTAG}_${BCRSLV}.* - @COUPLED tar cvf restarts.${edate}.tar $EXPID.*.${edate}.${GCMVER}.${BCTAG}_${BCRSLV}.* RESTART.${edate} + {{ DATAOCEAN }} tar cf restarts.${edate}.tar $EXPID.*.${edate}.${GCMVER}.${BCTAG}_${BCRSLV}.* + {{ COUPLED }} tar cvf restarts.${edate}.tar $EXPID.*.${edate}.${GCMVER}.${BCTAG}_${BCRSLV}.* RESTART.${edate} /bin/rm -rf `/bin/ls -d -1 $EXPID.*.${edate}.${GCMVER}.${BCTAG}_${BCRSLV}.*` - @COUPLED /bin/rm -rf RESTART.${edate} + {{ COUPLED }} /bin/rm -rf RESTART.${edate} endif @@ -1242,36 +1242,36 @@ foreach collection ( $collections ) /bin/mv `/bin/ls -1 *.${collection}.*` $EXPDIR/holding/$collection end -@COUPLED # MOM-Specific Output Files -@COUPLED # ------------------------- -@MOM5 set dsets="ocean_month" -@MOM6 set dsets="ocean_state prog_z sfc_ave forcing" -@MOM5 foreach dset ( $dsets ) -@MOM5 set num = `/bin/ls -1 $dset.nc | wc -l` -@MOM5 if($num != 0) then -@MOM5 if(! -e $EXPDIR/MOM_Output) mkdir -p $EXPDIR/MOM_Output -@MOM5 /bin/mv $SCRDIR/$dset.nc $EXPDIR/MOM_Output/$dset.${edate}.nc -@MOM5 endif -@MOM5 end -@MOM6 foreach dset ( $dsets ) -@MOM6 set num = `/bin/ls -1 $dset.nc | wc -l` -@MOM6 if($num != 0) then -@MOM6 if(! 
-e $EXPDIR/MOM_Output) mkdir -p $EXPDIR/MOM_Output -@MOM6 /bin/mv $SCRDIR/$dset.nc $EXPDIR/MOM_Output/$dset.${edate}.nc -@MOM6 endif -@MOM6 end - -@CICE6 # CICE6-Specific Output Files -@CICE6 # ------------------------- -@CICE6 set dsets="iceh" -@CICE6 foreach dset ( $dsets ) -@CICE6 set num = `/bin/ls -1 $dset.*.nc | wc -l` -@CICE6 if($num != 0) then -@CICE6 if(! -e $EXPDIR/CICE_Output) mkdir -p $EXPDIR/CICE_Output -@CICE6 /bin/mv $SCRDIR/$dset.*.nc $EXPDIR/CICE_Output/ -@CICE6 endif -@CICE6 end -@CICE6 +{{ COUPLED }} # MOM-Specific Output Files +{{ COUPLED }} # ------------------------- +{{ MOM5 }} set dsets="ocean_month" +{{ MOM6 }} set dsets="ocean_state prog_z sfc_ave forcing" +{{ MOM5 }} foreach dset ( $dsets ) +{{ MOM5 }} set num = `/bin/ls -1 $dset.nc | wc -l` +{{ MOM5 }} if($num != 0) then +{{ MOM5 }} if(! -e $EXPDIR/MOM_Output) mkdir -p $EXPDIR/MOM_Output +{{ MOM5 }} /bin/mv $SCRDIR/$dset.nc $EXPDIR/MOM_Output/$dset.${edate}.nc +{{ MOM5 }} endif +{{ MOM5 }} end +{{ MOM6 }} foreach dset ( $dsets ) +{{ MOM6 }} set num = `/bin/ls -1 $dset.nc | wc -l` +{{ MOM6 }} if($num != 0) then +{{ MOM6 }} if(! -e $EXPDIR/MOM_Output) mkdir -p $EXPDIR/MOM_Output +{{ MOM6 }} /bin/mv $SCRDIR/$dset.nc $EXPDIR/MOM_Output/$dset.${edate}.nc +{{ MOM6 }} endif +{{ MOM6 }} end + +{{ CICE6 }} # CICE6-Specific Output Files +{{ CICE6 }} # ------------------------- +{{ CICE6 }} set dsets="iceh" +{{ CICE6 }} foreach dset ( $dsets ) +{{ CICE6 }} set num = `/bin/ls -1 $dset.*.nc | wc -l` +{{ CICE6 }} if($num != 0) then +{{ CICE6 }} if(! -e $EXPDIR/CICE_Output) mkdir -p $EXPDIR/CICE_Output +{{ CICE6 }} /bin/mv $SCRDIR/$dset.*.nc $EXPDIR/CICE_Output/ +{{ CICE6 }} endif +{{ CICE6 }} end +{{ CICE6 }} ####################################################################### # Run Post-Processing and Forecasts ####################################################################### @@ -1342,13 +1342,13 @@ else endif endif -@COUPLED cp -rf RESTART $EXPDIR +{{ COUPLED }} cp -rf RESTART $EXPDIR if ( $rc == 0 ) then cd $HOMDIR if ( $GCMEMIP == TRUE ) then - if( $capdate < $enddate ) @BATCH_CMD $HOMDIR/gcm_run.j$RSTDATE + if( $capdate < $enddate ) {{ BATCH_CMD }} $HOMDIR/gcm_run.j$RSTDATE else - if( $capdate < $enddate ) @BATCH_CMD $HOMDIR/gcm_run.j + if( $capdate < $enddate ) {{ BATCH_CMD }} $HOMDIR/gcm_run.j endif endif diff --git a/gcmpy/jinjafy.py b/gcmpy/jinjafy.py new file mode 100755 index 00000000..1dd0efef --- /dev/null +++ b/gcmpy/jinjafy.py @@ -0,0 +1,250 @@ +''' +The purpose of this script is to "jinja-fy" the template files affected by gcm_setup. + +Jinja can't process items unless they are enclosed on both sides with an +identifier. The old/current way to identify template items is "@ITEM_NAME". +This script will replace all these instances with "{{ ITEM_NAME }}". + +Variables templated at run time (identified with ">>>ITEM<<<") are kept +the same but included in case someone wants this changed. + +This script only needs to be run once. 
+''' + +import os + +def replace_strings_in_file(file_path, replacements): + # Read the content of the file + with open(file_path, 'r') as file: + content = file.read() + + # Replace all occurrences of the keys with their corresponding values + for key, value in replacements.items(): + content = content.replace(key, value) + + # Write the modified content back to the file + with open(file_path, 'w') as file: + file.write(content) + +def main(file_list, replacements): + for file_path in file_list: + file_path = f"{os.path.dirname(os.getcwd())}/{file_path}" + if os.path.isfile(file_path): + replace_strings_in_file(file_path, replacements) + else: + print(f"File {file_path} not found.") + +if __name__ == "__main__": + # List of files to process + file_list = [ + 'gcm_run.j', + 'gcm_post.j', + 'gcm_archive.j', + 'gcm_regress.j', + 'gcm_plot.tmpl', + 'gcm_quickplot.csh', + 'gcm_moveplot.j', + 'gcm_forecast.tmpl', + 'gcm_forecast.setup', + 'gcm_emip.setup', + 'CAP.rc.tmpl', + 'AGCM.rc.tmpl', + 'HISTORY.rc.tmpl', + 'logging.yaml', + 'fvcore_layout.rc' + ] + + # Dictionary with keys to find and values to replace + replacements = { + '@GCMVER': '{{ GCMVER }}', + '@EXPSRC': '{{ EXPSRC }}', + '@EXPID': '{{ EXPID }}', + '@RUN_N': '{{ RUN_N }}', + '@RUN_FN': '{{ RUN_FN }}', + '@RUN_FT': '{{ RUN_FT }}', + '@RUN_T': '{{ RUN_T }}', + '@RUN_P': '{{ RUN_P }}', + '@RUN_FP': '{{ RUN_FP }}', + '@RUN_Q': '{{ RUN_Q }}', + '@POST_N': '{{ POST_N }}', + '@POST_T': '{{ POST_T }}', + '@POST_P': '{{ POST_P }}', + '@POST_Q': '{{ POST_Q }}', + '@MOVE_N': '{{ MOVE_N }}', + '@PLOT_N': '{{ PLOT_N }}', + '@PLOT_T': '{{ PLOT_T }}', + '@PLOT_P': '{{ PLOT_P }}', + '@PLOT_Q': '{{ PLOT_Q }}', + '@MOVE_Q': '{{ MOVE_Q }}', + '@MOVE_P': '{{ MOVE_P }}', + '@ARCHIVE_N': '{{ ARCHIVE_N }}', + '@ARCHIVE_T': '{{ ARCHIVE_T }}', + '@ARCHIVE_P': '{{ ARCHIVE_P }}', + '@ARCHIVE_Q': '{{ ARCHIVE_Q }}', + '@REGRESS_N': '{{ REGRESS_N }}', + '@BCSDIR': '{{ BCSDIR }}', + '@SSTDIR': '{{ SSTDIR }}', + '@SSTNAME': '{{ SSTNAME }}', + '@OCEANOUT': '{{ OCEANOUT }}', + '@LSMBCS': '{{ LSMBCS }}', + '@EMIP_BCS_IN': '{{ EMIP_BCS_IN }}', + '@EMIP_MERRA2': '{{ EMIP_MERRA2 }}', + '@BCSTAG': '{{ BCSTAG }}', + '@SSTFILE': '{{ SSTFILE }}', + '@ICEFILE': '{{ ICEFILE }}', + '@KPARFILE': '{{ KPARFILE }}', + '@CHMDIR': '{{ CHMDIR }}', + '@COUPLEDIR': '{{ COUPLEDIR }}', + '@shared_COUPLED': '{{ shared_COUPLED }}', + '@GWDRSDIR': '{{ GWDRSDIR }}', + '@EXPDIR': '{{ EXPDIR }}', + '@EXPDSC': '{{ EXPDSC }}', + '@HOMDIR': '{{ HOMDIR }}', + '@BATCH_GROUP': '{{ BATCH_GROUP }}', + '@BATCH_TIME': '{{ BATCH_TIME }}', + '@BATCH_CMD': '{{ BATCH_CMD }}', + '@BATCH_JOBNAME': '{{ BATCH_JOBNAME }}', + '@BATCH_OUTPUTNAME': '{{ BATCH_OUTPUTNAME }}', + '@BATCH_JOINOUTERR': '{{ BATCH_JOINOUTERR }}', + '@SITE': '{{ SITE }}', + '@GEOSDIR': '{{ GEOSDIR }}', + '@GEOSSRC': '{{ GEOSSRC }}', + '@GEOSBIN': '{{ GEOSBIN }}', + '@GEOSETC': '{{ GEOSETC }}', + '@GEOSUTIL': '{{ GEOSUTIL }}', + '@SINGULARITY_BUILD': '{{ SINGULARITY_BUILD }}', + '@NATIVE_BUILD': '{{ NATIVE_BUILD }}', + '@MPT_SHEPHERD': '{{ MPT_SHEPHERD }}', + '@SINGULARITY_SANDBOX': '{{ SINGULARITY_SANDBOX }}', + '@REAL_BIND_PATH': '{{ REAL_BIND_PATH }}', + '@BASE_BIND_PATH': '{{ BASE_BIND_PATH }}', + '@BOUNDARY_DIR': '{{ BOUNDARY_DIR }}', + '@CHECKPOINT_TYPE': '{{ CHECKPOINT_TYPE }}', + '@OGCM_NX': '{{ OGCM_NX }}', + '@OGCM_NY': '{{ OGCM_NY }}', + '@OGCM_NPROCS': '{{ OGCM_NPROCS }}', + '@OBSERVER_FRQ': '{{ OBSERVER_FRQ }}', + '@DASTUNING': '{{ DASTUNING }}', + '>>>FORCEDAS<<<': '>>>FORCEDAS<<<', + '>>>FORCEGCM<<<': '>>>FORCEGCM<<<', + 
'@COUPLED': '{{ COUPLED }}', + '@CLDMICRO': '{{ CLDMICRO }}', + '@MOM5': '{{ MOM5 }}', + '@MOM6': '{{ MOM6 }}', + '@OCNMODEL': '{{ OCNMODEL }}', + '@CICE4': '{{ CICE4 }}', + '@CICE6': '{{ CICE6 }}', + '>>>HIST_CICE4<<<': '{{ >>>HIST_CICE4<<< }}', + '@MIT': '{{ MIT }}', + '@DATAOCEAN': '{{ DATAOCEAN }}', + '>>>GOCART<<<': '>>>GOCART<<<', + '@OPS_SPECIES': '{{ OPS_SPECIES }}', + '@CMIP_SPECIES': '{{ CMIP_SPECIES }}', + '@MERRA2OX_SPECIES': '{{ MERRA2OX_SPECIES }}', + '>>>FVCUBED<<<': '>>>FVCUBED<<<', + '@HIST_GOCART': ' {{ HIST_GOCART }}', + '>>>OSTIA<<<': '>>>OSTIA<<<', + '>>>HIST_CATCHCN<<<': '>>>HIST_CATCHCN<<<', + '>>>GCMRUN_CATCHCN<<<': '>>>GCMRUN_CATCHCN<<<', + '>>>EMIP_OLDLAND<<<': '>>>EMIP_OLDLAND<<<', + '>>>EMIP_NEWLAND<<<': '>>>EMIP_NEWLAND<<<', + '@LSM_PARMS': '{{ LSM_PARMS }}', + '@OCEAN_NAME': '{{ OCEAN_NAME }}', + '@OCEAN_PRELOAD': '{{ OCEAN_PRELOAD }}', + '>>>4DIAUDAS<<<': '>>>4DIAUDAS<<<', + '>>>REGULAR_REPLAY<<<': '>>>REGULAR_REPLAY<<<', + '>>>REGULAR_REPLAY_GMAO<<<': '>>>REGULAR_REPLAY_GMAO<<<', + '>>>REGULAR_REPLAY_NCEP<<<': '>>>REGULAR_REPLAY_NCEP<<<', + '>>>REGULAR_REPLAY_ECMWF<<<': '>>>REGULAR_REPLAY_ECMWF<<<', +'ana4replay.eta.%y4%m2%d2_%h2z.nc4': '{{ ana4replay.eta.%y4%m2%d2_%h2z.nc4 }}', + '@REPLAY_ANA_EXPID': '{{ REPLAY_ANA_EXPID }}', + '@REPLAY_ANA_LOCATION': '{{ REPLAY_ANA_LOCATION }}', + '@M2_REPLAY_ANA_LOCATION': '{{ M2_REPLAY_ANA_LOCATION }}', + '@OX_RELAXTIME': '{{ OX_RELAXTIME }}', + '@PCHEM_CLIM_YEARS': '{{ PCHEM_CLIM_YEARS }}', + '@RATS_PROVIDER': '{{ RATS_PROVIDER }}', + '@AERO_PROVIDER': '{{ AERO_PROVIDER }}', + '@OANA_PROVIDER': '{{ OANA_PROVIDER }}', + '@EMISSIONS': '{{ EMISSIONS }}', + '@DYCORE': '{{ DYCORE }}', + '@AGCM_GRIDNAME': '{{ AGCM_GRIDNAME }}', + '@OGCM_GRIDNAME': '{{ OGCM_GRIDNAME }}', + '@OGCM_IS_FCST': '{{ OGCM_IS_FCST }}', + '@BOOT': '{{ BOOT }}', + '@BCSRES': '{{ BCSRES }}', + '@OCEANtag': '{{ OCEANtag }}', + '@ATMOStag': '{{ ATMOStag }}', + '@RES_DATELINE': '{{ RES_DATELINE }}', + '@TILEDATA': '{{ TILEDATA }}', + '@TILEBIN': '{{ TILEBIN }}', + '@DT': '{{ DT }}', + '@SOLAR_DT': '{{ SOLAR_DT }}', + '@IRRAD_DT': '{{ IRRAD_DT }}', + '@OCEAN_DT': '{{ OCEAN_DT }}', + '@LONG_DT': '{{ LONG_DT }}', + '@NX': '{{ NX }}', + '@NY': '{{ NY }}', + '@USE_SHMEM': '{{ USE_SHMEM }}', + '@USE_IOSERVER': '{{ USE_IOSERVER }}', + '@NUM_OSERVER_NODES': '{{ NUM_OSERVER_NODES }}', + '@NUM_BACKEND_PES': '{{ NUM_BACKEND_PES }}', + '@RESTART_BY_OSERVER': '{{ RESTART_BY_OSERVER }}', + '@NCPUS_PER_NODE': '{{ NCPUS_PER_NODE }}', + '@NUM_READERS': '{{ NUM_READERS }}', + '@NUM_WRITERS': '{{ NUM_WRITERS }}', + '@LATLON_AGCM': '{{ LATLON_AGCM }}', + '@LATLON_OGCM': '{{ LATLON_OGCM }}', + '@CUBE_AGCM': '{{ CUBE_AGCM }}', + '@CUBE_OGCM': '{{ CUBE_OGCM }}', + '@GRID_TYPE': '{{ GRID_TYPE }}', + '@AGCM_NF': '{{ AGCM_NF }}', + '@AGCM_IM': '{{ AGCM_IM }}', + '@AGCM_JM': '{{ AGCM_JM }}', + '@AGCM_LM': '{{ AGCM_LM }}', + '@OGCM_IM': '{{ OGCM_IM }}', + '@OGCM_JM': '{{ OGCM_JM }}', + '@OGCM_LM': '{{ OGCM_LM }}', + '@OGCM_NF': '{{ OGCM_NF }}', + '@OGCM_GRID_TYPE': '{{ OGCM_GRID_TYPE }}', + '@BEG_DATE': '{{ BEG_DATE }}', + '@END_DATE': '{{ END_DATE }}', + '@JOB_SGMT': '{{ JOB_SGMT }}', + '@NUM_SGMT': '{{ NUM_SGMT }}', + '@CONUS': '{{ CONUS }}', + '@FV_HWT': '{{ FV_HWT }}', + '@CONVPAR_OPTION': '{{ CONVPAR_OPTION }}', + '@STRETCH_FACTOR': '{{ STRETCH_FACTOR }}', + '@INTERPOLATE_SST': '{{ INTERPOLATE_SST }}', + '@HIST_IM': '{{ HIST_IM }}', + '@HIST_JM': '{{ HIST_JM }}', + '@ISCCP_SATSIM': '{{ ISCCP_SATSIM }}', + '@MODIS_SATSIM': '{{ MODIS_SATSIM }}', + '@RADAR_SATSIM': '{{ RADAR_SATSIM 
}}', + '@LIDAR_SATSIM': '{{ LIDAR_SATSIM }}', + '@MISR_SATSIM': '{{ MISR_SATSIM }}', + '@SATSIM': '{{ SATSIM }}', + '@USE_SKIN_LAYER': '{{ USE_SKIN_LAYER }}', + '@ANALYZE_TS': '{{ ANALYZE_TS }}', + '@LSM_CHOICE': '{{ LSM_CHOICE }}', + '@MP_TURN_OFF_WSUB_EXTDATA': '{{ MP_TURN_OFF_WSUB_EXTDATA }}', + '@BACM_1M_': '{{ BACM_1M_ }}', + '@GFDL_1M_': '{{ GFDL_1M_ }}', + '@MGB2_2M_': '{{ MGB2_2M_ }}', + '@PRELOAD_COMMAND': '{{ PRELOAD_COMMAND }}', + '@LD_LIBRARY_PATH_CMD': '{{ LD_LIBRARY_PATH_CMD }}', + '@RUN_CMD': '{{ RUN_CMD }}', + '@HYDROSTATIC': '{{ HYDROSTATIC }}', + '@FV_SCHMIDT': '{{ FV_SCHMIDT }}', + '@FV_STRETCH_FAC': '{{ FV_STRETCH_FAC }}', + '@FV_TARGET_LON': '{{ FV_TARGET_LON }}', + '@FV_TARGET_LAT': '{{ FV_TARGET_LAT }}', + '@FV_MAKENH': '{{ FV_MAKENH }}', + '@FV_HYDRO': '{{ FV_HYDRO }}', + '@GFDL_PROG_CCN': '{{ GFDL_PROG_CCN }}', + '@GFDL_USE_CCN': '{{ GFDL_USE_CCN }}', + '@GFDL_HYDRO': '{{ GFDL_HYDRO }}' + + } + + main(file_list, replacements) + diff --git a/gcmpy/sync.sh b/gcmpy/sync.sh index 5225d453..4e48f1f7 100755 --- a/gcmpy/sync.sh +++ b/gcmpy/sync.sh @@ -1,5 +1,10 @@ #!/bin/bash +''' +The purpose of this script to sync up the gcmpy directories between the @GEOSgcm_App dir and bin dir without having to make install +''' + + # Source and destination directories source_dir="/discover/nobackup/sshakoor/GEOSgcm/src/Applications/@GEOSgcm_App/gcmpy" destination_dir="/discover/nobackup/sshakoor/GEOSgcm/install/bin/gcmpy" From 824e8f54c10c808cd5cc44c62f3f0ae9f6b1f523 Mon Sep 17 00:00:00 2001 From: Shayon Shakoorzadeh Date: Wed, 16 Oct 2024 10:04:27 -0400 Subject: [PATCH 11/11] template files were modified to be compatible with jinja2. AMIP experiments are trivial zero diff --- AGCM.rc.tmpl | 22 +- gcm_archive.j | 2 +- gcm_forecast.tmpl | 2 +- gcm_moveplot.j | 2 +- gcm_plot.tmpl | 2 +- gcm_post.j | 2 +- gcm_regress.j | 2 +- gcm_run.j | 2 +- gcmpy/jinjafy.py | 3 +- gcmpy/scripts/.model.py.swp | Bin 20480 -> 0 bytes .../__pycache__/atmosphere.cpython-312.pyc | Bin 11467 -> 12201 bytes gcmpy/scripts/__pycache__/env.cpython-312.pyc | Bin 4668 -> 4887 bytes .../__pycache__/gocart.cpython-312.pyc | Bin 2472 -> 2697 bytes .../scripts/__pycache__/land.cpython-312.pyc | Bin 3150 -> 2669 bytes .../scripts/__pycache__/ocean.cpython-312.pyc | Bin 12294 -> 12802 bytes .../process_questions.cpython-312.pyc | Bin 12394 -> 12295 bytes .../__pycache__/utility.cpython-312.pyc | Bin 3732 -> 3860 bytes gcmpy/scripts/atmosphere.py | 331 ++++--- gcmpy/scripts/env.py | 22 +- gcmpy/scripts/gcm_setup.py | 919 ++++++++++++++++++ gcmpy/scripts/generate_question.py | 0 gcmpy/scripts/gocart.py | 47 +- gcmpy/scripts/land.py | 59 +- gcmpy/scripts/model.py | 496 ---------- gcmpy/scripts/ocean.py | 256 +++-- gcmpy/scripts/process_questions.py | 9 +- gcmpy/scripts/utility.py | 1 + gcmpy/yaml/atmospheric_model.yaml | 2 +- gcmpy/yaml/directory_setup.yaml | 0 gcmpy/yaml/exp_setup.yaml | 0 gcmpy/yaml/gocart.yaml | 0 gcmpy/yaml/land_model.yaml | 5 +- gcmpy/yaml/mpi_config.yaml | 0 gcmpy/yaml/ocean_model.yaml | 0 linkbcs.tmpl | 88 +- 35 files changed, 1362 insertions(+), 912 deletions(-) delete mode 100644 gcmpy/scripts/.model.py.swp mode change 100644 => 100755 gcmpy/scripts/atmosphere.py mode change 100644 => 100755 gcmpy/scripts/env.py create mode 100755 gcmpy/scripts/gcm_setup.py mode change 100644 => 100755 gcmpy/scripts/generate_question.py mode change 100644 => 100755 gcmpy/scripts/gocart.py mode change 100644 => 100755 gcmpy/scripts/land.py delete mode 100644 gcmpy/scripts/model.py mode change 100644 => 100755 
gcmpy/scripts/ocean.py mode change 100644 => 100755 gcmpy/scripts/process_questions.py mode change 100644 => 100755 gcmpy/scripts/utility.py mode change 100644 => 100755 gcmpy/yaml/atmospheric_model.yaml mode change 100644 => 100755 gcmpy/yaml/directory_setup.yaml mode change 100644 => 100755 gcmpy/yaml/exp_setup.yaml mode change 100644 => 100755 gcmpy/yaml/gocart.yaml mode change 100644 => 100755 gcmpy/yaml/land_model.yaml mode change 100644 => 100755 gcmpy/yaml/mpi_config.yaml mode change 100644 => 100755 gcmpy/yaml/ocean_model.yaml diff --git a/AGCM.rc.tmpl b/AGCM.rc.tmpl index 49e1a345..d522d393 100644 --- a/AGCM.rc.tmpl +++ b/AGCM.rc.tmpl @@ -361,21 +361,21 @@ RECORD_REF_TIME: >>>REFTIME<<< >>>FCSTIME<<< # 4DIAU (recreate analysis tendency on the fly) # --------------------------------------------- -#>>>4DIAUDAS<<< AINC_FILE: {{ EXPID }}.ana.eta.%y4%m2%d2_%h2%n2z.nc4 -#>>>4DIAUDAS<<< NUDGE_STATE: YES -##>>>4DIAUDAS<<< TAUANL: 3600 -#>>>4DIAUDAS<<< AGCM_IMPORT_RESTART_FILE: agcm_import_rst.>>>ANA0YYYYMMDDHHMN<<>>4DIAUDAS<<< AGCM_IMPORT_RESTART_TYPE: binary +#>>>_4DIAUDAS<<< AINC_FILE: {{ EXPID }}.ana.eta.%y4%m2%d2_%h2%n2z.nc4 +#>>>_4DIAUDAS<<< NUDGE_STATE: YES +##>>>_4DIAUDAS<<< TAUANL: 3600 +#>>>_4DIAUDAS<<< AGCM_IMPORT_RESTART_FILE: agcm_import_rst.>>>ANA0YYYYMMDDHHMN<<>>_4DIAUDAS<<< AGCM_IMPORT_RESTART_TYPE: binary # Exact REPLAY Mode for 4D-IAU # ---------------------------- ->>>4DIAUDAS<<< REPLAY_MODE: Exact ->>>4DIAUDAS<<< REPLAY_FILE: agcm_import_rst.%y4%m2%d2_%h2%n2z.nc4 ->>>4DIAUDAS<<< REPLAY_FILE_FREQUENCY: 7200 ->>>4DIAUDAS<<< REPLAY_SHUTOFF: 21600.0 # ShutOff Replay after 6-hours for G5NCEP and G5ECMWF Forecasts +>>>_4DIAUDAS<<< REPLAY_MODE: Exact +>>>_4DIAUDAS<<< REPLAY_FILE: agcm_import_rst.%y4%m2%d2_%h2%n2z.nc4 +>>>_4DIAUDAS<<< REPLAY_FILE_FREQUENCY: 7200 +>>>_4DIAUDAS<<< REPLAY_SHUTOFF: 21600.0 # ShutOff Replay after 6-hours for G5NCEP and G5ECMWF Forecasts -#>>>4DIAUDAS<<< REPLAY_INTERVAL: 3600.0 -#>>>4DIAUDAS<<< PREDICTOR_OFFSET: 3600.0 +#>>>_4DIAUDAS<<< REPLAY_INTERVAL: 3600.0 +#>>>_4DIAUDAS<<< PREDICTOR_OFFSET: 3600.0 #################################################################################################### #### Miscellaneous Exact/Regular REPLAY Parameters #### diff --git a/gcm_archive.j b/gcm_archive.j index d48272dd..722f5a1c 100644 --- a/gcm_archive.j +++ b/gcm_archive.j @@ -35,7 +35,7 @@ setenv GEOSBIN {{ GEOSBIN }} source $GEOSBIN/g5_modules setenv {{ LD_LIBRARY_PATH_CMD }} ${LD_LIBRARY_PATH} if ( $?BASEDIR ) then - setenv {{ LD_LIBRARY_PATH_CMD }} ${{{ LD_LIBRARY_PATH_CMD }}}:${BASEDIR}/${ARCH}/lib + setenv {{ LD_LIBRARY_PATH_CMD }} ${{'{'}}{{LD_LIBRARY_PATH_CMD}}{{'}'}}:${BASEDIR}/${ARCH}/lib endif ####################################################################### diff --git a/gcm_forecast.tmpl b/gcm_forecast.tmpl index 86552531..2a26a7ba 100755 --- a/gcm_forecast.tmpl +++ b/gcm_forecast.tmpl @@ -34,7 +34,7 @@ setenv GEOSUTIL {{ GEOSSRC }} source $GEOSBIN/g5_modules setenv {{ LD_LIBRARY_PATH_CMD }} ${LD_LIBRARY_PATH}:${GEOSDIR}/lib if ( $?BASEDIR ) then - setenv {{ LD_LIBRARY_PATH_CMD }} ${{{ LD_LIBRARY_PATH_CMD }}}:${BASEDIR}/${ARCH}/lib + setenv {{ LD_LIBRARY_PATH_CMD }} ${{'{'}}{{LD_LIBRARY_PATH_CMD}}{{'}'}}:${BASEDIR}/${ARCH}/lib endif setenv RUN_CMD "{{ RUN_CMD }}" diff --git a/gcm_moveplot.j b/gcm_moveplot.j index f473fd45..cc293ee3 100755 --- a/gcm_moveplot.j +++ b/gcm_moveplot.j @@ -36,7 +36,7 @@ setenv GEOSUTIL {{ GEOSSRC }} source $GEOSBIN/g5_modules setenv {{ LD_LIBRARY_PATH_CMD }} ${LD_LIBRARY_PATH} if ( $?BASEDIR ) then - setenv 
{{ LD_LIBRARY_PATH_CMD }} ${{{ LD_LIBRARY_PATH_CMD }}}:${BASEDIR}/${ARCH}/lib + setenv {{ LD_LIBRARY_PATH_CMD }} ${{'{'}}{{LD_LIBRARY_PATH_CMD}}{{'}'}}:${BASEDIR}/${ARCH}/lib endif ####################################################################### diff --git a/gcm_plot.tmpl b/gcm_plot.tmpl index f68ca271..e9f26a7c 100755 --- a/gcm_plot.tmpl +++ b/gcm_plot.tmpl @@ -36,7 +36,7 @@ setenv GEOSUTIL {{ GEOSSRC }} source $GEOSBIN/g5_modules setenv {{ LD_LIBRARY_PATH_CMD }} ${LD_LIBRARY_PATH} if ( $?BASEDIR ) then - setenv {{ LD_LIBRARY_PATH_CMD }} ${{{ LD_LIBRARY_PATH_CMD }}}:${BASEDIR}/${ARCH}/lib + setenv {{ LD_LIBRARY_PATH_CMD }} ${{'{'}}{{LD_LIBRARY_PATH_CMD}}{{'}'}}:${BASEDIR}/${ARCH}/lib endif ####################################################################### diff --git a/gcm_post.j b/gcm_post.j index 177996ef..8a589028 100755 --- a/gcm_post.j +++ b/gcm_post.j @@ -38,7 +38,7 @@ setenv BATCHNAME "{{ POST_N }}" source $GEOSBIN/g5_modules setenv {{ LD_LIBRARY_PATH_CMD }} ${LD_LIBRARY_PATH} if ( $?BASEDIR ) then - setenv {{ LD_LIBRARY_PATH_CMD }} ${{{ LD_LIBRARY_PATH_CMD }}}:${BASEDIR}/${ARCH}/lib + setenv {{ LD_LIBRARY_PATH_CMD }} ${{'{'}}{{LD_LIBRARY_PATH_CMD}}{{'}'}}:${BASEDIR}/${ARCH}/lib endif if( $?SLURM_NTASKS ) then diff --git a/gcm_regress.j b/gcm_regress.j index 5ba8d717..99f70287 100755 --- a/gcm_regress.j +++ b/gcm_regress.j @@ -93,7 +93,7 @@ source $GEOSBIN/g5_modules setenv {{ LD_LIBRARY_PATH_CMD }} ${LD_LIBRARY_PATH}:${GEOSDIR}/lib # We only add BASEDIR to the {{ LD_LIBRARY_PATH_CMD }} if BASEDIR is defined (i.e., not running with Spack) if ( $?BASEDIR ) then - setenv {{ LD_LIBRARY_PATH_CMD }} ${{{ LD_LIBRARY_PATH_CMD }}}:${BASEDIR}/${ARCH}/lib + setenv {{ LD_LIBRARY_PATH_CMD }} ${{'{'}}{{LD_LIBRARY_PATH_CMD}}{{'}'}}:${BASEDIR}/${ARCH}/lib endif setenv RUN_CMD "{{ RUN_CMD }}" diff --git a/gcm_run.j b/gcm_run.j index b4df0e73..7e56e855 100755 --- a/gcm_run.j +++ b/gcm_run.j @@ -36,7 +36,7 @@ source $GEOSBIN/g5_modules setenv {{ LD_LIBRARY_PATH_CMD }} ${LD_LIBRARY_PATH}:${GEOSDIR}/lib # We only add BASEDIR to the {{ LD_LIBRARY_PATH_CMD }} if BASEDIR is defined (i.e., not running with Spack) if ( $?BASEDIR ) then - setenv {{ LD_LIBRARY_PATH_CMD }} ${{{ LD_LIBRARY_PATH_CMD }}}:${BASEDIR}/${ARCH}/lib + setenv {{ LD_LIBRARY_PATH_CMD }} ${{'{'}}{{LD_LIBRARY_PATH_CMD}}{{'}'}}:${BASEDIR}/${ARCH}/lib endif setenv RUN_CMD "{{ RUN_CMD }}" diff --git a/gcmpy/jinjafy.py b/gcmpy/jinjafy.py index 1dd0efef..263541ed 100755 --- a/gcmpy/jinjafy.py +++ b/gcmpy/jinjafy.py @@ -51,7 +51,8 @@ def main(file_list, replacements): 'AGCM.rc.tmpl', 'HISTORY.rc.tmpl', 'logging.yaml', - 'fvcore_layout.rc' + 'fvcore_layout.rc', + 'linkbcs.tmpl' ] # Dictionary with keys to find and values to replace diff --git a/gcmpy/scripts/.model.py.swp b/gcmpy/scripts/.model.py.swp deleted file mode 100644 index 9828119ed456ce9298323498d949b7ee20b6c8d8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 20480 zcmeHPU5p!76`oSk(9!~G_z{2YjjiHc>e#zU6E%yr>elh@x?QhVwv$b?O{e4W*q&rO z`TdApt56s4e9?cjkJ= zv$LBtN_l`8>9cq4J#+54XYM)oo^$WIF?FO=W(RXq60Ubi(o_FfYNdaC>CR7nP?8*{ zqnxxXo2s_En!2M}>zX~icRMOc%C~zhr<-q@jeYy_f_Z+SSgEzuZeBMXS22uyLpSrd z?+x;fYU@4M$#<=$X5@NRloc?UjQM;Tm5pu|L#crrGEX5{`W(XdcQdNN;HsYAkjdg zfkXp|1`-V<8b~ydXdux*qJcyM|Cbt2nv!$?JwHS7GWP#P{Qu%RCFwEXo4{v*lfX^D zdx6*9AxVz_4+9SYeV_xZ03QKfy-Jdv0UiME1=fHYfUAH%UMWdG1ik_^fhpi};OyJM z6W9P&fZKpufh=$(@YgFO=}F)T;0*8}@CD!)a1*c_V8FG21f0EGlD-3c2Dlrj0q+JL zf14z&10MrsfP=t?fJ=d=cS_RFfzJWQfTO^Nf#)uhq-TN81FOIh;1b|>J0Js~0}H@T 
[GIT binary patch payloads omitted: the base85-encoded deltas for the deleted vim swap file gcmpy/scripts/.model.py.swp and for the compiled gcmpy/scripts/__pycache__/*.cpython-312.pyc caches listed in the diffstat above are not human-readable.]
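The ${{'{'}}{{LD_LIBRARY_PATH_CMD}}{{'}'}} construct introduced in the gcm_*.j hunks above is the usual Jinja2 idiom for emitting literal braces: the quoted '{' and '}' pass through as plain text, so the rendered csh line still reads ${LD_LIBRARY_PATH}. A minimal rendering sketch follows; it is illustrative only (the actual driver is presumably the new gcmpy/scripts/gcm_setup.py, whose contents are not shown in this excerpt), and the variable value used here is a placeholder.

    from jinja2 import Environment, StrictUndefined

    # One template line taken from the gcm_run.j hunk above.
    line = "setenv {{ LD_LIBRARY_PATH_CMD }} ${{'{'}}{{LD_LIBRARY_PATH_CMD}}{{'}'}}:${BASEDIR}/${ARCH}/lib"

    # StrictUndefined makes any {{ TOKEN }} left unfilled raise an error
    # instead of silently rendering as empty text.
    env = Environment(undefined=StrictUndefined)
    print(env.from_string(line).render(LD_LIBRARY_PATH_CMD="LD_LIBRARY_PATH"))
    # -> setenv LD_LIBRARY_PATH ${LD_LIBRARY_PATH}:${BASEDIR}/${ARCH}/lib

The csh expansions ${BASEDIR} and ${ARCH} survive untouched because Jinja2 only intercepts the {{ ... }}, {% ... %}, and {# ... #} delimiters.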
zOA)jI(v^&wLYsFmB{4EOO_pYnKe#pw=XPX5D_%;g9)m=TDJoi=CkDl;-)WDuWRz$YS*a9Jv` PfulqE5`$B+hAK+(+|EXz11$MPm~IRH&!1ma@l&8K*k896R7h!^n! F`2c?N9f$w` diff --git a/gcmpy/scripts/atmosphere.py b/gcmpy/scripts/atmosphere.py old mode 100644 new mode 100755 index ead977d4..ab853446 --- a/gcmpy/scripts/atmosphere.py +++ b/gcmpy/scripts/atmosphere.py @@ -8,30 +8,32 @@ def __init__(self): self.force_gcm = "#" self.num_readers = 1 self.num_writers = 1 - self.DT = answerdict["heartbeat"].q_answer - self.DT_solar = None - self.DT_irrad = None - self.DT_ocean = None - self.DT_long = None - self.IM = int(answerdict["AM_horizontal_res"].q_answer[1:]) - self.JM = self.IM * 6 - self.NX = None - self.NY = None + self.dt = answerdict["heartbeat"].q_answer + self.dt_solar = None + self.dt_irrad = None + self.dt_ocean = None + self.dt_long = None + self.lm = int(answerdict['AM_vertical_res'].q_answer) + self.im = int(answerdict['AM_horizontal_res'].q_answer[1:]) + self.jm = self.im * 6 + self.nx = None + self.ny = None + self.nf = 6 self.use_hydrostatic = answerdict["use_hydrostatic"].q_answer self.microphysics = answerdict["AM_microphysics"].q_answer - self.IM_hist = self.IM * 4 - self.JM_hist = self.JM * 2 + 1 - self.gridfile = f"Gnomonic_c{self.IM}.dat" + self.im_hist = self.im * 4 + self.jm_hist = self.jm * 2 + 1 + self.gridfile = f"Gnomonic_c{self.im}.dat" self.job_sgmt = None self.num_sgmt = None - self.res = f"CF{self.IM:04}x6C" + self.res = f"CF{self.im:04}x6C" self.post_NDS = None - self.NX_convert = 2 - self.NY_convert = 24 - self.CONUS = "#" + self.nx_convert = 2 + self.ny_convert = 24 + self.conus = "#" self.stretch_factor = None - self.gridname = f"PE{self.IM}x{self.JM}-CF" - self.res_dateline = f"{self.IM}x{self.JM}" + self.gridname = f"PE{self.im}x{self.jm}-CF" + self.res_dateline = f"{self.im}x{self.jm}" self.BACM_1M = "#" self.GFDL_1M = "#" self.MGB2_2M = "#" @@ -41,9 +43,17 @@ def __init__(self): self.MP_turnoff_wsub = None self.FV_make_NH = None self.FV_hydro = None + self.FV_hwt = None self.schmidt = None self.target_lon = None self.target_lat = None + self.convpar_option = 'GF' + self.mp_turn_off_wsub_extdata = None + + # These are superfluous for GCM, but are needed SCM (considered latlon) + self.latlon = '#DELETE' + self.cube = '' + # for debugging purposes def print_vars(self): @@ -51,245 +61,246 @@ def print_vars(self): for var_name, var_value in all_vars.items(): print(f"{color.BLUE}{var_name}: {var_value}{color.RESET}") - def hres(self, ocean_NX, ocean_NY): + def hres(self, ocean_nx, ocean_ny): match answerdict["AM_horizontal_res"].q_answer: case "c12": - self.DT_solar = 3600 - self.DT_irrad = 3600 - self.DT_ocean = self.DT_irrad - self.DT_long = self.DT + self.dt_solar = 3600 + self.dt_irrad = 3600 + self.dt_ocean = self.dt_irrad + self.dt_long = self.dt if answerdict["OM_name"].q_answer == "MOM6": - self.NX = 1 + self.nx = 1 else: - self.NX = 2 - self.NY = self.NX * 6 + self.nx = 2 + self.ny = self.nx * 6 self.job_sgmt = f"{15:08}" self.num_sgmt = 20 self.post_NDS = 4 - self.NX_convert = 1 - self.NY_convert = 6 + self.nx_convert = 1 + self.ny_convert = 6 case "c24": - self.DT_solar = 3600 - self.DT_irrad = 3600 - self.DT_ocean = self.DT_irrad - self.DT_long = self.DT - self.NX = 4 - self.NY = self.NX * 6 + self.dt_solar = 3600 + self.dt_irrad = 3600 + self.dt_ocean = self.dt_irrad + self.dt_long = self.dt + self.nx = 4 + self.ny = self.nx * 6 self.job_sgmt = f"{15:08}" self.num_sgmt = 20 self.post_NDS = 4 - self.NX_convert = 1 - self.NY_convert = 6 + self.nx_convert = 1 + self.ny_convert = 
6 case "c48": - self.DT_solar = 3600 - self.DT_irrad = 3600 - self.DT_ocean = self.DT_irrad - self.DT_long = self.DT - self.NX = 4 - self.NY = self.NX * 6 - self.IM_hist = 180 - self.JM_hist = 91 + self.dt_solar = 3600 + self.dt_irrad = 3600 + self.dt_ocean = self.dt_irrad + self.dt_long = self.dt + self.nx = 4 + self.ny = self.nx * 6 + self.im_hist = 180 + self.jm_hist = 91 self.job_sgmt = f"{15:08}" self.num_sgmt = 20 self.post_NDS = 4 case "c90": - self.DT_solar = 3600 - self.DT_irrad = 3600 - self.DT_long = self.DT + self.dt_solar = 3600 + self.dt_irrad = 3600 + self.dt_long = self.dt match answerdict["OM_name"].q_answer: case "MIT": - self.NX = 10 - self.NY = 36 - self.DT_ocean = self.DT + self.nx = 10 + self.ny = 36 + self.dt_ocean = self.dt case "MOM5","MOM6": - self.NX = ocean_NX - self.NY = ocean_NY - self.DT_ocean = self.DT + self.nx = ocean_nx + self.ny = ocean_ny + self.dt_ocean = self.dt case _: - self.NX = 3 - self.NY = self.NX * 6 - self.DT_ocean = self.DT_irrad + self.nx = 3 + self.ny = self.nx * 6 + self.dt_ocean = self.dt_irrad self.job_sgmt = f"{32:08}" self.num_sgmt = 4 self.post_NDS = 8 case "c180": - self.DT_solar = 3600 - self.DT_irrad = 3600 - self.DT_long = self.DT + self.dt_solar = 3600 + self.dt_irrad = 3600 + self.dt_long = self.dt if answerdict["OM_coupled"].q_answer == True: - self.NX = ocean_NX - self.NY = ocean_NY - self.DT_ocean = self.DT + self.nx = ocean_nx + self.ny = ocean_ny + self.dt_ocean = self.dt else: - self.NX = 6 - self.NY = self.NX * 6 - self.DT_ocean = self.DT_irrad + self.nx = 6 + self.ny = self.nx * 6 + self.dt_ocean = self.dt_irrad self.job_sgmt = f"{16:08}" self.num_sgmt = 1 self.post_NDS = 8 self.num_readers = 2 case "c360": - self.DT_solar = 3600 - self.DT_irrad = 3600 - self.DT_ocean = self.DT_irrad - self.DT_long = self.DT - self.NX = 12 - self.NY = self.NX * 6 + self.dt_solar = 3600 + self.dt_irrad = 3600 + self.dt_ocean = self.dt_irrad + self.dt_long = self.dt + self.nx = 12 + self.ny = self.nx * 6 self.num_readers = 4 self.job_sgmt = f"{5:08}" self.num_sgmt = 1 self.post_NDS = 12 - self.NX_convert = 4 + self.nx_convert = 4 case "c720": - self.DT_solar = 3600 - self.DT_irrad = 3600 - self.DT_ocean = self.DT_irrad - self.DT_long = 450 - self.NX = 24 - self.NY = self.NX * 6 + self.dt_solar = 3600 + self.dt_irrad = 3600 + self.dt_ocean = self.dt_irrad + self.dt_long = 450 + self.nx = 24 + self.ny = self.nx * 6 self.num_readers = 6 self.job_sgmt = f"{5:08}" self.num_sgmt = 1 self.post_NDS = 16 - self.NX_convert = 8 + self.nx_convert = 8 self.use_SHMEM = True case "c1440": - self.DT_solar = 1800 - self.DT_irrad = 1800 - self.DT_ocean = self.DT_irrad - self.DT_long = 300 - self.NX = 48 - self.NY = self.NX * 6 + self.dt_solar = 1800 + self.dt_irrad = 1800 + self.dt_ocean = self.dt_irrad + self.dt_long = 300 + self.nx = 48 + self.ny = self.nx * 6 self.num_readers = 6 self.job_sgmt = f"{1:08}" self.num_sgmt = 1 self.post_NDS = 32 - self.NX_convert = 8 + self.nx_convert = 8 self.use_SHMEM = True case "c2880": - self.DT_solar = 1800 - self.DT_irrad = 1800 - self.DT_ocean = self.DT_irrad - self.DT_long = 300 - self.NX = 96 - self.NY = self.NX * 6 + self.dt_solar = 1800 + self.dt_irrad = 1800 + self.dt_ocean = self.dt_irrad + self.dt_long = 300 + self.nx = 96 + self.ny = self.nx * 6 self.num_readers = 6 self.job_sgmt = f"{1:08}" self.num_sgmt = 1 self.post_NDS = 32 - self.NX_convert = 8 + self.nx_convert = 8 self.use_SHMEM = True + self.convpar_option = 'NONE' case "c5760": - self.DT_solar = 900 - self.DT_irrad = 900 - self.DT_ocean = 
self.DT_irrad - self.DT_long = 300 - self.NX = 192 - self.NY = self.NX * 6 + self.dt_solar = 900 + self.dt_irrad = 900 + self.dt_ocean = self.dt_irrad + self.dt_long = 300 + self.nx = 192 + self.ny = self.nx * 6 self.num_readers = 6 self.job_sgmt = f"{1:08}" self.num_sgmt = 1 self.post_NDS = 32 - self.NX_convert = 8 + self.nx_convert = 8 self.use_SHMEM = True + self.convpar_option = 'NONE' case "c270": - self.DT_solar = 3600 - self.DT_irrad = 3600 - self.DT_ocean = self.DT_irrad - self.DT_long = self.DT - self.NX = 18 - self.NY = self.NX * 6 + self.dt_solar = 3600 + self.dt_irrad = 3600 + self.dt_ocean = self.dt_irrad + self.dt_long = self.dt + self.nx = 18 + self.ny = self.nx * 6 self.num_readers = 6 self.job_sgmt = f"{1:08}" self.num_sgmt = 1 self.post_NDS = 32 - self.NX_convert = 8 + self.nx_convert = 8 self.use_SHMEM = True - self.CONUS = "" + self.conus = "" self.stretch_factor = 2.5 case "c540": - self.DT_solar = 3600 - self.DT_irrad = 3600 - self.DT_ocean = self.DT_irrad - self.DT_long = self.DT - self.NX = 36 - self.NY = self.NX * 6 * 2 + self.dt_solar = 3600 + self.dt_irrad = 3600 + self.dt_ocean = self.dt_irrad + self.dt_long = self.dt + self.nx = 36 + self.ny = self.nx * 6 * 2 self.num_readers = 6 self.job_sgmt = f"{1:08}" self.num_sgmt = 1 self.post_NDS = 32 - self.NX_convert = 8 + self.nx_convert = 8 self.use_SHMEM = True - self.CONUS = "" + self.conus = "" self.stretch_factor = 2.5 case "c1080": - self.DT_solar = 900 - self.DT_irrad = 900 - self.DT_ocean = self.DT_irrad - self.DT_long = 300 - self.NX = 72 - self.NY = self.NX * 6 * 2 + self.dt_solar = 900 + self.dt_irrad = 900 + self.dt_ocean = self.dt_irrad + self.dt_long = 300 + self.nx = 72 + self.ny = self.nx * 6 * 2 self.num_readers = 6 self.job_sgmt = f"{1:08}" self.num_sgmt = 1 self.post_NDS = 32 - self.NX_convert = 8 + self.nx_convert = 8 self.use_SHMEM = True - self.CONUS = "" + self.conus = "" self.stretch_factor = 2. case "c1536": - self.DT_solar = 900 - self.DT_irrad = 900 - self.DT_ocean = self.DT_irrad - self.DT_long = 300 - self.NX = 96 - self.NY = self.NX * 6 + self.dt_solar = 900 + self.dt_irrad = 900 + self.dt_ocean = self.dt_irrad + self.dt_long = 300 + self.nx = 96 + self.ny = self.nx * 6 self.num_readers = 6 self.job_sgmt = f"{5:08}" self.num_sgmt = 1 self.post_NDS = 16 - self.NX_convert = 8 + self.nx_convert = 8 self.use_SHMEM = True - self.CONUS = "" + self.conus = "" self.stretch_factor = 3.0 case "c2160": - self.DT_solar = 900 - self.DT_irrad = 900 - self.DT_ocean = self.DT_irrad - self.DT_long = 300 - self.NX = 192 - self.NY = self.NX * 6 * 2 + self.dt_solar = 900 + self.dt_irrad = 900 + self.dt_ocean = self.dt_irrad + self.dt_long = 300 + self.nx = 192 + self.ny = self.nx * 6 * 2 self.num_readers = 6 self.job_sgmt = f"{5:08}" self.num_sgmt = 1 self.post_NDS = 32 - self.NX_convert = 8 + self.nx_convert = 8 self.use_SHMEM = True - self.CONUS = "" + self.conus = "" self.stretch_factor = 2.5 if answerdict["OM_name"].q_answer == "MIT": - self.DT_ocean = self.DT + self.dt_ocean = self.dt def set_microphysics(self): match self.microphysics: case "BACM_1M": self.BACM_1M = "" - self.DT_long = 450 case "GFDL_1M": self.GFDL_1M = "" case "MGB2_2M": @@ -303,18 +314,19 @@ def set_turnoff_wsub(self): # settings for fvcore_layour.rc def set_fvcore_layout(self): - match self.use_hydrostatic: - case True: - self.FV_make_NH = "Make_NH = .F." - self.FV_hydro = "hydrostatic = .T." - case False: - self.FV_make_NH = "Make_NH = .T." - self.FV_hydro = "hydrostatic = .F." 
- if self.microphysics == "MGB2_2M": - self.FV_hydro = ".FALSE." + if self.use_hydrostatic == True: + self.FV_make_NH = "Make_NH = .F." + self.FV_hydro = "hydrostatic = .T." + self.FV_hwt = '#' + else: + self.FV_make_NH = "Make_NH = .T." + self.FV_hydro = "hydrostatic = .F." + self.FV_hwt = '' + if self.microphysics == "MGB2_2M": + self.FV_hydro = ".FALSE." - def set_CONUS(self): - if self.CONUS == "#": + def set_conus(self): + if self.conus == "#": self.schmidt = "do_schmidt = .false." self.stretch_factor = "stretch_fac = 1.0" self.target_lon = "target_lon = 0.0" @@ -325,9 +337,16 @@ def set_CONUS(self): self.target_lon = "target_lon = -98.35" self.target_lat = "target_lat = 39.5" + def set_wsub_extdata(self): + if self.microphysics == 'BACM_1M' or self.microphysics == 'GFDL_1M': + self.mp_turn_off_wsub_extdata = '' + else: + self.mp_turn_off_wsub_extdata = '#DELETE#' + - def config(self, ocean_NX, ocean_NY): - self.hres(ocean_NX, ocean_NY) + def config(self, ocean_nx, ocean_ny): + self.hres(ocean_nx, ocean_ny) self.set_microphysics() self.set_fvcore_layout() - self.set_CONUS() + self.set_conus() + self.set_wsub_extdata() diff --git a/gcmpy/scripts/env.py b/gcmpy/scripts/env.py old mode 100644 new mode 100755 index 1f02b253..a4be198d --- a/gcmpy/scripts/env.py +++ b/gcmpy/scripts/env.py @@ -73,11 +73,10 @@ ####################################################################### # Set Number of CPUs per Node ####################################################################### -# ----------------------------> PUT IN HANDLE CLASS FROM process_questions.py (MAYBE) <------------------------ if envdict['site'] == 'NCCS': - if answerdict['processor'].q_answer == 'Haswell': - envdict['n_CPUs'] = 28 - elif answerdict['processor'].q_answer == 'Skylake': + # NOTE: in the current version of gcm_setup, we never build on + # SLES15, so milan nodes are not an option for NCCS. 
+ if answerdict['processor'].q_answer == 'Skylake': envdict['n_CPUs'] = 40 elif answerdict['processor'].q_answer == 'Cascade': ''' @@ -132,3 +131,18 @@ print(f"ERROR: Unknown architecture", envdict['arch']) sys.exit(1) +####################################################################### +# Build Directory Locations +####################################################################### +if envdict['arch'] == 'Darwin': + envdict['preload_command'] = 'DYLD_INSERT_LIBRARIES' + envdict['ld_library_path_command'] = 'DYLD_LIBRARY_PATH' + # On macOS we seem to need to call mpirun directly and not use esma_mpirun + # For some reason SIP does not let the libraries be preloaded + envdict['run_command'] = 'mpirun -np ' +else: + envdict['preload_command'] = 'LD_PRELOAD' + envdict['ld_library_path_command'] = 'LD_LIBRARY_PATH' + envdict['run_command'] = '$GEOSBIN/esma_mpirun -np ' + + diff --git a/gcmpy/scripts/gcm_setup.py b/gcmpy/scripts/gcm_setup.py new file mode 100755 index 00000000..c1602591 --- /dev/null +++ b/gcmpy/scripts/gcm_setup.py @@ -0,0 +1,919 @@ +from ocean import ocean +from atmosphere import atmosphere as atmos +from land import land +from gocart import gocart +from env import answerdict, linkx +from utility import envdict, pathdict, color +import math, os, shutil, tempfile, yaml, re +from pathlib import Path +from jinja2 import Environment, FileSystemLoader, Undefined + + +# combines all models (atmos, ocean, land, gocart) into one big one +class setup: + def __init__(self): + self.ocean = ocean() + self.atmos = atmos() + self.land = land() + self.gocart = gocart() + self.is_FCST = False + self.fv_cubed = '' + self.bcs_res = None + self.tile_data = None + self.tile_bin = None + self.interpolate_sst = None + self.job_sgmt = None + self.begin_date = '18910301 000000' + self.end_date = '29990302 210000' + self.n_oserver_nodes = None + self.n_backend_pes = None + self.n_nodes = None + self.exp_dir = answerdict['exp_dir'].q_answer + self.restart_by_oserver = 'NO' + self.gcm_version = Path(f"{pathdict['etc']}/.AGCM_VERSION").read_text() + self.file_list = ['gcm_run.j', + 'gcm_post.j', + 'gcm_archive.j', + 'gcm_regress.j', + 'gcm_plot.tmpl', + 'gcm_quickplot.csh', + 'gcm_moveplot.j', + 'gcm_forecast.tmpl', + 'gcm_forecast.setup', + 'gcm_emip.setup', + 'CAP.rc.tmpl', + 'AGCM.rc.tmpl', + 'HISTORY.rc.tmpl', + 'logging.yaml', + 'fvcore_layout.rc', + 'linkbcs.tmpl'] + + def config_models(self): + self.ocean.config() + self.atmos.config(self.ocean.nx, self.ocean.ny) + self.land.config() + self.gocart.config() + + + # setup some variables idk + def set_some_stuff(self): + if self.atmos.im_hist >= self.ocean.im: + self.interpolate_sst = True + else: + self.interpolate_sst = False + self.bcs_res = f"{self.atmos.res}_{self.ocean.res}" + self.tile_data = f"{self.atmos.res}_{self.ocean.res}_Pfafstetter.til" + self.tile_bin = f"{self.atmos.res}_{self.ocean.res}_Pfafstetter.TIL" + self.job_sgmt = f"{self.atmos.job_sgmt} 000000" + + + # setup experiment nodes + def set_nodes(self): + model_npes = self.atmos.nx * self.atmos.ny + + # Calculate OSERVER nodes based on recommended algorithm + if answerdict['io_server'].q_answer == True: + + # First we calculate the number of model nodes + n_model_nodes = math.ceil(model_npes / envdict["n_CPUs"]) + + # Next the number of frontend PEs is 10% of the model PEs + n_frontend_pes = math.ceil(model_npes * 0.1) + + # Now we roughly figure out the number of collections in the HISTORY.rc + n_hist_collections = 0 + with open(answerdict['history_template'].q_answer, 
'r') as file: + in_collections = False + for line in file: + if line.split(' ', 1)[0] == "COLLECTIONS:": + in_collections = True + continue + if in_collections and line.split(' ', 1)[0] != "#": + n_hist_collections += 1 + if line.strip() == "::": + break + + # The total number of oserver PEs is frontend PEs plus number of history collections + n_oserver_pes = n_frontend_pes + n_hist_collections + + # calculate the number of oserver nodes + n_oserver_nodes = math.ceil(n_oserver_pes / envdict["n_CPUs"]) + + # The number of backend PEs is the number of history collections divided by the number of oserver nodes + n_backend_pes = math.ceil(n_hist_collections / n_oserver_nodes) + + # multigroup requires at least two backend pes + if (n_backend_pes < 2): n_backend_pes = 2 + + # Calculate the total number of nodes to request from batch + self.nodes = n_model_nodes + n_oserver_nodes + + else: + self.nodes = math.ceil(model_npes / envdict["n_CPUs"]) + self.n_oserver_nodes = 0 + self.n_backend_pes = 0 + + + + def set_stuff(self): + self.set_nodes() + # Longer job names are now supported with SLURM and PBS. Limits seem to be 1024 characters with SLURM + # and 230 with PBS. To be safe, we will limit to 200 + self.run_n = f"{answerdict['experiment_id'].q_answer[:200]}_RUN" # RUN Job Name + self.run_fn = f"{answerdict['experiment_id'].q_answer[:200]}_FCST" # Forecast Job Name + self.post_n = f"{answerdict['experiment_id'].q_answer[:200]}_POST" # POST Job Name + self.plot_n = f"{answerdict['experiment_id'].q_answer[:200]}_PLT" # PLOT Job Name + self.move_n = f"{answerdict['experiment_id'].q_answer[:200]}_PLT" # MOVE Job Name + self.archive_n = f"{answerdict['experiment_id'].q_answer[:200]}_ARCH" # ARCHIVE Job Name + self.regress_n = f"{answerdict['experiment_id'].q_answer[:200]}_RGRS" # REGRESS Job Name + + + # Here we need to convert POST_NDS to total tasks. 
Using 16 cores + # per task as a good default + post_npes = self.atmos.post_NDS * 16 + NPCUS = (post_npes + envdict["n_CPUs"] - 1)/envdict["n_CPUs"] + + ''' + Definition for each variable in the following if-else block: + + batch_cmd - PBS Batch command + batch_group - PBS Syntax for GROUP + batch_time - PBS Syntax for walltime + batch_jobname - PBS Syntax for job name + batch_outputname - PBS Syntax for job output name + batch_joinouterr - PBS Syntax for joining output and error + run_ft - Wallclock Time for gcm_forecast.j + run_ft - Wallclock Time for gcm_run.j + post_t - Wallclock Time for gcm_post.j + plot_t - Wallclock Time for gcm_plot.j + archive_t - Wallclock Time for gcm_archive.j + run_q - Batch queue name for gcm_run.j + run_p - PE Configuration for gcm_run.j + run_fp - PE Configuration for gcm_forecast.j + post_q - Batch queue name for gcm_post.j + plot_q - Batch queue name for gcm_plot.j + move_q - Batch queue name for gcm_moveplot.j + archive_q - Batch queue name for gcm_archive.j + post_p - PE Configuration for gcm_post.j + plot_p - PE Configuration for gcm_plot.j + archive_p - PE Configuration for gcm_archive.j + move_p - PE Configuration for gcm_moveplot.j + bcs_dir - Location of Boundary Conditions + replay_ana_expID - Default Analysis Experiment for REPLAY + replay_ana_location - Default Analysis Location for REPLAY + M2_replay_ana_location - Default Analysis Location for M2 REPLAY + sst_dir - Location of SST Boundary Conditions + chem_dir - Locations of Aerosol Chemistry BCs + work_dir - User work directory <----------------- change this later + gwdrs_dir - Location of GWD_RIDGE files + coupled_dir - Coupled Ocean/Atmos Forcing + ''' + + if envdict['site'] == "NAS": + self.batch_cmd = "qsub" + self.batch_group = "PBS -W group_list=" + self.batch_time = "PBS -l walltime=" + self.batch_jobname = "PBS -N" + self.batch_outputname = "PBS -o " + self.batch_joinouterr = "PBS -j oe -k oed" + self.run_ft = "6:00:00" + self.run_t = "8:00:00" + self.post_t = "8:00:00" + self.plot_t = "8:00:00" + self.archive_t = "8:00:00" + self.run_q = f"PBS -q normal" + self.run_p = f"PBS -l select={self.nodes}:ncpus={envdict['n_CPUs']}:mpiprocs={envdict['n_CPUs']}:model={answerdict['processor']}" + self.run_fp = f"PBS -l select=24:ncpus={envdict['n_CPUs']}:mpiprocs={envdict['n_CPUs']}:model={answerdict['processor']}" + self.post_q = "PBS -q normal" + self.plot_q = "PBS -q normal" + self.move_q = "PBS -q normal" + self.archive_q = "PBS -q normal" + self.post_p = f"PBS -l select={NPCUS}:ncpus={envdict['n_CPUs']}:mpiprocs={envdict['n_CPUs']}:model={answerdict['processor']}" + self.plot_p = f"PBS -l select=1:ncpus={envdict['n_CPUs']}:mpiprocs=1:model={answerdict['processor']}" + self.archive_p = f"PBS -l select=1:ncpus={envdict['n_CPUs']}:mpiprocs={envdict['n_CPUs']}:model={answerdict['processor']}" + self.move_p = "PBS -l select=1:ncpus=1" + self.boundary_path = "/nobackup/gmao_SIteam/ModelData" + self.bcs_dir = f"{self.boundary_path}/bcs/{self.land.bcs}/{self.land.bcs}_{self.ocean.tag}" + self.replay_ana_expID = "ONLY_MERRA2_SUPPORTED" + self.replay_ana_location = "ONLY_MERRA2_SUPPORTED" + self.M2_replay_ana_location = f"{self.boundary_path}/merra2/data" + + # defines location of SST Boundary Conditions + oceanres = f"{self.ocean.im}x{self.ocean.jm}" + if oceanres == "1440x720": + self.sst_dir = f"{self.boundary_path}/fvInput/g5gcm/bcs/SST/{oceanres}" + else: + self.sst_dir = f"{self.boundary_path}/fvInput/g5gcm/bcs/realtime/{self.ocean.sst_name}/{oceanres}" + if self.ocean.gridtype_abrv == "LL": 
+ self.sst_dir = "/nobackupp2/estrobac/geos5/SSTDIR" + + self.chem_dir = f"{self.boundary_path}/fvInput_nc3" + self.work_dir = f"/nobackup/{os.environ.get('LOGNAME')}" + self.gwdrs_dir = f"{self.boundary_path}/GWD_RIDGE" + + # Coupled Ocean/Atmos Forcing + if self.ocean.model == "MIT": + self.coupled_dir = "/nobackupp2/estrobac/geos5/GRIDDIR" + else: + self.coupled_dir = f"{boundary_path}/aogcm" + + + elif envdict['site'] == "NCCS": + self.batch_cmd = "sbatch" + self.batch_group = "SBATCH --account=" + self.batch_time = "SBATCH --time=" + self.batch_jobname = "SBATCH --job-name=" + self.batch_outputname = "SBATCH --output=" + self.batch_joinouterr = "DELETE" + self.run_ft = "06:00:00" + self.run_t = "12:00:00" + self.post_t = "8:00:00" + self.plot_t = "12:00:00" + self.archive_t = "2:00:00" + self.run_q = f"SBATCH --constraint={answerdict['processor']}" + self.run_p = f"SBATCH --nodes={self.nodes} --ntasks-per-node={envdict['n_CPUs']}" + self.run_fp = f"SBATCH --nodes={self.nodes} --ntasks-per-node={envdict['n_CPUs']}" + self.post_q = f"SBATCH --constraint={answerdict['processor']}" + self.plot_q = f"SBATCH --constraint={answerdict['processor']}" + self.move_q = "SBATCH --partition=datamove" + self.archive_q = "SBATCH --partition=datamove" + self.post_p = f"SBATCH --nodes={NPCUS} --ntasks-per-node={envdict['n_CPUs']}" + self.plot_p = f"SBATCH --nodes=4 --ntasks=4" + self.archive_p = "SBATCH --ntasks=1" + self.move_p = "SBATCH --ntasks=1" + self.boundary_path = "/discover/nobackup/projects/gmao" + self.bcs_dir = f"{self.boundary_path}bcs_shared/fvInput/ExtData/esm/tiles/{self.land.bcs}" + self.replay_ana_expID = "x0039" + self.replay_ana_location = f"{self.boundary_path}/g6dev/ltakacs/x0039" + self.M2_replay_ana_location = f"{self.boundary_path}/merra2/data" + + + # define location of SST Boundary Conditions + oceanres = f"{self.ocean.im}x{self.ocean.jm}" + if oceanres == "1440x720": + self.sst_dir = f"{os.environ.get('SHARE')}/gmao_ops/fvInput/g5gcm/bcs/SST/{oceanres}" + else: + self.sst_dir = f"{os.environ.get('SHARE')}/gmao_ops/fvInput/g5gcm/bcs/realtime/{self.ocean.sst_name}/{oceanres}" + if self.ocean.gridtype_abrv == "LL": + self.sst_dir = "/discover/nobackup/estrobac/geos5/SSTDIR" + + self.chem_dir = f"{os.environ.get('SHARE')}/gmao_ops/fvInput_nc3" + self.work_dir = f"/discover/nobackup/{os.environ.get('LOGNAME')}" + self.gwdrs_dir = f"{self.boundary_path}/osse2/stage/BCS_FILES/GWD_RIDGE" + + # Coupled Ocean/Atmos Forcing + if self.ocean.model == "MIT": + self.coupled_dir = "/gpfsm/dnb32/estrobac/geos5/GRIDDIR" + else: + self.coupled_dir = f"{self.boundary_path}/bcs_shared/make_bcs_inputs/ocean" + + + elif envdict['site'] == "AWS" or envdict['SITE'] == "Azure": + self.batch_cmd = "sbatch" + self.batch_group = "#DELETE" + self.batch_time = "SBATCH --time=" + self.batch_jobname = "SBATCH --job-name=" + self.batch_outputname = "SBATCH --output=" + self.batch_joinouterr = "DELETE" + self.run_ft = "06:00:00" + self.run_t = "12:00:00" + self.post_t = "8:00:00" + self.plot_t = "12:00:00" + self.archive_t = "1:00:00" + self.run_q = f"SBATCH --constraint={answerdict['processor']}" + self.run_p = f"SBATCH --nodes={self.nodes} --ntasks-per-node={envdict['n_CPUs']}" + self.run_fp = f"SBATCH --nodes={self.nodes} --ntasks-per-node={envdict['n_CPUs']}" + self.post_q = "NULL" + self.plot_q = "NULL" + self.move_q = "NULL" + self.archive_q = "NULL" + self.post_p = f"SBATCH --ntasks={post_npes}" + self.plot_p = f"SBATCH --nodes=4 --ntasks=4" + self.archive_p = "SBATCH --ntasks=1" + self.move_p = 
"SBATCH --ntasks=1" + self.boundary_path = "/ford1/share/gmao_SIteam/ModelData" + self.bcs_dir = f"{self.boundary_path}/bcs/{self.land.bcs}_{self.ocean.tag}" + self.replay_ana_expID = "REPLAY_UNSUPPORTED" + self.replay_ana_location = "REPLAY_UNSUPPORTED" + self.M2_replay_ana_location = "REPLAY_UNSUPPORTED" + self.sst_dir = f"{self.boundary_path}/{self.ocean.sst_name}/{self.ocean.im}x{self.ocean.jm}" + self.chem_dir = f"{self.boundary_path}/fvInput_nc3" + self.work_dir = os.environ.get('HOME') + self.gwdrs_dir = f"{self.boundary_path}/GWD_RIDGE" + self.coupled_dir = f"{self.boundary_path}/aogcm" + + else: + # These are defaults for the desktop + self.batch_cmd = "sbatch" + self.batch_group = "SBATCH --account=" + self.batch_time = "SBATCH --time=" + self.batch_jobname = "SBATCH --job-name=" + self.batch_outputname = "SBATCH --output=" + self.batch_joinouterr = "DELETE" + self.run_ft = "06:00:00" + self.run_t = "12:00:00" + self.post_t = "8:00:00" + self.plot_t = "12:00:00" + self.archive_t = "1:00:00" + self.run_q = "NULL" + self.run_p = "NULL" + self.run_fp = "NULL" + self.post_q = "NULL" + self.plot_q = "NULL" + self.move_q = "NULL" + self.archive_q = "NULL" + self.post_p = "NULL" + self.plot_p = "NULL" + self.archive_p = "NULL" + self.move_p = "NULL" + self.boundary_path = "/ford1/share/gmao_SIteam/ModelData" + self.bcs_dir = f"{self.boundary_path}/bcs/{self.land.bcs} /{self.land.bcs}_{self.ocean.tag}" + self.replay_ana_expID = "REPLAY_UNSUPPORTED" + self.replay_ana_location = "REPLAY_UNSUPPORTED" + self.M2_replay_ana_location = "REPLAY_UNSUPPORTED" + self.sst_dir = f"{self.boundary_path}/{self.ocean.sst_name}/{self.ocean.im}x{self.ocean.jm}" + self.chem_dir = f"{self.boundary_path}/fvInput_nc3" + self.work_dir = os.environ.get('HOME') + self.gwdrs_dir = f"{self.boundary_path}/GWD_RIDGE" + self.coupled_dir = f"{self.boundary_path}/aogcm" + + # By default on desktop, just ignore IOSERVER for now + self.atmos.NX = 1 + self.atmos.NY = 6 + answerdict["io_server"] = False + self.n_oserver_nodes = 0 + self.n_backend_pes = 0 + + ''' + def set_hist_temp(self): + tmphist_d, tmphist_path = tempfile.mkstemp() + print(self.ocean.history_template) + shutil.copy(self.ocean.history_template, tmphist_path) + return tmphist_d, tmphist_path + ''' + + ''' + mainly used to create .{*}root files and/or populate them + ''' + def create_dotfile(self, path, content): + try: + path = Path(path) + path.parent.mkdir(parents=True, exist_ok=True) + path.touch() + with open(path, 'w') as file: + file.write(os.path.dirname(content)) + except Exception as e: + print(f"An error occurred while creating directory: {str(e)}") + exit(1) + + + ####################################################################### + # Copy Model Executable and RC Files to Experiment Directory + ####################################################################### + def RC_setup(self): + + # Make the experiment directory and the RC directory inside of it + RC_dir = os.path.join(self.exp_dir, 'RC') + + # Delete the destination directory if it exists + if os.path.exists(RC_dir): + shutil.rmtree(RC_dir) + + # Copy over all files and subdirs in install/etc, keeping symlinks, and ignoring *.tmpl files + shutil.copytree(pathdict['etc'], RC_dir, symlinks=True, ignore=shutil.ignore_patterns('*.tmpl', 'fvcore.layout.rc')) + + # Copy or symlink GEOSgcm.x (((IGNORE SINGULARITY/NATIVE BUILDS FOR NOW!!))) + geosgcmx_path = os.path.join(pathdict['bin'], 'GEOSgcm.x') + if linkx == True: + os.symlink(geosgcmx_path, os.path.join(self.exp_dir, 'GEOSgcm.x')) 
+ else: + shutil.copy(geosgcmx_path, self.exp_dir) + + + ####################################################################### + # Set Recommended MPI Stack Settings + ####################################################################### + def mpistacksettings(self): + + # load mpi config from YAML + with open('../yaml/mpi_config.yaml') as file: + mpidict = yaml.load(file, Loader=yaml.FullLoader) + + # retrieve config from correlating mpi setting being used + mpi_config = mpidict.get(envdict['mpi']) + + # restart by oserver if using openmpi or mvapich + if envdict['mpi'] == 'openmpi' or envdict['mpi'] == 'mvapich': + self.restart_by_oserver = 'YES' + + + + ####################################################################### + # Create directories and copy files over + ####################################################################### + # A little helper function for copying files and displaying the info to the user + def copy_helper(self, src, destination, filename): + shutil.copy(src, destination) + print(f"Creating {color.RED}{filename}{color.RESET} for Experiment: {answerdict['experiment_id'].q_answer}") + + def copy_files_into_exp(self): + print("\n\n\n") + + for file in self.file_list: + self.copy_helper(f"{pathdict['GEOSgcm_App']}/{file}", f"{self.exp_dir}/{file}", file) + + self.copy_helper(f"{pathdict['install']}/post/plot.rc", f"{self.exp_dir}/plot.rc", "plot.rc") + self.copy_helper(f"{pathdict['install']}/post/post.rc", f"{self.exp_dir}/post.rc", "post.rc") + + # These files will be added if user chose to run coupled, regardless of ocean model selected. + if self.ocean.coupled == True: + self.copy_helper(f"{pathdict['install']}/coupled_diagnostics/g5lib/plotcon.j", f"{self.exp_dir}/plotcon.j", "plotocn.j") + self.copy_helper(f"{pathdict['install']}/coupled_diagnostics/g5lib/confon.py", f"{self.exp_dir}/__init__.py", "confocn.py") + self.file_list.extend(['input.nml', 'diag_table', 'plotocn.j', '__init__.py']) + + if self.ocean.model == 'MOM5': + self.copy_helper(f"{pathdict['etc']}/MOM5/geos5/{self.ocean.im}x{self.ocean.jm}/input.nml", f"{self.exp_dir}/input.nml", "input.nml") + self.copy_helper(f"{pathdict['etc']}/MOM5/geos5/{self.ocean.im}x{self.ocean.jm}/diag_table", f"{self.exp_dir}/diag_table.nml", "diag_table") + self.copy_helper(f"{pathdict['etc']}/MOM5/geos5/{self.ocean.im}x{self.ocean.jm}/field_table", f"{self.exp_dir}/field_table.nml", "field_table") + self.file_list.append('field_table') + elif self.ocean.model == 'MOM6': + self.copy_helper(f"{pathdict['etc']}/MOM6/mom6_app/{self.ocean.im}x{self.ocean.jm}/MOM_input", f"{self.exp_dir}/MOM_input", "MOM_input") + self.copy_helper(f"{pathdict['etc']}/MOM6/mom6_app/{self.ocean.im}x{self.ocean.jm}/MOM_override", f"{self.exp_dir}/MOM_override", "MOM_override") + self.copy_helper(f"{pathdict['etc']}/MOM6/mom6_app/{self.ocean.im}x{self.ocean.jm}/input.nml", f"{self.exp_dir}/input.nml", "input.nml") + self.copy_helper(f"{pathdict['etc']}/MOM6/mom6_app/{self.ocean.im}x{self.ocean.jm}/diag_table", f"{self.exp_dir}/diag_table", "diag_table") + self.copy_helper(f"{pathdict['etc']}/MOM6/mom6_app/{self.ocean.im}x{self.ocean.jm}/field_table", f"{self.exp_dir}/field_table", "field_table") + self.file_list.extend(['MOM_input', 'MOM_override', 'data_table']) + + if self.ocean.seaice_model == 'CICE6': + self.copy_helper(f"{pathdict['etc']}/CICE6/cice6_app/{self.ocean.im}x{self.ocean.jm}/ice_in", f"{self.exp_dir}/ice_in") + self.file_list.append('ice_in') + + print(f"{color.GREEN}Done!{color.RESET}\n") + + + 
####################################################################### + # Produce Final script and .rc files + ####################################################################### + + # THIS WHOLE SECTION IS WILDLY OUT OF DATE, HOWEVER I KEPT IT AS IT WAS + # IN THE ORIGINAL SCRIPT FOR NOW + def restarts(self): + # comment or un-comment restarts based on exp configuration + # --------------------------------------------------------- + rsnames = {'H2O': False, + 'MAM': False, + 'CARMA': False, + 'GMICHEM': False, + 'STRATCHEM': False} + rstypes = ['INTERNAL','IMPORT'] + + with open(f"{answerdict['exp_dir'].q_answer}/AGCM.rc.tmpl", 'r') as file: + file_content = file.read() + + # Template in a "#" if restart is set to false + for rst in rsnames: + for typ in rstypes: + rst_string = f"{rst}_{typ}" + comment = "" if rsnames[rst] else "#" + file_content = file_content.replace(rst_string, f"{comment}{rst_string}") + + with open(f"{answerdict['exp_dir'].q_answer}/AGCM.rc.tmpl", 'w') as file: + file.write(file_content) + + ####################################################################### + # Modify RC Directory for LM and GOCART.data/GOCART Options + ####################################################################### + def mod_RC_dir_for_pchem(self): + if self.atmos.lm == 72: + return + + rc_dir = f"{answerdict['exp_dir'].q_answer}/RC" + + # if atmospheric vertical resolution != 72, we loop through every + # file in the RC dir and modify the atmos.lm values + for file_name in os.listdir(rc_dir): + file_path = os.path.join(rc_dir, file_name) + + with open(file_path, 'r') as file: + file_content = file.read() + + file_content = file_content.replace("/L72/", f"/L{self.atmos.lm}/") + file_content = file_content.replace("z72", f"z{self.atmos.lm}") + file_content = file_content.replace("_72_", f"_{self.atmos.lm}_") + + with open(file_path, 'w') as file: + file.write(file_content) + + # configure pchem and TR in GEOS_ChemGridComp.rc + def config_chemGridComp(self): + if self.gocart.rats_provider == 'PCHEM': + pchem = 'TRUE' + else: + pchem = 'FALSE' + + chemgridcomp = f"{answerdict['exp_dir'].q_answer}/RC/GEOS_ChemGridComp.rc" + with open(chemgridcomp, 'r') as file: + file_content = file.read() + + # we always enable TR and gocart + file_content = re.sub(r'(ENABLE_PCHEM:\s*\.).*(\.)', r'\1'+pchem+r'\2', file_content) + file_content = re.sub(r'(ENABLE_TR:\s*\.).*(\.)', r'\1TRUE\2', file_content) + file_content = re.sub(r'(ENABLE_GOCART_DATA:\s*\.).*(\.)', r'\1TRUE\2', file_content) + + with open(chemgridcomp, 'w') as file: + file.write(file_content) + + # update LAND_PARAMS choices + def config_surfaceGridComp(self): + surfacegridcomp = f"{answerdict['exp_dir'].q_answer}/RC/GEOS_SurfaceGridComp.rc" + with open(surfacegridcomp, 'r') as file: + file_content = file.read() + + if self.land.model == 'CatchmentCN-CLM4.0': + file_content = re.sub(r'(LAND_PARAMS:\s*).*', r'\1CN_CLM40', file_content) + + if self.land.bcs == 'ICA': + file_content = re.sub(r'(LAND_PARAMS:\s*).*', r'\1Icarus', file_content) + file_content = re.sub(r'(Z0_FORMULATION:\s*).*', r'\1 2', file_content) + + with open(surfacegridcomp, 'w') as file: + file.write(file_content) + + # enable DATA_DRIVEN gocart2G + def config_gocartGridComp(self): + gocartgridcomp = f"{answerdict['exp_dir'].q_answer}/RC/GOCART2G_GridComp.rc" + with open(gocartgridcomp, 'r') as file: + file_content = file.read() + + if self.gocart.data_driven == True: + file_content = re.sub(r'(ACTIVE_INSTANCES_DU:\s*)DU', r'\1DU.data', file_content) + file_content 
= re.sub(r'(ACTIVE_INSTANCES_SS:\s*)SS', r'\1SS.data', file_content) + file_content = re.sub(r'(ACTIVE_INSTANCES_SU:\s*)SU', r'\1SU.data', file_content) + file_content = re.sub(r'(ACTIVE_INSTANCES_NI:\s*)NI', r'\1NI.data', file_content) + file_content = re.sub(r'(ACTIVE_INSTANCES_CA:\s*)CA.oc', r'\1CA.oc.data', file_content) + file_content = re.sub(r'(ACTIVE_INSTANCES_CA:\s*)CA.bc', r'\1CA.bc.data', file_content) + file_content = re.sub(r'(ACTIVE_INSTANCES_CA:\s*)CA.br', r'\1CA.br.data', file_content) + + with open(gocartgridcomp, 'w') as file: + file.write(file_content) + + + def config_heartbeat(self): + + # With MOM5 we need to change dt lines in input.nml to + # use $OCEAN_DT instead. NOTE: This regex assumes integer followed by comma + if self.ocean.model == 'MOM5': + with open(f"{answerdict['exp_dir'].q_answer}/input.nml", 'r') as file: + file_content = file.read() + + file_content = re.sub(r'(dt_cpld\s*=\s*)[0-9]+(\,)', r'\1'+self.atmos.DT_ocean+r'\2', file_content) + file_content = re.sub(r'(dt_atmos\s*=\s*)[0-9]+(\,)', r'\1'+self.atmos.DT_ocean+r'\2', file_content) + + with open(f"{answerdict['exp_dir'].q_answer}/input.nml", 'r') as file: + file.write(file_content) + + # We also must change the MOM_override file to + # have consistent DTs with the AGCM. So we use OCEAN_DT + # and change MOM_override to match. NOTE: This sed + # assumes floating point number with a decimal + if self.ocean.model == 'MOM6': + with open(f"{answerdict['exp_dir'].q_answer}/MOM_override", 'r') as file: + file_content = file.read() + + file_content = re.sub(r'(DT\s*=\s*).*(\,)', r'\1'+self.atmos.DT_ocean+r'\2', file_content) + file_content = re.sub(r'(DT_THERM\s*=\s*).*(\,)', r'\1'+self.atmos.DT_ocean+r'\2', file_content) + + with open(f"{answerdict['exp_dir'].q_answer}/MOM_override", 'r') as file: + file.write(file_content) + + ''' + This is a helper function that extends jinja's Undefined class to ignore + template variables that aren't detemplated by this script + ''' + def get_undefined_handler(variable_start_string, variable_end_string, self): + class PreserveUndefined(Undefined): + __slots__ = () + + def __init__(self, *args, **kwargs): + super(PreserveUndefined, self).__init__(**kwargs) + + def __str__(self): + return f"{variable_start_string}{self._undefined_name}{variable_end_string}" + return PreserveUndefined + + + # another templating helper function that removes lines from a file that begin with #DELETE + def cleanup(self, file_path): + with open(file_path, 'r') as file: + content = file.read() + + content = re.sub(r'^\s*#DELETE.*\n', r'', content, flags=re.MULTILINE) + + with open(file_path, 'w') as file: + file.write(content) + + + + def template(self): + # this dictionary holds template values for the default jinja2 delimiter "{{ val }}" + jinja_dict = { + 'GCMVER': self.gcm_version, + 'EXPSRC': self.gcm_version, + 'EXPID': answerdict['experiment_id'].q_answer, + 'RUN_N': self.run_n, + 'RUN_FN': self.run_fn, + 'RUN_FT': self.run_ft, + 'RUN_T': self.run_t, + 'RUN_P': self.run_p, + 'RUN_FP': self.run_fp, + 'RUN_Q': self.run_q, + 'POST_N': self.post_n, + 'POST_T': self.post_t, + 'POST_P': self.post_p, + 'POST_Q': self.post_q, + 'MOVE_N': self.move_n, + 'PLOT_N': self.plot_n, + 'PLOT_T': self.plot_t, + 'PLOT_P': self.plot_p, + 'PLOT_Q': self.plot_q, + 'MOVE_Q': self.move_q, + 'MOVE_P': self.move_p, + 'ARCHIVE_N': self.archive_n, + 'ARCHIVE_T': self.archive_t, + 'ARCHIVE_P': self.archive_p, + 'ARCHIVE_Q': self.archive_q, + 'REGRESS_N': self.regress_n, + 'BCSDIR': self.bcs_dir, + 'SSTDIR': 
self.sst_dir, + 'SSTNAME': self.ocean.sst_name, + 'OCEANOUT': self.ocean.out, + 'LSMBCS': self.land.bcs, + 'EMIP_BCS_IN': self.land.emip_bcs_in, + 'EMIP_MERRA2': self.land.emip_MERRA2, + 'BCSTAG': self.ocean.tag, + 'SSTFILE': self.ocean.sst_file, + 'ICEFILE': self.ocean.ice_file, + 'KPARFILE': self.ocean.kpar_file, + 'CHMDIR': self.chem_dir, + 'COUPLEDIR': self.coupled_dir, + 'GWDRSDIR': self.gwdrs_dir, + 'EXPDIR': self.exp_dir, + 'EXPDSC': answerdict['experiment_description'].q_answer, + 'HOMDIR': self.exp_dir, + 'BATCH_GROUP': self.batch_group, + 'BATCH_TIME': self.batch_time, + 'BATCH_CMD': self.batch_cmd, + 'BATCH_JOBNAME': self.batch_jobname, + 'BATCH_OUTPUTNAME': self.batch_outputname, + 'BATCH_JOINOUTERR': self.batch_joinouterr, + 'SITE': envdict['site'], + 'GEOSDIR': pathdict['install'], + 'GEOSSRC': pathdict['install'], + 'GEOSBIN': pathdict['bin'], + 'GEOSETC': pathdict['etc'], + 'GEOSUTIL': pathdict['install'], + 'SINGULARITY_BUILD': '#DELETE', + 'NATIVE_BUILD': '', + 'MPT_SHEPHERD': self.ocean.mpt_shepherd, + 'SINGULARITY_SANDBOX': '', + 'REAL_BIND_PATH': '', + 'BASE_BIND_PATH': '', + 'BOUNDARY_DIR': self.boundary_path, + 'CHECKPOINT_TYPE': 'default', + 'OGCM_NX': self.ocean.nx, + 'OGCM_NY': self.ocean.ny, + 'OGCM_NPROCS': self.ocean.n_procs, + 'OBSERVER_FRQ': 0, + 'DASTUNING': '#', + 'COUPLED': self.ocean.coupled, + 'CLDMICRO': self.atmos.microphysics, + 'MOM5': self.ocean.MOM5, + 'MOM6': self.ocean.MOM6, + 'OCNMODEL': self.ocean.model, + 'CICE4': '#DELETE', + 'CICE6': '#DELETE', + 'MIT': self.ocean.MIT, + 'DATAOCEAN': self.ocean.data, + 'OPS_SPECIES': self.gocart.ops_species, + 'CMIP_SPECIES': self.gocart.cmip_species, + 'MERRA2OX_SPECIES': self.gocart.MERRA2OX_species, + 'HIST_GOCART': self.gocart.gocart_hist, + 'LSM_PARMS': self.land.parameters, + 'OCEAN_NAME': self.ocean.model, + 'OCEAN_PRELOAD': self.ocean.preload, + #'ana4replay.eta.%y4%m2%d2_%h2z.nc4': '/discover/nobackup/projects/gmao/merra2/data/ana/MERRA2_all/Y%y4/M%m2/MERRA2.ana.eta.%y4%m2%d2_%h2z.nc4?g', + 'REPLAY_ANA_EXPID': self.replay_ana_expID, + 'REPLAY_ANA_LOCATION': self.replay_ana_location, + 'M2_REPLAY_ANA_LOCATION': self.M2_replay_ana_location, + 'OX_RELAXTIME': self.gocart.ox_relaxtime, + 'PCHEM_CLIM_YEARS': self.gocart.pchem_clim_years, + 'RATS_PROVIDER': self.gocart.rats_provider, + 'AERO_PROVIDER': self.gocart.aero_provider, + 'OANA_PROVIDER': 'PCHEM', + 'EMISSIONS': self.gocart.emissions, + 'DYCORE': 'FV3', + 'AGCM_GRIDNAME': self.atmos.gridname, + 'OGCM_GRIDNAME': self.ocean.gridname, + 'OGCM_IS_FCST': '0', + 'BOOT': 'YES', + 'BCSRES': self.bcs_res, + 'OCEANtag': self.ocean.res, + 'ATMOStag': self.atmos.res, + 'RES_DATELINE': self.atmos.res_dateline, + 'TILEDATA': self.tile_data, + 'TILEBIN': self.tile_bin, + 'DT': self.atmos.dt, + 'SOLAR_DT': self.atmos.dt_solar, + 'IRRAD_DT': self.atmos.dt_irrad, + 'OCEAN_DT': self.atmos.dt_ocean, + 'LONG_DT': self.atmos.dt_long, + 'NX': self.atmos.nx, + 'NY': self.atmos.ny, + 'USE_SHMEM': int(self.atmos.use_SHMEM), + 'USE_IOSERVER': int(answerdict['io_server'].q_answer), + 'NUM_OSERVER_NODES': self.n_oserver_nodes, + 'NUM_BACKEND_PES': self.n_backend_pes, + 'RESTART_BY_OSERVER': self.restart_by_oserver, + 'NCPUS_PER_NODE': envdict['n_CPUs'], + 'NUM_READERS': self.atmos.num_readers, + 'NUM_WRITERS': self.atmos.num_writers, + 'LATLON_AGCM': self.atmos.latlon, + 'LATLON_OGCM': self.ocean.latlon, + 'CUBE_AGCM': self.atmos.cube, + 'CUBE_OGCM': self.ocean.cube, + 'GRID_TYPE': 'Cubed-Sphere', + 'AGCM_NF': self.atmos.nf, + 'AGCM_IM': self.atmos.im, + 'AGCM_JM': 
self.atmos.jm, + 'AGCM_LM': self.atmos.lm, + 'OGCM_IM': self.ocean.im, + 'OGCM_JM': self.ocean.jm, + 'OGCM_LM': self.ocean.lm, + 'OGCM_NF': self.ocean.nf, + 'OGCM_GRID_TYPE': self.ocean.gridtype, + 'BEG_DATE': self.begin_date, + 'END_DATE': self.end_date, + 'JOB_SGMT': self.job_sgmt, + 'NUM_SGMT': self.atmos.num_sgmt, + 'CONUS': self.atmos.conus, + 'FV_HWT': self.atmos.FV_hwt, + 'CONVPAR_OPTION': self.atmos.convpar_option, + 'STRETCH_FACTOR': self.atmos.stretch_factor, + 'INTERPOLATE_SST': self.interpolate_sst, + 'HIST_IM': self.atmos.im_hist, + 'HIST_JM': self.atmos.jm_hist, + 'ISCCP_SATSIM': 1, + 'MODIS_SATSIM': 0, + 'RADAR_SATSIM': 0, + 'LIDAR_SATSIM': 0, + 'MISR_SATSIM': 0, + 'SATSIM': 0, + 'USE_SKIN_LAYER': 1, + 'ANALYZE_TS': 0, + 'LSM_CHOICE': self.land.model, + 'MP_TURN_OFF_WSUB_EXTDATA': self.atmos.mp_turn_off_wsub_extdata, + 'BACM_1M_': self.atmos.BACM_1M, + 'GFDL_1M_': self.atmos.GFDL_1M, + 'MGB2_2M_': self.atmos.MGB2_2M, + 'PRELOAD_COMMAND': envdict['preload_command'], + 'LD_LIBRARY_PATH_CMD': envdict['ld_library_path_command'], + 'RUN_CMD': envdict['run_command'], + 'HYDROSTATIC': self.atmos.use_hydrostatic, + 'FV_SCHMIDT': self.atmos.schmidt, + 'FV_STRETCH_FAC': self.atmos.stretch_factor, + 'FV_TARGET_LON': self.atmos.target_lon, + 'FV_TARGET_LAT': self.atmos.target_lat, + 'FV_MAKENH': self.atmos.FV_make_NH, + 'FV_HYDRO': self.atmos.FV_hydro, + 'GFDL_PROG_CCN': self.atmos.GFDL_prog_ccn, + 'GFDL_USE_CCN': self.atmos.GFDL_use_ccn, + 'GFDL_HYDRO': self.atmos.GFDL_hydro + } + + # this dictionary holds values that use the ">>>val<<<" delimiter + jinja_dict_special_delimiter = { + 'FORCEDAS': self.atmos.force_das, + 'FORCEGCM': self.atmos.force_gcm, + 'HIST_CICE4': '#DELETE', + 'GOCART': self.gocart.gocart, + 'FVCUBED': '', + 'OSTIA': self.ocean.ostia, + 'HIST_CATCHCN': self.land.HIST_catchment, + 'GCMRUN_CATCHCN': self.land.GCMRUN_catchment, + 'EMIP_OLDLAND': self.land.emip_oldland, + 'EMIP_NEWLAND': self.land.emip_newland, + '_4DIAUDAS': '#DELETE', + 'REGULAR_REPLAY': '#', + 'REGULAR_REPLAY_GMAO': '#', + 'REGULAR_REPLAY_NCEP': '#DELETE', + 'REGULAR_REPLAY_ECMWF': '#DELETE' + } + + + exp_dir = answerdict['exp_dir'].q_answer + + # this is an edge-case that can't be handled with jinja2 + # original csh line: s?^[ \t]*RECORD_?#RECORD_?g + for file in self.file_list: + with open(f"{exp_dir}/{file}", 'r') as tmpl: + file_content = tmpl.read() + + file_content = re.sub(r'^[ \t]*(RECORD_.*)', r'#\1', file_content) + + with open(f"{exp_dir}/{file}", 'w') as tmpl: + tmpl.write(file_content) + + # this block handles the default case for jinja templating + default_env = Environment( + loader=FileSystemLoader(exp_dir) + ) + for file in self.file_list: + template = default_env.get_template(file) + content = template.render(jinja_dict) + with open(f"{exp_dir}/{file}", 'w') as tmpl: + tmpl.write(content) + + # this block handles the special case for jinja templating + PreserveUndefined = self.get_undefined_handler('>>>', '<<<') + special_env = Environment( + loader=FileSystemLoader(exp_dir), + undefined=PreserveUndefined, + variable_start_string=">>>", + variable_end_string="<<<" + ) + for file in self.file_list: + template = special_env.get_template(file) + content = template.render(jinja_dict_special_delimiter) + with open(f"{exp_dir}/{file}", 'w') as tmpl: + tmpl.write(content) + + # remove #DELETE lines + for file in self.file_list: + file_path = f"{exp_dir}/{file}" + self.cleanup(file_path) + + + # organize files into sub directories and update file permissions + def organize_exp_dir(self): 
+ exp_dir = answerdict['exp_dir'].q_answer + + # make sub dirs + sub_dirs = ['archive', 'forecasts', 'plot', 'post' , 'regress'] + for i in sub_dirs: + os.makedirs(os.path.join(exp_dir, i), exist_ok=True) + + # archive dir + shutil.move(f"{exp_dir}/gcm_archive.j", f"{exp_dir}/archive/gcm_archive.j") + + # forecasts dir + shutil.move(f"{exp_dir}/gcm_forecast.setup", f"{exp_dir}/forecasts/gcm_forecast.setup") + shutil.move(f"{exp_dir}/gcm_forecast.tmpl", f"{exp_dir}/forecasts/gcm_forecast.tmpl") + + # plot dir + shutil.move(f"{exp_dir}/gcm_moveplot.j", f"{exp_dir}/plot/gcm_moveplot.j") + shutil.move(f"{exp_dir}/gcm_plot.tmpl", f"{exp_dir}/plot/gcm_plot.tmpl") + shutil.move(f"{exp_dir}/gcm_quickplot.csh", f"{exp_dir}/plot/gcm_quickplot.csh") + shutil.move(f"{exp_dir}/plot.rc", f"{exp_dir}/plot/plot.rc") + + # post dir + shutil.move(f"{exp_dir}/gcm_post.j", f"{exp_dir}/post/gcm_post.j") + shutil.move(f"{exp_dir}/post.rc", f"{exp_dir}/post/post.rc") + + # regress dir + shutil.move(f"{exp_dir}/gcm_regress.j", f"{exp_dir}/regress/gcm_regress.j") + + # rename tmpl files + os.rename(f"{exp_dir}/CAP.rc.tmpl", f"{exp_dir}/CAP.rc") + os.rename(f"{exp_dir}/AGCM.rc.tmpl", f"{exp_dir}/AGCM.rc") + os.rename(f"{exp_dir}/HISTORY.rc.tmpl", f"{exp_dir}/HISTORY.rc") + os.rename(f"{exp_dir}/linkbcs.tmpl", f"{exp_dir}/linkbcs") + + # update file permissions + os.chmod(f"{exp_dir}/CAP.rc", 0o755) + os.chmod(f"{exp_dir}/fvcore_layout.rc", 0o755) + os.chmod(f"{exp_dir}/archive/gcm_archive.j", 0o755) + os.chmod(f"{exp_dir}/linkbcs", 0o755) + os.chmod(f"{exp_dir}/logging.yaml", 0o755) + os.chmod(f"{exp_dir}/forecasts/gcm_forecast.tmpl", 0o644) + os.chmod(f"{exp_dir}/plot/gcm_plot.tmpl", 0o644) + + +my_exp = setup() +my_exp.config_models() +#my_exp.print_all_vars() +my_exp.set_some_stuff() +my_exp.set_nodes() +my_exp.set_stuff() +my_exp.create_dotfile(f"{os.environ.get('HOME')}/.EXPDIRroot", answerdict['exp_dir'].q_answer) +my_exp.create_dotfile(f"{os.environ.get('HOME')}/.GROUProot", answerdict['group_root'].q_answer) +my_exp.RC_setup() +my_exp.mpistacksettings() +my_exp.copy_files_into_exp() +my_exp.restarts() +my_exp.mod_RC_dir_for_pchem() +my_exp.config_chemGridComp() +my_exp.config_surfaceGridComp() +my_exp.config_gocartGridComp() +my_exp.config_heartbeat() +my_exp.template() +my_exp.organize_exp_dir() diff --git a/gcmpy/scripts/generate_question.py b/gcmpy/scripts/generate_question.py old mode 100644 new mode 100755 diff --git a/gcmpy/scripts/gocart.py b/gcmpy/scripts/gocart.py old mode 100644 new mode 100755 index d6d9c9db..309109a1 --- a/gcmpy/scripts/gocart.py +++ b/gcmpy/scripts/gocart.py @@ -6,14 +6,15 @@ def __init__(self): self.aerosol = answerdict["gocart_aerosols"].q_answer self.emissions = f"{answerdict['gocart_emission'].q_answer}_EMISSIONS" self.data_driven = None - self.OPS_species = "#" - self.CMIP_species = "#" - self.MERRA2OX_species = "#" - self.pchem_clim_years = "" - self.gocart = None + self.ops_species = '#' + self.cmip_species = '#' + self.MERRA2OX_species = '#' + self.pchem_clim_years = '' + self.ox_relaxtime = None + self.gocart = '' self.gocart_hist = None - self.aero_provider = "GOCART2G" - self.RATS_provider = "PCHEM" + self.aero_provider = 'GOCART2G' + self.rats_provider = 'PCHEM' # for debugging purposes def print_vars(self): @@ -22,24 +23,24 @@ def print_vars(self): print(f"{color.BLUE}{var_name}: {var_value}{color.RESET}") def set_gocart(self): - match self.aerosol: - case "Actual": - self.data_driven = False - self.gocart = "" - self.gocart_hist = "" - case "Climatological": - 
self.data_driven = True - self.gocart = "" - self.gocart_hist = "" + if self.aerosol == 'Actual': + self.data_driven = False + self.gocart = '' + self.gocart_hist = '' + elif self.aerosol == 'Climatological': + self.data_driven = True + self.gocart = '#' + self.gocart_hist = '#DELETE' def set_emissions(self): - match self.emissions.split("_")[0]: - case "AMIP": - self.MERRA2OX_species = "" - self.pchem_clim_years = 1 - case "OPS": - self.OPS_species = "" - self.pchem_clim_years = 39 + if self.emissions.split('_')[0] == 'AMIP': + self.MERRA2OX_species = '' + self.pchem_clim_years = 1 + self.ox_relaxtime = '0.00' + elif self.emissions.split('_')[0] == 'OPS': + self.ops_species = '' + self.pchem_clim_years = 39 + self.ox_relaxtime = '259200.' def config(self): diff --git a/gcmpy/scripts/land.py b/gcmpy/scripts/land.py old mode 100644 new mode 100755 index 5bc46d64..0cc2d664 --- a/gcmpy/scripts/land.py +++ b/gcmpy/scripts/land.py @@ -3,10 +3,10 @@ class land: def __init__(self): - self.land_choice = answerdict["LS_model"].q_answer + self.model = answerdict["LS_model"].q_answer self.bcs = answerdict["LS_boundary_conditions"].q_answer - self.bound_parameters = None - self.emip_BCS_IN = None + self.parameters = None + self.emip_bcs_in = None self.emip_oldland = None self.emip_newland = None self.emip_MERRA2 = None @@ -20,38 +20,35 @@ def print_vars(self): print(f"{color.CYAN}{var_name}: {var_value}{color.RESET}") def set_bcs(self): - match self.bcs: - case "Icarus": - self.bound_parameters = "#DELETE" - self.emip_BCS_IN = "Ganymed-4_0" - self.emip_oldland = "" - self.emip_newland = "#DELETE" - self.emip_MERRA2 = "MERRA2" - case "Icarus-NLv3": - self.bound_parameters = "" - self.emip_BCS_IN = "Icarus-NLv3" - self.emip_oldland = "#DELETE" - self.emip_newland = "" - self.emip_MERRA2 = "MERRA2_NewLand" + if self.bcs == "ICA": + self.parameters = "#DELETE" + self.emip_bcs_in = "GM4" + self.emip_oldland = "" + self.emip_newland = "#DELETE" + self.emip_MERRA2 = "MERRA2" + elif self.bcs == "NL3": + self.parameters = "" + self.emip_bcs_in = "NL3" + self.emip_oldland = "#DELETE" + self.emip_newland = "" + self.emip_MERRA2 = "MERRA2_NewLand" + elif self.bcs == "v12": + self.parameters = "" + self.emip_bcs_in = "NL3" + self.emip_oldland = "#DELETE" + self.emip_newland = "" + self.emip_MERRA2 = "MERRA2_NewLand" + def set_catchment(self): - if self.bcs == "Icarus-NLv3": - match self.land_choice: - case "Catchment": - self.HIST_catchment = "#DELETE" - self.GCMRUN_catchment = "#DELETE" - case "CatchmentCN-CLM4.0": - self.HIST_catchment = "" - self.GCMRUN_catchment = "" - print(f"{color.RED}IMPORTANT: please set LAND_PARAMS: to CN_CLM40 in RC/GEOS_SurfaceGridComp.rc in the experiment directory.{color.RESET}") - case "CatchmentCN-CLM4.5": - self.HIST_catchment = "" - self.GCMRUN_catchment = "" - print(f"{color.RED}IMPORTANT: please set LAND_PARAMS: to CN_CLM45 in RC/GEOS_SurfaceGridComp.rc in the experiment directory.{color.RESET}") - else: - self.land_choice = "Catchment" + if self.model == "Catchment": + self.land_choice = 1 self.HIST_catchment = "#DELETE" self.GCMRUN_catchment = "#DELETE" + elif self.model == "CatchmentCN-CLM4.0": + self.model = 2 + self.HIST_catchment = "" + self.GCMRUN_catchment = "" def config(self): self.set_bcs() diff --git a/gcmpy/scripts/model.py b/gcmpy/scripts/model.py deleted file mode 100644 index 7f373228..00000000 --- a/gcmpy/scripts/model.py +++ /dev/null @@ -1,496 +0,0 @@ -from ocean import ocean -from atmosphere import atmosphere as atmos -from land import land -from gocart 
import gocart -from env import answerdict, linkx -from utility import envdict, pathdict, color -import math, os, shutil, tempfile, yaml -from pathlib import Path -from jinja2 import Environment, FileSystemLoader, StrictUndefined - - -# combines all models (atmos, ocean, land, gocart) into one big one -class model: - def __init__(self): - self.ocean = ocean() - self.atmos = atmos() - self.land = land() - self.gocart = gocart() - self.is_FCST = False - self.fv_cubed = "" - self.bcs_res = None - self.tile_data = None - self.tile_bin = None - self.interpolate_SST = None - self.job_sgmt = None - self.begin_date = "18910301 000000" - self.end_date = "29990302 210000" - self.n_oserver_nodes = None - self.n_backend_pes = None - self.n_nodes = None - self.exp_dir = answerdict['exp_dir'].q_answer - self.oserver_restart = "NO" - - - def print_all_vars(self): - self.atmos.print_vars() - self.land.print_vars() - self.gocart.print_vars() - - - def config_models(self): - self.ocean.config() - self.atmos.config(self.ocean.NX, self.ocean.NY) - self.land.config() - self.gocart.config() - - - # setup some variables idk - def set_some_stuff(self): - if self.atmos.IM_hist >= self.ocean.IM: - self.interpolate_SST = True - else: - self.interpolate_SST = False - self.bcs_res = f"{self.atmos.res}_{self.ocean.res}" - self.tile_data = f"{self.atmos.res}_{self.ocean.res}_Pfafstetter.til" - self.tile_bin = f"{self.atmos.res}_{self.ocean.res}_Pfafstetter.TIL" - self.job_sgmt = f"{self.atmos.job_sgmt} 000000" - - - # setup experiment nodes - def set_nodes(self): - model_npes = self.atmos.NX * self.atmos.NY - - # Calculate OSERVER nodes based on recommended algorithm - if answerdict["io_server"].q_answer == True: - - # First we calculate the number of model nodes - n_model_nodes = math.ceil(model_NPES / envdict["n_CPUs"]) - - # Next the number of frontend PEs is 10% of the model PEs - n_frontend_pes = math.ceil(model_NPES * 0.1) - - # Now we roughly figure out the number of collections in the HISTORY.rc - n_hist_collections = 0 - with open(answerdict['history_template'].q_answer, 'r') as file: - in_collections = False - for line in file: - if line.split(' ', 1)[0] == "COLLECTIONS:": - in_collections = True - continue - if in_collections and line.split(' ', 1)[0] != "#": - n_hist_collections += 1 - if line.strip() == "::": - break - - # The total number of oserver PEs is frontend PEs plus number of history collections - n_oserver_pes = n_frontend_pes + n_hist_collections - - # calculate the number of oserver nodes - n_oserver_nodes = math.ceil(n_oserver_pes / envdict["n_CPUs"]) - - # The number of backend PEs is the number of history collections divided by the number of oserver nodes - n_backend_pes = math.ceil(n_hist_collections / n_oserver_nodes) - - # multigroup requires at least two backend pes - if (n_backend_pes < 2): n_backend_pes = 2 - - # Calculate the total number of nodes to request from batch - nodes = n_model_nodes + n_oserver_nodes - - else: - self.nodes = math.ceil(model_npes / envdict["n_CPUs"]) - self.n_oserver_nodes = 0 - self.n_backend_pes = 0 - - - - def set_stuff(self): - self.set_nodes() - # Longer job names are now supported with SLURM and PBS. Limits seem to be 1024 characters with SLURM - # and 230 with PBS. 
To be safe, we will limit to 200 - run_n = f"{answerdict['experiment_id'].q_answer[:200]}_RUN" # RUN Job Name - run_fn = f"{answerdict['experiment_id'].q_answer[:200]}_FCST" # Forecast Job Name - post_n = f"{answerdict['experiment_id'].q_answer[:200]}_POST" # POST Job Name - plot_n = f"{answerdict['experiment_id'].q_answer[:200]}_PLT" # PLOT Job Name - move_n = f"{answerdict['experiment_id'].q_answer[:200]}_PLT" # MOVE Job Name - archive_n = f"{answerdict['experiment_id'].q_answer[:200]}_ARCH" # ARCHIVE Job Name - regress_n = f"{answerdict['experiment_id'].q_answer[:200]}_RGRS" # REGRESS Job Name - - - # Here we need to convert POST_NDS to total tasks. Using 16 cores - # per task as a good default - post_npes = self.atmos.post_NDS * 16 - NPCUS = (post_npes + envdict["n_CPUs"] - 1)/envdict["n_CPUs"] - - ''' - Definition for each variable in the following if-else block: - - batch_cmd - PBS Batch command - batch_group - PBS Syntax for GROUP - batch_time - PBS Syntax for walltime - batch_jobname - PBS Syntax for job name - batch_outputname - PBS Syntax for job output name - batch_joinouterr - PBS Syntax for joining output and error - run_FT - Wallclock Time for gcm_forecast.j - run_FT - Wallclock Time for gcm_run.j - post_T - Wallclock Time for gcm_post.j - plot_T - Wallclock Time for gcm_plot.j - archive_T - Wallclock Time for gcm_archive.j - run_Q - Batch queue name for gcm_run.j - run_P - PE Configuration for gcm_run.j - run_FP - PE Configuration for gcm_forecast.j - post_Q - Batch queue name for gcm_post.j - plot_Q - Batch queue name for gcm_plot.j - move_Q - Batch queue name for gcm_moveplot.j - archive_Q - Batch queue name for gcm_archive.j - post_P - PE Configuration for gcm_post.j - plot_P - PE Configuration for gcm_plot.j - archive_P - PE Configuration for gcm_archive.j - move_P - PE Configuration for gcm_moveplot.j - bcs_dir - Location of Boundary Conditions - replay_ana_expID - Default Analysis Experiment for REPLAY - replay_ana_location - Default Analysis Location for REPLAY - M2_replay_ana_location - Default Analysis Location for M2 REPLAY - sst_dir - Location of SST Boundary Conditions - chem_dir - Locations of Aerosol Chemistry BCs - work_dir - User work directory <----------------- change this later - gwdrs_dir - Location of GWD_RIDGE files - coupled_dir - Coupled Ocean/Atmos Forcing - ''' - - if envdict['site'] == "NAS": - batch_cmd = "qsub" - batch_group = "PBS -W group_list=" - batch_time = "PBS -l walltime=" - batch_jobname = "PBS -N" - batch_outputname = "PBS -o " - batch_joinouterr = "PBS -j oe -k oed" - run_FT = "6:00:00" - run_T = "8:00:00" - post_T = "8:00:00" - plot_T = "8:00:00" - archive_T = "8:00:00" - run_Q = f"PBS -q normal" - run_P = f"PBS -l select={self.nodes}:ncpus={envdict['n_CPUs']}:mpiprocs={envdict['n_CPUs']}:model={answerdict['processor']}" - run_FP = f"PBS -l select=24:ncpus={envdict['n_CPUs']}:mpiprocs={envdict['n_CPUs']}:model={answerdict['processor']}" - post_Q = "PBS -q normal" - plot_Q = "PBS -q normal" - move_Q = "PBS -q normal" - archive_Q = "PBS -q normal" - post_P = f"PBS -l select={NPCUS}:ncpus={envdict['n_CPUs']}:mpiprocs={envdict['n_CPUs']}:model={answerdict['processor']}" - plot_P = f"PBS -l select=1:ncpus={envdict['n_CPUs']}:mpiprocs=1:model={answerdict['processor']}" - archive_P = f"PBS -l select=1:ncpus={envdict['n_CPUs']}:mpiprocs={envdict['n_CPUs']}:model={answerdict['processor']}" - move_P = "PBS -l select=1:ncpus=1" - boundary_path = "/nobackup/gmao_SIteam/ModelData" - bcs_dir = 
f"{boundary_path}/bcs/{self.land.bcs}/{self.land.bcs}_{self.ocean.tag}" - replay_ana_expID = "ONLY_MERRA2_SUPPORTED" - replay_ana_location = "ONLY_MERRA2_SUPPORTED" - M2_replay_ana_location = f"{boundary_path}/merra2/data"# - - # defines location of SST Boundary Conditions - oceanres = f"{self.ocean.IM}x{self.ocean.JM}" - if oceanres == "1440x720": - sst_dir = f"{boundary_path}/fvInput/g5gcm/bcs/SST/{oceanres}" - else: - sst_dir = f"{boundary_path}/fvInput/g5gcm/bcs/realtime/{self.ocean.sst_name}/{oceanres}" - if self.ocean.gridtype_abrv == "LL": - sst_dir = "/nobackupp2/estrobac/geos5/SSTDIR" - - chem_dir = f"{boundary_path}/fvInput_nc3" - work_dir = f"/nobackup/{os.environ.get('LOGNAME')}" - gwdrs_dir = f"{boundary_path}/GWD_RIDGE" - - # Coupled Ocean/Atmos Forcing - if self.ocean.name == "MIT": - coupled_dir = "/nobackupp2/estrobac/geos5/GRIDDIR" - else: - coupled_dir = f"{boundary_path}/aogcm" - - - elif envdict['site'] == "NCCS": - batch_cmd = "sbatch" - batch_group = "SBATCH --account=" - batch_time = "SBATCH --time=" - batch_jobname = "SBATCH --job-name=" - batch_outputname = "SBATCH --output=" - batch_joinouterr = "DELETE" - run_FT = "06:00:00" - run_T = "12:00:00" - post_T = "8:00:00" - plot_T = "12:00:00" - archive_T = "2:00:00" - run_Q = f"SBATCH --constraint={answerdict['processor']}" - run_P = f"SBATCH --nodes={self.nodes} --ntasks-per-node={envdict['n_CPUs']}" - run_FP = f"SBATCH --nodes={self.nodes} --ntasks-per-node={envdict['n_CPUs']}" - post_Q = f"SBATCH --constraint={answerdict['processor']}" - plot_Q = f"SBATCH --constraint={answerdict['processor']}" - move_Q = "SBATCH --partition=datamove" - archive_Q = "SBATCH --partition=datamove" - post_P = f"SBATCH --nodes={NPCUS} --ntasks-per-node={envdict['n_CPUs']}" - plot_P = f"SBATCH --nodes=4 --ntasks=4" - archive_P = "SBATCH --ntasks=1" - move_P = "SBATCH --ntasks=1" - boundary_path = "/discover/nobackup/projects/gmao" - bcs_dir = f"{boundary_path}bcs_shared/fvInput/ExtData/esm/tiles/{self.land.bcs}" - replay_ana_expID = "x0039" - replay_ana_location = f"{boundary_path}/g6dev/ltakacs/x0039" - M2_replay_ana_location = f"{boundary_path}/merra2/data" - - - # define location of SST Boundary Conditions - oceanres = f"{self.ocean.IM}x{self.ocean.JM}" - if oceanres == "1440x720": - sst_dir = f"{os.environ.get('SHARE')}/gmao_ops/fvInput/g5gcm/bcs/SST/{self.ocean.IM}x{self.ocean.JM}" - else: - sst_dir = f"{os.environ.get('SHARE')}/gmao_ops/fvInput/g5gcm/bcs/realtime/{self.ocean.sst_name}/{self.ocean.IM}x{self.ocean.JM}" - if self.ocean.gridtype_abrv == "LL": - sst_dir = "/discover/nobackup/estrobac/geos5/SSTDIR" - - chem_dir = f"{os.environ.get('SHARE')}/gmao_ops/fvInput_nc3" - work_dir = f"/discover/nobackup/{os.environ.get('LOGNAME')}" - gwdrs_dir = f"{boundary_path}/osse2/stage/BCS_FILES/GWD_RIDGE" - - # Coupled Ocean/Atmos Forcing - if self.ocean.name == "MIT": - coupled_dir = "/gpfsm/dnb32/estrobac/geos5/GRIDDIR" - else: - coupled_dir = f"{boundary_path}/bcs_shared/make_bcs_inputs/ocean" - - - elif envdict['site'] == "AWS" or envdict['SITE'] == "Azure": - batch_cmd = "sbatch" - batch_group = "#DELETE" - batch_time = "SBATCH --time=" - batch_jobname = "SBATCH --job-name=" - batch_outputname = "SBATCH --output=" - batch_joinouterr = "DELETE" - run_FT = "06:00:00" - run_T = "12:00:00" - post_T = "8:00:00" - plot_T = "12:00:00" - archive_T = "1:00:00" - run_Q = f"SBATCH --constraint={answerdict['processor']}" - run_P = f"SBATCH --nodes={self.nodes} --ntasks-per-node={envdict['n_CPUs']}" - run_FP = f"SBATCH --nodes={self.nodes} 
--ntasks-per-node={envdict['n_CPUs']}" - post_Q = "NULL" - plot_Q = "NULL" - move_Q = "NULL" - archive_Q = "NULL" - post_P = f"SBATCH --ntasks={post_npes}" - plot_P = f"SBATCH --nodes=4 --ntasks=4" - archive_P = "SBATCH --ntasks=1" - move_P = "SBATCH --ntasks=1" - boundary_path = "/ford1/share/gmao_SIteam/ModelData" - bcs_dir = f"{boundary_path}/bcs/{self.land.bcs}_{self.ocean.tag}" - replay_ana_expID = "REPLAY_UNSUPPORTED" - replay_ana_location = "REPLAY_UNSUPPORTED" - M2_replay_ana_location = "REPLAY_UNSUPPORTED" - sst_dir = f"{boundary_path}/{self.ocean.sst_name}/{self.ocean.IM}x{self.ocean.JM}" - chem_dir = f"{boundary_path}/fvInput_nc3" - work_dir = os.environ.get('HOME') - gwdrs_dir = f"{boundary_path}/GWD_RIDGE" - coupled_dir = f"{boundary_path}/aogcm" - - else: - # These are defaults for the desktop - batch_cmd = "sbatch" - batch_group = "SBATCH --account=" - batch_time = "SBATCH --time=" - batch_jobname = "SBATCH --job-name=" - batch_outputname = "SBATCH --output=" - batch_joinouterr = "DELETE" - run_FT = "06:00:00" - run_T = "12:00:00" - post_T = "8:00:00" - plot_T = "12:00:00" - archive_T = "1:00:00" - run_Q = "NULL" - run_P = "NULL" - run_FP = "NULL" - post_Q = "NULL" - plot_Q = "NULL" - move_Q = "NULL" - archive_Q = "NULL" - post_P = "NULL" - plot_P = "NULL" - archive_P = "NULL" - move_P = "NULL" - boundary_path = "/ford1/share/gmao_SIteam/ModelData" - bcs_dir = f"{boundary_path}/bcs/{self.land.bcs} /{self.land.bcs}_{self.ocean.tag}" - replay_ana_expID = "REPLAY_UNSUPPORTED" - replay_ana_location = "REPLAY_UNSUPPORTED" - M2_replay_ana_location = "REPLAY_UNSUPPORTED" - sst_dir = f"{boundary_path}/{self.ocean.sst_name}/{self.ocean.IM}x{self.ocean.JM}" - chem_dir = f"{boundary_path}/fvInput_nc3" - work_dir = os.environ.get('HOME') - gwdrs_dir = f"{boundary_path}/GWD_RIDGE" - coupled_dir = f"{boundary_path}/aogcm" - - # By default on desktop, just ignore IOSERVER for now - self.atmos.NX = 1 - self.atmos.NY = 6 - answerdict["io_server"] = False - self.n_oserver_nodes = 0 - self.n_backend_pes = 0 - - ''' - def set_hist_temp(self): - tmphist_d, tmphist_path = tempfile.mkstemp() - print(self.ocean.history_template) - shutil.copy(self.ocean.history_template, tmphist_path) - return tmphist_d, tmphist_path - ''' - - ''' - mainly used to create .{*}root files and/or populate them - ''' - def create_dotfile(self, path, content): - try: - path = Path(path) - path.parent.mkdir(parents=True, exist_ok=True) - path.touch() - with open(path, 'w') as file: - file.write(os.path.dirname(content)) - except Exception as e: - print(f"An error occurred while creating directory: {str(e)}") - exit(1) - - - ####################################################################### - # Copy Model Executable and RC Files to Experiment Directory - ####################################################################### - def RC_setup(self): - - # Make the experiment directory and the RC directory inside of it - RC_dir = os.path.join(self.exp_dir, 'RC') - - # Delete the destination directory if it exists - if os.path.exists(RC_dir): - shutil.rmtree(RC_dir) - - # Copy over all files and subdirs in install/etc, keeping symlinks, and ignoring *.tmpl files - shutil.copytree(pathdict['etc'], RC_dir, symlinks=True, ignore=shutil.ignore_patterns('*.tmpl', 'fvcore.layout.rc')) - - # Copy or symlink GEOSgcm.x (((IGNORE SINGULARITY/NATIVE BUILDS FOR NOW!!))) - geosgcmx_path = os.path.join(pathdict['bin'], 'GEOSgcm.x') - if linkx == True: - os.symlink(geosgcmx_path, os.path.join(self.exp_dir, 'GEOSgcm.x')) - else: - 
shutil.copy(geosgcmx_path, self.exp_dir) - - ####################################################################### - # Set Recommended MPI Stack Settings - ####################################################################### - def mpistacksettings(self): - - # load mpi config from YAML - with open('../yaml/mpi_config.yaml') as file: - mpidict = yaml.load(file, Loader=yaml.FullLoader) - - # retrieve config from correlating mpi setting being used - mpi_config = mpidict.get(envdict['mpi']) - - ####################################################################### - # Create directories and copy files over - ####################################################################### - # A little helper function for copying files and displaying the info to the user - def copy_helper(self, src, destination, filename): - shutil.copy(src, destination) - print(f"Creating {color.RED}{filename}{color.RESET} for Experiment: {answerdict['experiment_id'].q_answer}") - - def copy_files_into_exp(self, file_list): - print("\n\n\n") - - for file in file_list: - if file[-5:] == '.tmpl': - self.copy_helper(f"{pathdict['GEOSgcm_App']}/{file}", f"{self.exp_dir}/{file[:-5]}", file) - else: - self.copy_helper(f"{pathdict['GEOSgcm_App']}/{file}", f"{self.exp_dir}/{file}", file) - - # These files will be added if user chose to run coupled, regardless of ocean model selected. - if self.ocean.coupled == True: - self.copy_helper(f"{pathdict['install']}/coupled_diagnostics/g5lib/plotcon.j", f"{self.exp_dir}/plotcon.j", "plotcon.j") - self.copy_helper(f"{pathdict['install']}/coupled_diagnostics/g5lib/confon.py", f"{self.exp_dir}/__init__.py", "confon.py") - - if self.ocean.name == 'MOM5': - self.copy_helper(f"{pathdict['etc']}/MOM5/geos5/{self.ocean.IM}x{self.ocean.JM}/input.nml", f"{self.exp_dir}/input.nml", "input.nml") - self.copy_helper(f"{pathdict['etc']}/MOM5/geos5/{self.ocean.IM}x{self.ocean.JM}/diag_table", f"{self.exp_dir}/diag_table.nml", "diag_table") - self.copy_helper(f"{pathdict['etc']}/MOM5/geos5/{self.ocean.IM}x{self.ocean.JM}/field_table", f"{self.exp_dir}/field_table.nml", "field_table") - elif self.ocean.name == 'MOM6': - self.copy_helper(f"{pathdict['etc']}/MOM6/mom6_app/{self.ocean.IM}x{self.ocean.JM}/MOM_input", f"{self.exp_dir}/MOM_input", "MOM_input") - self.copy_helper(f"{pathdict['etc']}/MOM6/mom6_app/{self.ocean.IM}x{self.ocean.JM}/MOM_override", f"{self.exp_dir}/MOM_override", "MOM_override") - self.copy_helper(f"{pathdict['etc']}/MOM6/mom6_app/{self.ocean.IM}x{self.ocean.JM}/input.nml", f"{self.exp_dir}/input.nml", "input.nml") - self.copy_helper(f"{pathdict['etc']}/MOM6/mom6_app/{self.ocean.IM}x{self.ocean.JM}/diag_table", f"{self.exp_dir}/diag_table", "diag_table") - self.copy_helper(f"{pathdict['etc']}/MOM6/mom6_app/{self.ocean.IM}x{self.ocean.JM}/field_table", f"{self.exp_dir}/field_table", "field_table") - - if self.ocean.seaice_model == 'CICE6': - self.copy_helper(f"{pathdict['ect']}/CICE6/cice6_app/{self.ocean.IM}x{self.ocean.JM}/ice_in", f"{self.exp_dir}/ice_in") - - print(f"{color.GREEN}Done!{color.RESET}\n") - - - ####################################################################### - # Produce Final script and .rc files - ####################################################################### - - # THIS WHOLE SECTION IS WILDLY OUT OF DATE, HOWEVER I KEPT IT AS IT WAS - # IN THE ORIGINAL SCRIPT FOR NOW - def restarts(self): - # comment or un-comment restarts based on exp configuration - # --------------------------------------------------------- - rsnames = {'H2O': False, - 
'MAM': False, - 'CARMA': False, - 'GMICHEM': False, - 'STRATCHEM': False} - rstypes = ['INTERNAL','IMPORT'] - - # Load Jinja2 template - with open(f"{answerdict['exp_dir'].q_answer}/AGCM.rc", "r") as file: - file_content = file.read() - #file = Template(file_content) - - # Template in a "#" if restart is set to false - for rst in rsnames: - for typ in rstypes: - rst_string = f"{rst}_{typ}" - comment = "#" if not rsnames[rst] else "" - file_content = file_content.replace(rst_string, f"{comment}{rst_string}") - - with open(f"{answerdict['exp_dir'].q_answer}/AGCM.rc", "w") as file: - file.write(file_content) - - - - -mymodel = model() -mymodel.config_models() -#mymodel.print_all_vars() -mymodel.set_nodes() -mymodel.set_stuff() -mymodel.create_dotfile(f"{os.environ.get('HOME')}/.EXPDIRroot", answerdict['exp_dir'].q_answer) -mymodel.create_dotfile(f"{os.environ.get('HOME')}/.GROUProot", answerdict['group_root'].q_answer) -mymodel.RC_setup() -mymodel.mpistacksettings() -file_list = ['gcm_run.j', - 'gcm_post.j', - 'gcm_archive.j', - 'gcm_regress.j', - 'gcm_plot.tmpl', - 'gcm_quickplot.csh', - 'gcm_moveplot.j', - 'gcm_forecast.tmpl', - 'gcm_forecast.setup', - 'gcm_emip.setup', - 'CAP.rc.tmpl', - 'AGCM.rc.tmpl', - 'HISTORY.rc.tmpl', - 'logging.yaml', - 'fvcore_layout.rc'] - -mymodel.copy_files_into_exp(file_list) -mymodel.restarts() - - - diff --git a/gcmpy/scripts/ocean.py b/gcmpy/scripts/ocean.py old mode 100644 new mode 100755 index 2f832bce..67c4d556 --- a/gcmpy/scripts/ocean.py +++ b/gcmpy/scripts/ocean.py @@ -4,38 +4,39 @@ class ocean: def __init__(self): - self.name = answerdict["OM_name"].q_answer - self.coupled = answerdict["OM_coupled"].q_answer - self.seaice_model = answerdict["OM_seaice_model"].q_answer - self.gridtype = "" - self.gridtype_abrv = "" - self.gridname = "" - self.data = "" - self.preload = "" - self.history_template = answerdict["history_template"].q_answer - self.IM = None - self.JM = None - self.LM = answerdict["OM_vertical_res"].q_answer - self.IMO = None - self.JMO = None - self.res = "" - self.tag = "Reynolds" - self.sst_name = "" - self.sst_file = "" - self.ice_file = "" - self.kpar_file = "" - self.ostia = "" - self.out = "" - self.NX = None - self.NY = None - self.NF = None - self.latlon = "" - self.cube = "" + self.model = answerdict['OM_name'].q_answer + self.coupled = answerdict['OM_coupled'].q_answer + self.seaice_model = answerdict['OM_seaice_model'].q_answer + self.gridtype = '' + self.gridtype_abrv = '' + self.gridname = '' + self.data = '' + self.preload = '' + self.history_template = answerdict['history_template'].q_answer + self.im = None + self.jm = None + self.lm = answerdict['OM_vertical_res'].q_answer + self.imo = None + self.jmo = None + self.res = '' + self.tag = 'Reynolds' + self.sst_name = '' + self.sst_file = '' + self.ice_file = '' + self.kpar_file = '' + self.ostia = '' + self.out = '' + self.nx = None + self.ny = None + self.nf = None + self.latlon = '' + self.cube = '' self.n_procs = None - self.MOM5 = "" - self.MOM6 = "" - self.MIT = "" - self.mpt_shepherd = "" + self.MOM5 = '' + self.MOM6 = '' + self.MIT = '' + self.mpt_shepherd = '' + # for debugging purposes def print_vars(self): @@ -43,20 +44,21 @@ def print_vars(self): for var_name, var_value in all_vars.items(): print(f"{color.CYAN}{var_name}: {var_value}{color.RESET}") + def set_IMO(self): - self.IMO = f"{str(self.IM):04}" + self.imo = f"{str(self.im):04}" def set_JMO(self): - self.JMO = f"{str(self.JM):04}" + self.jmo = f"{str(self.jm):04}" def set_res(self): hres = 
answerdict["OM_horizontal_res"].q_answer if self.coupled == False and hres == "CS": - self.res = f"{self.gridtype_abrv}{self.IMO}x6C" + self.res = f"{self.gridtype_abrv}{self.imo}x6C" elif self.coupled == False: - self.res = f"{self.gridtype_abrv}{self.IMO}xPE{self.JMO}" + self.res = f"{self.gridtype_abrv}{self.imo}xPE{self.jmo}" elif self.coupled == True: - self.res = f"{self.gridtype_abrv}{self.IMO}x{self.gridtype_abrv}{self.JMO}" + self.res = f"{self.gridtype_abrv}{self.imo}x{self.gridtype_abrv}{self.jmo}" # Testing at NAS shows that coupled runs *require* MPI_SHEPHERD=true # to run. We believe this is due to LD_PRELOAD. For now we only set # this for coupled runs. @@ -64,110 +66,104 @@ def set_res(self): def set_gridname(self): if self.gridtype_abrv == "CF": - self.gridname = f"OC{self.IM}x{self.JM}-{self.gridtype_abrv}" - elif self.name == "MIT": - self.gridname = f"{self.gridtype_abrv}{self.IM}x{self.JM}-{self.gridtype_abrv}" + self.gridname = f"OC{self.im}x{self.jm}-{self.gridtype_abrv}" + elif self.model == "MIT": + self.gridname = f"{self.gridtype_abrv}{self.im}x{self.jm}-{self.gridtype_abrv}" else: - self.gridname = f"PE{self.IM}x{self.JM}-{self.gridtype_abrv}" + self.gridname = f"PE{self.im}x{self.jm}-{self.gridtype_abrv}" def set_kpar_file(self): - self.kpar_file = f"SEAWIFS_KPAR_mon_clim.{self.IM}x{self.JM}" + self.kpar_file = f"SEAWIFS_KPAR_mon_clim.{self.im}x{self.jm}" def coupled_hres(self): - match self.name: - case "MOM5": - self.name = "MOM" - self.preload = "env LD_PRELOAD=$GEOSDIR/lib/libmom.dylib" - mom5_warning = ( - ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n" - "You have chosen to set up a coupled model experiment with MOM5.\n" - "Be aware that such a set up is _known_ to have problems. See following for more details:\n" - "https://github.com/GEOS-ESM/MOM5/issues/19\n" - "If your intent is to help _fix_ above issue, your help is much appreciated. Thank you and good luck!\n" - "Otherwise, until this above issue is _fixed_ you are on your own with above choice.\n" - "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<" - ) - print(color.GREEN + mom5_warning + color.RESET) - case "MOM6": - self.preload = "env LD_PRELOAD=$GEOSDIR/lib/libmom6.dylib" - case "MIT": - self.gridtype_abrv = "llc" - - match self.name: - case "MIT": - match answerdict["OM_MIT_horizontal_res"].q_answer: - case "cs32": - self.JM = 32 - self.IM = self.JM * 6 - self.gridtype_abrv = "CM" - case "llc90": - self.gridtype_abrv = "LL" - if answerdict["AM_horizontal_res"].q_answer == "c48": - self.JM = 30 - self.IM = self.JM * 96 - else: - self.JM = 15 - self.IM = self.JM * 360 - case "llc1080": - self.gridtype_abrv = "LL" - self.JM = 60 - self.IM = self.JM * 2880 - case "llc2160": - self.gridtype_abrv = "LL" - self.JM = 72 - self.IM = self.JM * 7776 - case "MOM", "MOM6": - temp = answerdict["OM_MOM_horizontal_res"].q_answer.split() - self.IM = int(temp[0]) - self.JM = int(temp[1]) - self.gridtype = "TM" + if self.model == 'MOM5': + self.model = 'MOM' + self.preload = "env LD_PRELOAD=$GEOSDIR/lib/libmom.dylib" + mom5_warning = ( + ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n" + "You have chosen to set up a coupled model experiment with MOM5.\n" + "Be aware that such a set up is _known_ to have problems. See following for more details:\n" + "https://github.com/GEOS-ESM/MOM5/issues/19\n" + "If your intent is to help _fix_ above issue, your help is much appreciated. 
Thank you and good luck!\n" + "Otherwise, until this above issue is _fixed_ you are on your own with above choice.\n" + "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<" + ) + print(color.GREEN + mom5_warning + color.RESET) + elif self.model == 'MOM6': + self.preload = "env LD_PRELOAD=$GEOSDIR/lib/libmom6.dylib" + elif self.model == 'MIT': + self.gridtype_abrv = "llc" + + if self.model == 'MIT' and answerdict["OM_MIT_horizontal_res"].q_answer == 'cs32': + self.jm = 32 + self.im = self.jm * 6 + self.gridtype_abrv = "CM" + elif self.model == 'MIT' and answerdict["OM_MIT_horizontal_res"].q_answer == 'llc90': + self.gridtype_abrv = "LL" + if answerdict["AM_horizontal_res"].q_answer == "c48": + self.jm = 30 + self.im = self.jm * 96 + else: + self.jm = 15 + self.im = self.jm * 360 + elif self.model == 'MIT' and answerdict["OM_MIT_horizontal_res"].q_answer == 'llc1080': + self.gridtype_abrv = "LL" + self.jm = 60 + self.im = self.jm * 2880 + elif self.model == 'MIT' and answerdict["OM_MIT_horizontal_res"].q_answer == 'llc2160': + self.gridtype_abrv = "LL" + self.jm = 72 + self.im = self.jm * 7776 + elif self.model == "MOM" or self.model == "MOM6": + temp = answerdict["OM_MOM_horizontal_res"].q_answer.split() + self.im = int(temp[0]) + self.jm = int(temp[1]) + self.gridtype = "TM" def coupled_vres(self): if answerdict["AM_horizontal_res"].q_answer == "c12": - self.NX = 3 - self.NY = 2 + self.nx = 3 + self.ny = 2 else: - self.NX = 36 - self.NY = 10 + self.nx = 36 + self.ny = 10 - self.n_procs = self.NX*self.NY + self.n_procs = self.nx*self.ny - match self.name: - case "MOM", "MOM6": - self.gridtype = "Tripolar" - case "MIT": - if self.gridtype_abrv == "CM": - self.NX = 6 - self.NY = 1 - else: - match answerdict["AM_horizontal_res"].q_answer: - case "c48": - self.NX = 96 - self.NY = 1 - case "c90": - self.NX = 360 - self.NY = 1 - case "c720": - self.NX = 2880 - self.NY = 1 - case "c1440": - self.NX = 7776 - self.NY = 1 + if self.model == "MOM" or self.model == "MOM6": + self.gridtype = "Tripolar" + elif self.model == "MIT": + if self.gridtype_abrv == "CM": + self.nx = 6 + self.ny = 1 + elif answerdict["AM_horizontal_res"].q_answer == 'c48': + self.nx = 96 + self.ny = 1 + elif answerdict["AM_horizontal_res"].q_answer == 'c90': + self.nx = 360 + self.ny = 1 + elif answerdict["AM_horizontal_res"].q_answer == 'c720': + self.nx = 2880 + self.ny = 1 + elif answerdict["AM_horizontal_res"].q_answer == 'c1440': + self.nx = 7776 + self.ny = 1 + def uncoupled_hres(self): todays_date = date.today() match answerdict["OM_horizontal_res"].q_answer: case "o1": temp_res = "360 180" - self.IM, self.JM = map(int, temp_res.split()) + self.im, self.jm = map(int, temp_res.split()) self.gridtype = "LatLon" - self.NF = 1 + self.nf = 1 self.tag = "Reynolds" self.sst_name = "SST" self.out = "c" - self.sst_file = f"dataoceanfile_MERRA_sst_1971-current.{self.IM}x{self.JM}.LE" - self.ice_file = f"dataoceanfile_MERRA_fraci_1971-current.{self.IM}x{self.JM}.LE" + self.sst_file = f"dataoceanfile_MERRA_sst_1971-current.{self.im}x{self.jm}.LE" + self.ice_file = f"dataoceanfile_MERRA_fraci_1971-current.{self.im}x{self.jm}.LE" self.set_kpar_file() self.gridtype_abrv = "DE" self.latlon = "" @@ -176,14 +172,14 @@ def uncoupled_hres(self): self.data = "" case "o2": temp_res = "1440 720" - self.IM, self.JM = map(int, temp_res.split()) + self.im, self.jm = map(int, temp_res.split()) self.gridtype = "LatLon" - self.NF = 1 + self.nf = 1 self.tag = "MERRA-2" self.sst_name = "MERRA2" self.out = "e" - self.sst_file = 
f"dataoceanfile_MERRA2_SST.{self.IM}x{self.JM}.{todays_date.year}.data" - self.ice_file = f"dataoceanfile_MERRA2_ICE.{self.IM}x{self.JM}.{todays_date.year}.data" + self.sst_file = f"dataoceanfile_MERRA2_SST.{self.im}x{self.jm}.{todays_date.year}.data" + self.ice_file = f"dataoceanfile_MERRA2_ICE.{self.im}x{self.jm}.{todays_date.year}.data" self.set_kpar_file() self.gridtype_abrv = "DE" self.latlon = "" @@ -192,9 +188,9 @@ def uncoupled_hres(self): self.data = "" case "o3": temp_res = "2880 1440" - self.IM, self.JM = map(int, temp_res.split()) + self.im, self.jm = map(int, temp_res.split()) self.gridtype = "LatLon" - self.NF = 1 + self.nf = 1 self.tag = "Ostia" self.sst_name = "OSTIA_REYNOLDS" self.out = "f" @@ -208,15 +204,15 @@ def uncoupled_hres(self): self.data = "" case "CS": if int(answerdict["AM_horizontal_res"].q_answer[1:]) >= 90: - self.IM = int(answerdict["AM_horizontal_res"].q_answer[1:]) - self.JM = self.IM * 6 + self.im = int(answerdict["AM_horizontal_res"].q_answer[1:]) + self.jm = self.im * 6 self.gridtype = "Cubed-Sphere" - self.NF = 6 + self.nf = 6 self.tag = "Ostia" self.sst_name = "OSTIA_REYNOLDS" self.out = "f" - self.sst_file = f"dataoceanfile_OSTIA_REYNOLDS_SST.{self.IM}x{self.JM}.{todays_date.year}.data" - self.ice_file = f"dataoceanfile_OSTIA_REYNOLDS_ICE.{self.IM}x{self.JM}.{todays_date.year}.data" + self.sst_file = f"dataoceanfile_OSTIA_REYNOLDS_SST.{self.im}x{self.jm}.{todays_date.year}.data" + self.ice_file = f"dataoceanfile_OSTIA_REYNOLDS_ICE.{self.im}x{self.jm}.{todays_date.year}.data" self.set_kpar_file() self.gridtype_abrv = "CF" self.latlon = "#DELETE" @@ -232,7 +228,7 @@ def uncoupled_hres(self): self.set_IMO() self.set_JMO() self.set_res() - self.LM = 34 + self.lm = 34 self.model = f"Data Ocean ({answerdict['AM_horizontal_res'].q_answer})" self.coupled = "#DELETE" self.MOM5 = "#DELETE" diff --git a/gcmpy/scripts/process_questions.py b/gcmpy/scripts/process_questions.py old mode 100644 new mode 100755 index 0e09cb97..2f8299e4 --- a/gcmpy/scripts/process_questions.py +++ b/gcmpy/scripts/process_questions.py @@ -133,15 +133,14 @@ def history_template_valid(answerdict, i): @staticmethod def exp_dir_default(answerdict, i): - if i == "home_dir" or i == "exp_dir": - root = f"{os.environ.get('HOME')}/.{i[:3].upper()}DIRroot" + if i == "exp_dir": + root = f"{os.environ.get('HOME')}/.EXPDIRroot" if os.path.exists(root): try: - print("here") with open(root, "r") as file: - answerdict[i].q_default = f"{file.read()}/{answerdict['experiment_id'].q_answer}" + answerdict[i].q_default = f"{file.read().strip()}/{answerdict['experiment_id'].q_answer}" except Exception as e: - print(f"An error occurred while reading {color.BLUE}.HOMDIRroot{color.RESET}: {str(e)}") + print(f"An error occurred while reading {color.BLUE}.EXPDIRroot{color.RESET}: {str(e)}") elif envdict['site'] in ['NAS','NCCS']: answerdict[i].q_default = f"/{'discover/' if envdict['site'] == 'NCCS' else ''}nobackup/{os.environ.get('LOGNAME')}/{answerdict['experiment_id'].q_answer}" else: diff --git a/gcmpy/scripts/utility.py b/gcmpy/scripts/utility.py old mode 100644 new mode 100755 index 9d60c84a..64085b74 --- a/gcmpy/scripts/utility.py +++ b/gcmpy/scripts/utility.py @@ -64,3 +64,4 @@ def cleanup(): pathdict['GEOSgcm'] = os.path.dirname(pathdict['install']) pathdict['build'] = os.path.join(pathdict['GEOSgcm'], 'build') pathdict['GEOSgcm_App'] = os.path.join(pathdict['GEOSgcm'], 'src/Applications/@GEOSgcm_App') +pathdict['GEOS_Util'] = os.path.join(pathdict['GEOSgcm'], 'src/Shared/@GMAO_Shared/@GEOS_Util') diff 
--git a/gcmpy/yaml/atmospheric_model.yaml b/gcmpy/yaml/atmospheric_model.yaml old mode 100644 new mode 100755 index 9769d35f..38be2843 --- a/gcmpy/yaml/atmospheric_model.yaml +++ b/gcmpy/yaml/atmospheric_model.yaml @@ -37,7 +37,7 @@ io_server: type: 'confirm' prompt: 'Would you like to IOSERVER?' choices: '' - default_answer: 'True' + default_answer: True follows_up: '' diff --git a/gcmpy/yaml/directory_setup.yaml b/gcmpy/yaml/directory_setup.yaml old mode 100644 new mode 100755 diff --git a/gcmpy/yaml/exp_setup.yaml b/gcmpy/yaml/exp_setup.yaml old mode 100644 new mode 100755 diff --git a/gcmpy/yaml/gocart.yaml b/gcmpy/yaml/gocart.yaml old mode 100644 new mode 100755 diff --git a/gcmpy/yaml/land_model.yaml b/gcmpy/yaml/land_model.yaml old mode 100644 new mode 100755 index 86353a87..45fcd63d --- a/gcmpy/yaml/land_model.yaml +++ b/gcmpy/yaml/land_model.yaml @@ -1,7 +1,7 @@ LS_boundary_conditions: type: 'select' prompt: 'Select the Land Surface Boundary Conditions:' - choices: ['Icarus', 'Icarus-NLv3'] + choices: ['Icarus-NLv3', 'Icarus', 'v12'] default_answer: '' follows_up: '' @@ -10,5 +10,4 @@ LS_model: prompt: 'Select the Land Surface Model:' choices: ['Catchment', 'CatchmentCN-CLM4.0 (CN_CLM40)', 'CatchmentCN-CLM4.5 (CN_CLM45)'] default_answer: '' - follows_up: - - ['LS_boundary_conditions', 'Icarus-NLv3'] + follows_up: '' diff --git a/gcmpy/yaml/mpi_config.yaml b/gcmpy/yaml/mpi_config.yaml old mode 100644 new mode 100755 diff --git a/gcmpy/yaml/ocean_model.yaml b/gcmpy/yaml/ocean_model.yaml old mode 100644 new mode 100755 diff --git a/linkbcs.tmpl b/linkbcs.tmpl index 296be7ff..e255811e 100644 --- a/linkbcs.tmpl +++ b/linkbcs.tmpl @@ -1,43 +1,43 @@ #!/bin/csh -f -setenv BCSDIR @BCSDIR -@DATAOCEANsetenv SSTDIR @SSTDIR -@COUPLEDsetenv CPLDIR @COUPLEDIR/@OCNMODEL -setenv CHMDIR @CHMDIR -setenv BCRSLV @ATMOStag_@OCEANtag +setenv BCSDIR {{ BCSDIR }} +{{ DATAOCEAN }}setenv SSTDIR {{ SSTDIR }} +{{ COUPLED }}setenv CPLDIR {{ COUPLEDIR }}/{{ OCNMODEL }} +setenv CHMDIR {{ CHMDIR }} +setenv BCRSLV {{ ATMOStag }}_{{ OCEANtag }} -@MOM5setenv SSTDIR @COUPLEDIR/SST/MERRA2/@OGCM_IMx@OGCM_JM/v1 -@MOM6setenv SSTDIR @COUPLEDIR/SST/MERRA2/@OGCM_IMx@OGCM_JM/v1 +{{ MOM5 }}setenv SSTDIR {{ COUPLEDIR }}/SST/MERRA2/{{ OGCM_IM }}x{{ OGCM_JM }}/v1 +{{ MOM6 }}setenv SSTDIR {{ COUPLEDIR }}/SST/MERRA2/{{ OGCM_IM }}x{{ OGCM_JM }}/v1 -@COUPLED /bin/mkdir -p RESTART +{{ COUPLED }} /bin/mkdir -p RESTART /bin/mkdir -p ExtData /bin/ln -sf $CHMDIR/* ExtData -@COUPLED/bin/ln -sf $CPLDIR/@OGCM_IMx@OGCM_JM/SEAWIFS_KPAR_mon_clim.@OGCM_IMx@OGCM_JM SEAWIFS_KPAR_mon_clim.data -@COUPLED/bin/ln -sf $BCSDIR/geometry/${BCRSLV}/${BCRSLV}-Pfafstetter.til tile.data -@COUPLED/bin/ln -sf $BCSDIR/geometry/${BCRSLV}/${BCRSLV}-Pfafstetter.TRN runoff.bin -@MOM5/bin/ln -sf $BCSDIR/geometry/${BCRSLV}/MAPL_Tripolar.nc . -@MOM6/bin/ln -sf $BCSDIR/geometry/${BCRSLV}/MAPL_Tripolar.nc . 
-@MIT/bin/ln -sf $BCSDIR/geometry/${BCRSLV}/mit.ascii -@MOM5/bin/ln -sf $CPLDIR/@OGCM_IMx@OGCM_JM/vgrid@OGCM_LM.ascii ./vgrid.ascii -@MOM6/bin/ln -sf $CPLDIR/@OGCM_IMx@OGCM_JM/vgrid@OGCM_LM.ascii ./vgrid.ascii -@MIT/bin/ln -sf $CPLDIR/DC0360xPC0181_LL5400x15-LL.bin DC0360xPC0181_LL5400x15-LL.bin +{{ COUPLED }}/bin/ln -sf $CPLDIR/{{ OGCM_IM }}x{{ OGCM_JM }}/SEAWIFS_KPAR_mon_clim.{{ OGCM_IM }}x{{ OGCM_JM }} SEAWIFS_KPAR_mon_clim.data +{{ COUPLED }}/bin/ln -sf $BCSDIR/geometry/${BCRSLV}/${BCRSLV}-Pfafstetter.til tile.data +{{ COUPLED }}/bin/ln -sf $BCSDIR/geometry/${BCRSLV}/${BCRSLV}-Pfafstetter.TRN runoff.bin +{{ MOM5 }}/bin/ln -sf $BCSDIR/geometry/${BCRSLV}/MAPL_Tripolar.nc . +{{ MOM6 }}/bin/ln -sf $BCSDIR/geometry/${BCRSLV}/MAPL_Tripolar.nc . +{{ MIT }}/bin/ln -sf $BCSDIR/geometry/${BCRSLV}/mit.ascii +{{ MOM5 }}/bin/ln -sf $CPLDIR/{{ OGCM_IM }}x{{ OGCM_JM }}/vgrid{{ OGCM_LM }}.ascii ./vgrid.ascii +{{ MOM6 }}/bin/ln -sf $CPLDIR/{{ OGCM_IM }}x{{ OGCM_JM }}/vgrid{{ OGCM_LM }}.ascii ./vgrid.ascii +{{ MIT }}/bin/ln -sf $CPLDIR/DC0360xPC0181_LL5400x15-LL.bin DC0360xPC0181_LL5400x15-LL.bin # Precip correction #/bin/ln -s /discover/nobackup/projects/gmao/share/gmao_ops/fvInput/merra_land/precip_CPCUexcludeAfrica-CMAP_corrected_MERRA/GEOSdas-2_1_4 ExtData/PCP -@DATAOCEAN/bin/ln -sf $BCSDIR/geometry/$BCRSLV/${BCRSLV}-Pfafstetter.til tile.data -@DATAOCEANif( -e $BCSDIR/geometry/$BCRSLV/${BCRSLV}-Pfafstetter.TIL) then -@DATAOCEAN/bin/ln -sf $BCSDIR/geometry/$BCRSLV/${BCRSLV}-Pfafstetter.TIL tile.bin -@DATAOCEANendif +{{ DATAOCEAN }}/bin/ln -sf $BCSDIR/geometry/$BCRSLV/${BCRSLV}-Pfafstetter.til tile.data +{{ DATAOCEAN }}if( -e $BCSDIR/geometry/$BCRSLV/${BCRSLV}-Pfafstetter.TIL) then +{{ DATAOCEAN }}/bin/ln -sf $BCSDIR/geometry/$BCRSLV/${BCRSLV}-Pfafstetter.TIL tile.bin +{{ DATAOCEAN }}endif # DAS or REPLAY Mode (AGCM.rc: pchem_clim_years = 1-Year Climatology) # -------------------------------------------------------------------- -@OPS_SPECIES/bin/ln -sf $BCSDIR/PCHEM/pchem.species.Clim_Prod_Loss.z_721x72.nc4 species.data +{{ OPS_SPECIES }}/bin/ln -sf $BCSDIR/PCHEM/pchem.species.Clim_Prod_Loss.z_721x72.nc4 species.data # CMIP-5 Ozone Data (AGCM.rc: pchem_clim_years = 228-Years) # ---------------------------------------------------------- -@CMIP_SPECIES/bin/ln -sf $BCSDIR/PCHEM/pchem.species.CMIP-5.1870-2097.z_91x72.nc4 species.data +{{ CMIP_SPECIES }}/bin/ln -sf $BCSDIR/PCHEM/pchem.species.CMIP-5.1870-2097.z_91x72.nc4 species.data # S2S pre-industrial with prod/loss of stratospheric water vapor # (AGCM.rc: pchem_clim_years = 3-Years, and H2O_ProdLoss: 1 ) @@ -46,34 +46,34 @@ setenv BCRSLV @ATMOStag_@OCEANtag # MERRA-2 Ozone Data (AGCM.rc: pchem_clim_years = 39-Years) # ---------------------------------------------------------- -@MERRA2OX_SPECIES/bin/ln -sf $BCSDIR/PCHEM/pchem.species.CMIP-5.MERRA2OX.197902-201706.z_91x72.nc4 species.data +{{ MERRA2OX_SPECIES }}/bin/ln -sf $BCSDIR/PCHEM/pchem.species.CMIP-5.MERRA2OX.197902-201706.z_91x72.nc4 species.data -/bin/ln -sf $BCSDIR/land/$BCRSLV/visdf_@AGCM_IMx@AGCM_JM.dat visdf.dat -/bin/ln -sf $BCSDIR/land/$BCRSLV/nirdf_@AGCM_IMx@AGCM_JM.dat nirdf.dat -/bin/ln -sf $BCSDIR/land/$BCRSLV/vegdyn_@AGCM_IMx@AGCM_JM.dat vegdyn.data -/bin/ln -sf $BCSDIR/land/$BCRSLV/lai_clim_@AGCM_IMx@AGCM_JM.data lai.data -/bin/ln -sf $BCSDIR/land/$BCRSLV/green_clim_@AGCM_IMx@AGCM_JM.data green.data -/bin/ln -sf $BCSDIR/land/$BCRSLV/ndvi_clim_@AGCM_IMx@AGCM_JM.data ndvi.data +/bin/ln -sf $BCSDIR/land/$BCRSLV/visdf_{{ AGCM_IM }}x{{ AGCM_JM }}.dat visdf.dat +/bin/ln -sf 
$BCSDIR/land/$BCRSLV/nirdf_{{ AGCM_IM }}x{{ AGCM_JM }}.dat nirdf.dat
+/bin/ln -sf $BCSDIR/land/$BCRSLV/vegdyn_{{ AGCM_IM }}x{{ AGCM_JM }}.dat vegdyn.data
+/bin/ln -sf $BCSDIR/land/$BCRSLV/lai_clim_{{ AGCM_IM }}x{{ AGCM_JM }}.data lai.data
+/bin/ln -sf $BCSDIR/land/$BCRSLV/green_clim_{{ AGCM_IM }}x{{ AGCM_JM }}.data green.data
+/bin/ln -sf $BCSDIR/land/$BCRSLV/ndvi_clim_{{ AGCM_IM }}x{{ AGCM_JM }}.data ndvi.data
->>>GCMRUN_CATCHCN<<>>GCMRUN_CATCHCN<<>>GCMRUN_CATCHCN<<
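
The IO-server sizing in model.set_nodes() follows the commented recipe (model nodes from cores per node, frontend PEs at roughly 10% of the model PEs, one oserver PE per frontend PE plus one per HISTORY collection, and a floor of two backend PEs), but as written it reads model_NPES where model_npes was defined and stores nodes as a local instead of on self. The sketch below restates just that arithmetic with one consistent spelling; the function name and the explicit cores_per_node argument are illustrative, not part of the scripts.

import math

def size_oserver(model_npes, n_hist_collections, cores_per_node):
    """Sketch of the IO-server sizing described in model.set_nodes().

    Returns (total_nodes, oserver_nodes, backend_pes)."""
    # Nodes needed by the model itself.
    n_model_nodes = math.ceil(model_npes / cores_per_node)

    # Frontend PEs are roughly 10% of the model PEs.
    n_frontend_pes = math.ceil(model_npes * 0.1)

    # One oserver PE per frontend PE plus one per HISTORY collection,
    # rounded up to whole nodes.
    n_oserver_pes = n_frontend_pes + n_hist_collections
    n_oserver_nodes = math.ceil(n_oserver_pes / cores_per_node)

    # Backend PEs: collections spread across the oserver nodes,
    # with a floor of two because multigroup needs at least two.
    n_backend_pes = max(2, math.ceil(n_hist_collections / n_oserver_nodes))

    return n_model_nodes + n_oserver_nodes, n_oserver_nodes, n_backend_pes

# Example: 216 model PEs (NX*NY), 30 HISTORY collections, 120-core nodes.
assert size_oserver(216, 30, 120) == (3, 1, 30)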
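
The same method counts HISTORY collections by scanning the history template between the COLLECTIONS: line and the "::" terminator, skipping "#" comments. A standalone counter in that spirit is sketched below; unlike the in-line scan it does not count the "::" line itself and it does count a collection named directly on the COLLECTIONS: line. The file layout is assumed from that scan, not verified against every HISTORY.rc variant.

def count_history_collections(history_rc_path):
    """Count active collections between 'COLLECTIONS:' and the '::' terminator."""
    count = 0
    in_collections = False
    with open(history_rc_path) as fh:
        for raw in fh:
            line = raw.strip()
            if line.startswith("COLLECTIONS:"):
                in_collections = True
                rest = line[len("COLLECTIONS:"):].strip()
                if rest and not rest.startswith("#"):
                    count += 1          # first collection named on the same line
                continue
            if not in_collections:
                continue
            if line == "::":            # end of the collection list
                break
            if line and not line.startswith("#"):
                count += 1
    return count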
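
set_stuff() converts POST_NDS to post-processing nodes with a plain "/", so NPCUS becomes a float that is interpolated directly into the PBS select line. A ceiling division keeps it an integer; only the 16-cores-per-task default comes from the script's own comment, and the helper itself is illustrative.

def post_node_count(post_nds, cores_per_node, cores_per_task=16):
    """Nodes needed for gcm_post.j, using ceiling integer division."""
    post_npes = post_nds * cores_per_task
    # Equivalent to math.ceil(post_npes / cores_per_node) without floats.
    return -(-post_npes // cores_per_node)

assert post_node_count(4, 40) == 2   # 64 PEs on 40-core nodes -> 2 nodes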
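
The linkbcs.tmpl hunk above swaps the old @VARIABLE substitution tokens for Jinja2 {{ VARIABLE }} placeholders, which matches the jinja2 imports in model.py. A minimal rendering sketch follows; the variable values, paths, and the helper function are placeholders for whatever the setup scripts actually pass in.

from jinja2 import Environment, FileSystemLoader, StrictUndefined

def render_template(template_dir, template_name, out_path, **values):
    """Render one Jinja2-converted template (e.g. linkbcs.tmpl) to a file.

    StrictUndefined makes a missing placeholder fail loudly instead of
    silently rendering as an empty string."""
    env = Environment(loader=FileSystemLoader(template_dir),
                      undefined=StrictUndefined,
                      keep_trailing_newline=True)
    text = env.get_template(template_name).render(**values)
    with open(out_path, "w") as fh:
        fh.write(text)

if __name__ == "__main__":
    # Illustrative values only (expects linkbcs.tmpl in the current directory);
    # the real values come from the experiment setup.
    render_template(".", "linkbcs.tmpl", "linkbcs",
                    BCSDIR="/path/to/bcs", SSTDIR="/path/to/sst",
                    CHMDIR="/path/to/chem", COUPLEDIR="/path/to/aogcm",
                    OCNMODEL="MOM6", ATMOStag="CF0180x6C", OCEANtag="TM0720xTM0410",
                    DATAOCEAN="#", COUPLED="", MOM5="#", MOM6="", MIT="#",
                    OPS_SPECIES="", CMIP_SPECIES="#", MERRA2OX_SPECIES="#",
                    OGCM_IM="720", OGCM_JM="410", OGCM_LM="50",
                    AGCM_IM="180", AGCM_JM="1080")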
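
process_questions.py now derives the exp_dir default from ~/.EXPDIRroot (the file model.create_dotfile() writes) and strips the trailing newline before appending the experiment id. A self-contained version of that lookup is sketched below; the NAS/NCCS fallbacks mirror the hunk, while the final fallback is only a guess because that branch lies outside the hunk.

import os
from pathlib import Path

def default_exp_dir(experiment_id, site=None):
    """Default experiment directory: ~/.EXPDIRroot first, then site defaults."""
    root_file = Path.home() / ".EXPDIRroot"
    if root_file.exists():
        root = root_file.read_text().strip()   # drop the trailing newline
        return f"{root}/{experiment_id}"
    if site == "NCCS":
        return f"/discover/nobackup/{os.environ.get('LOGNAME')}/{experiment_id}"
    if site == "NAS":
        return f"/nobackup/{os.environ.get('LOGNAME')}/{experiment_id}"
    # Fallback for other sites -- assumed, not taken from the script.
    return str(Path.home() / experiment_id)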