From 41ba1cb73c39c680a794a99e791c0ad21babfc1b Mon Sep 17 00:00:00 2001 From: AndrewEichmann-NOAA <58948505+AndrewEichmann-NOAA@users.noreply.github.com> Date: Wed, 24 Jan 2024 14:59:37 -0500 Subject: [PATCH] Fix the logging (#863) The use of the standard Python logging has, for some reason, not been working: nothing apparently gets sent to stdout. This PR replaces the Python logging with wxflow Logger. Some configuration previously used is perhaps lost, but this is a 90% solution. Resolves https://github.com/NOAA-EMC/GDASApp/issues/853 --- scripts/exgdas_global_marine_analysis_post.py | 9 ++- scripts/exgdas_global_marine_analysis_prep.py | 58 +++++++++---------- scripts/exglobal_prep_ocean_obs.py | 21 +++---- 3 files changed, 41 insertions(+), 47 deletions(-) diff --git a/scripts/exgdas_global_marine_analysis_post.py b/scripts/exgdas_global_marine_analysis_post.py index 2660acf8b..6e30f7cad 100755 --- a/scripts/exgdas_global_marine_analysis_post.py +++ b/scripts/exgdas_global_marine_analysis_post.py @@ -21,9 +21,10 @@ import os import glob import shutil -import logging from datetime import datetime, timedelta -from wxflow import FileHandler +from wxflow import FileHandler, Logger + +logger = Logger() # TODO: Move this somewhere else? 
@@ -35,9 +36,7 @@ def list_all_files(dir_in, dir_out, wc='*', fh_list=[]): return fh_list -# set up logger -logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') -logging.info(f"---------------- Copy from RUNDIR to COMOUT") +logger.info(f"---------------- Copy from RUNDIR to COMOUT") com_ocean_analysis = os.getenv('COM_OCEAN_ANALYSIS') com_ice_restart = os.getenv('COM_ICE_RESTART') diff --git a/scripts/exgdas_global_marine_analysis_prep.py b/scripts/exgdas_global_marine_analysis_prep.py index 4829ffcf4..6870ab0cd 100755 --- a/scripts/exgdas_global_marine_analysis_prep.py +++ b/scripts/exgdas_global_marine_analysis_prep.py @@ -26,17 +26,15 @@ import dateutil.parser as dparser import f90nml import shutil -import logging import subprocess from datetime import datetime, timedelta import pytz from netCDF4 import Dataset import xarray as xr import numpy as np -from wxflow import (AttrDict, Template, TemplateConstants, YAMLFile, FileHandler) +from wxflow import (AttrDict, Logger, Template, TemplateConstants, YAMLFile, FileHandler) -# set up logger -logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') +logger = Logger() # get absolute path of ush/ directory either from env or relative to this file my_dir = os.path.dirname(__file__) @@ -91,7 +89,7 @@ def cice_hist2fms(input_filename, output_filename): ds = xr.open_dataset(input_filename_real) if 'aicen' in ds.variables and 'hicen' in ds.variables and 'hsnon' in ds.variables: - logging.info(f"*** Already reformatted, skipping.") + logger.info(f"*** Already reformatted, skipping.") return # rename the dimensions to xaxis_1 and yaxis_1 @@ -114,7 +112,7 @@ def test_hist_date(histfile, ref_date): ncf = Dataset(histfile, 'r') hist_date = dparser.parse(ncf.variables['time'].units, fuzzy=True) + timedelta(hours=int(ncf.variables['time'][0])) ncf.close() - logging.info(f"*** history file date: {hist_date} 
expected date: {ref_date}") + logger.info(f"*** history file date: {hist_date} expected date: {ref_date}") assert hist_date == ref_date, 'Inconsistent bkg date' @@ -225,7 +223,7 @@ def find_clim_ens(input_date): ################################################################################ # runtime environment variables, create directories -logging.info(f"---------------- Setup runtime environement") +logger.info(f"---------------- Setup runtime environement") comin_obs = os.getenv('COMIN_OBS') anl_dir = os.getenv('DATA') @@ -259,7 +257,7 @@ def find_clim_ens(input_date): ################################################################################ # fetch observations -logging.info(f"---------------- Stage observations") +logger.info(f"---------------- Stage observations") # create config dict from runtime env envconfig = {'window_begin': f"{window_begin.strftime('%Y-%m-%dT%H:%M:%SZ')}", @@ -278,29 +276,29 @@ def find_clim_ens(input_date): # copy obs from COM_OBS to DATA/obs for obs_file in obs_files: - logging.info(f"******* {obs_file}") + logger.info(f"******* {obs_file}") obs_src = os.path.join(os.getenv('COM_OBS'), obs_file) obs_dst = os.path.join(os.path.realpath(obs_in), obs_file) - logging.info(f"******* {obs_src}") + logger.info(f"******* {obs_src}") if os.path.exists(obs_src): - logging.info(f"******* fetching {obs_file}") + logger.info(f"******* fetching {obs_file}") obs_list.append([obs_src, obs_dst]) else: - logging.info(f"******* {obs_file} is not in the database") + logger.info(f"******* {obs_file} is not in the database") FileHandler({'copy': obs_list}).sync() ################################################################################ # stage static files -logging.info(f"---------------- Stage static files") +logger.info(f"---------------- Stage static files") ufsda.stage.soca_fix(stage_cfg) ################################################################################ # stage ensemble members if dohybvar: - 
logging.info("---------------- Stage ensemble members") + logger.info("---------------- Stage ensemble members") nmem_ens = int(os.getenv('NMEM_ENS')) longname = {'ocn': 'ocean', 'ice': 'ice'} ens_member_list = [] @@ -324,7 +322,7 @@ def find_clim_ens(input_date): cice_fname = os.path.realpath(os.path.join(static_ens, "ice."+str(mem)+".nc")) cice_hist2fms(cice_fname, cice_fname) else: - logging.info("---------------- Stage offline ensemble members") + logger.info("---------------- Stage offline ensemble members") ens_member_list = [] clim_ens_dir = find_clim_ens(pytz.utc.localize(window_begin, is_dst=None)) nmem_ens = len(glob.glob(os.path.abspath(os.path.join(clim_ens_dir, 'ocn.*.nc')))) @@ -340,12 +338,12 @@ def find_clim_ens(input_date): ################################################################################ # prepare JEDI yamls -logging.info(f"---------------- Generate JEDI yaml files") +logger.info(f"---------------- Generate JEDI yaml files") ################################################################################ # copy yaml for grid generation -logging.info(f"---------------- generate gridgen.yaml") +logger.info(f"---------------- generate gridgen.yaml") gridgen_yaml_src = os.path.realpath(os.path.join(gdas_home, 'parm', 'soca', 'gridgen', 'gridgen.yaml')) gridgen_yaml_dst = os.path.realpath(os.path.join(stage_cfg['stage_dir'], 'gridgen.yaml')) FileHandler({'copy': [[gridgen_yaml_src, gridgen_yaml_dst]]}).sync() @@ -354,14 +352,14 @@ def find_clim_ens(input_date): # generate the YAML file for the post processing of the clim. ens. 
B berror_yaml_dir = os.path.join(gdas_home, 'parm', 'soca', 'berror') -logging.info(f"---------------- generate soca_ensb.yaml") +logger.info(f"---------------- generate soca_ensb.yaml") berr_yaml = os.path.join(anl_dir, 'soca_ensb.yaml') berr_yaml_template = os.path.join(berror_yaml_dir, 'soca_ensb.yaml') config = YAMLFile(path=berr_yaml_template) config = Template.substitute_structure(config, TemplateConstants.DOUBLE_CURLY_BRACES, envconfig.get) config.save(berr_yaml) -logging.info(f"---------------- generate soca_ensweights.yaml") +logger.info(f"---------------- generate soca_ensweights.yaml") berr_yaml = os.path.join(anl_dir, 'soca_ensweights.yaml') berr_yaml_template = os.path.join(berror_yaml_dir, 'soca_ensweights.yaml') config = YAMLFile(path=berr_yaml_template) @@ -371,7 +369,7 @@ def find_clim_ens(input_date): ################################################################################ # copy yaml for localization length scales -logging.info(f"---------------- generate soca_setlocscales.yaml") +logger.info(f"---------------- generate soca_setlocscales.yaml") locscales_yaml_src = os.path.join(gdas_home, 'parm', 'soca', 'berror', 'soca_setlocscales.yaml') locscales_yaml_dst = os.path.join(stage_cfg['stage_dir'], 'soca_setlocscales.yaml') FileHandler({'copy': [[locscales_yaml_src, locscales_yaml_dst]]}).sync() @@ -379,7 +377,7 @@ def find_clim_ens(input_date): ################################################################################ # copy yaml for correlation length scales -logging.info(f"---------------- generate soca_setcorscales.yaml") +logger.info(f"---------------- generate soca_setcorscales.yaml") corscales_yaml_src = os.path.join(gdas_home, 'parm', 'soca', 'berror', 'soca_setcorscales.yaml') corscales_yaml_dst = os.path.join(stage_cfg['stage_dir'], 'soca_setcorscales.yaml') FileHandler({'copy': [[corscales_yaml_src, corscales_yaml_dst]]}).sync() @@ -387,7 +385,7 @@ def find_clim_ens(input_date): 
################################################################################ # copy yaml for diffusion initialization -logging.info(f"---------------- generate soca_parameters_diffusion_hz.yaml") +logger.info(f"---------------- generate soca_parameters_diffusion_hz.yaml") diffu_hz_yaml = os.path.join(anl_dir, 'soca_parameters_diffusion_hz.yaml') diffu_hz_yaml_dir = os.path.join(gdas_home, 'parm', 'soca', 'berror') diffu_hz_yaml_template = os.path.join(berror_yaml_dir, 'soca_parameters_diffusion_hz.yaml') @@ -395,7 +393,7 @@ def find_clim_ens(input_date): config = Template.substitute_structure(config, TemplateConstants.DOUBLE_CURLY_BRACES, envconfig.get) config.save(diffu_hz_yaml) -logging.info(f"---------------- generate soca_parameters_diffusion_vt.yaml") +logger.info(f"---------------- generate soca_parameters_diffusion_vt.yaml") diffu_vt_yaml = os.path.join(anl_dir, 'soca_parameters_diffusion_vt.yaml') diffu_vt_yaml_dir = os.path.join(gdas_home, 'parm', 'soca', 'berror') diffu_vt_yaml_template = os.path.join(berror_yaml_dir, 'soca_parameters_diffusion_vt.yaml') @@ -406,7 +404,7 @@ def find_clim_ens(input_date): ################################################################################ # generate yaml for bump/nicas (used for correlation and/or localization) -logging.info(f"---------------- generate BUMP/NICAS localization yamls") +logger.info(f"---------------- generate BUMP/NICAS localization yamls") # localization bump yaml bumpdir = 'bump' ufsda.disk_utils.mkdir(os.path.join(anl_dir, bumpdir)) @@ -424,7 +422,7 @@ def find_clim_ens(input_date): ################################################################################ # generate yaml for soca_var -logging.info(f"---------------- generate var.yaml") +logger.info(f"---------------- generate var.yaml") var_yaml = os.path.join(anl_dir, 'var.yaml') var_yaml_template = os.path.join(gdas_home, 'parm', @@ -440,13 +438,13 @@ def find_clim_ens(input_date): # select the SABER BLOCKS to use if 
'SABER_BLOCKS_YAML' in os.environ and os.environ['SABER_BLOCKS_YAML']: saber_blocks_yaml = os.getenv('SABER_BLOCKS_YAML') - logging.info(f"using non-default SABER blocks yaml: {saber_blocks_yaml}") + logger.info(f"using non-default SABER blocks yaml: {saber_blocks_yaml}") else: - logging.info(f"using default SABER blocks yaml") + logger.info(f"using default SABER blocks yaml") os.environ['SABER_BLOCKS_YAML'] = os.path.join(gdas_home, 'parm', 'soca', 'berror', 'saber_blocks.yaml') # substitute templated variables in the var config -logging.info(f"{config}") +logger.info(f"{config}") varconfig = YAMLFile(path=var_yaml_template) varconfig = Template.substitute_structure(varconfig, TemplateConstants.DOUBLE_CURLY_BRACES, config.get) varconfig = Template.substitute_structure(varconfig, TemplateConstants.DOLLAR_PARENTHESES, config.get) @@ -460,7 +458,7 @@ def find_clim_ens(input_date): # Prepare the yamls for the "checkpoint" jjob # prepare yaml and CICE restart for soca to cice change of variable -logging.info(f"---------------- generate soca to cice yamls") +logger.info(f"---------------- generate soca to cice yamls") # make a copy of the CICE6 restart rst_date = fcst_begin.strftime('%Y%m%d.%H%M%S') ice_rst = os.path.join(os.getenv('COM_ICE_RESTART_PREV'), f'{rst_date}.cice_model.res.nc') @@ -483,7 +481,7 @@ def find_clim_ens(input_date): outyaml.save(varchgyaml) # prepare yaml for soca to MOM6 IAU increment -logging.info(f"---------------- generate soca to MOM6 IAU yaml") +logger.info(f"---------------- generate soca to MOM6 IAU yaml") socaincr2mom6_yaml = os.path.join(anl_dir, 'socaincr2mom6.yaml') socaincr2mom6_yaml_template = os.path.join(gdas_home, 'parm', 'soca', 'variational', 'socaincr2mom6.yaml') s2mconfig = YAMLFile(path=socaincr2mom6_yaml_template) diff --git a/scripts/exglobal_prep_ocean_obs.py b/scripts/exglobal_prep_ocean_obs.py index a8ab59d6b..d4326f0f6 100755 --- a/scripts/exglobal_prep_ocean_obs.py +++ b/scripts/exglobal_prep_ocean_obs.py @@ -2,15 
+2,12 @@ # exglobal_prep_ocean_obs.py # Pepares observations for marine DA from datetime import datetime, timedelta -import logging import os from soca import prep_marine_obs import subprocess -from wxflow import YAMLFile, save_as_yaml, FileHandler +from wxflow import YAMLFile, save_as_yaml, FileHandler, Logger -# TODO (AFE) figure out why logger is not logging -# set up logger -logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') +logger = Logger() cyc = os.getenv('cyc') PDY = os.getenv('PDY') @@ -35,7 +32,7 @@ if os.path.exists(OBSPREP_YAML): obsprepConfig = YAMLFile(OBSPREP_YAML) else: - print(f"CRITICAL: OBSPREP_YAML file {OBSPREP_YAML} does not exist") + logger.critical(f"OBSPREP_YAML file {OBSPREP_YAML} does not exist") raise FileNotFoundError filesToSave = [] @@ -47,10 +44,10 @@ try: obsSpaceName = observer['obs space']['name'] - print(f"obsSpaceName: {obsSpaceName}") + logger.info(f"obsSpaceName: {obsSpaceName}") except KeyError: - print(f"observer: {observer}") - print("WARNING: Ill-formed observer yaml file, skipping") + logger.warning(f"observer: {observer}") + logger.warning("Ill-formed observer yaml file, skipping") continue # to next observer # ...look through the observations in OBSPREP_YAML... 
@@ -62,13 +59,13 @@ # ...for a matching name, and process the observation source if obsprepSpaceName == obsSpaceName: - print(f"obsprepSpaceName: {obsSpaceName}") + logger.info(f"obsprepSpaceName: {obsSpaceName}") # fetch the obs files from DMPDIR to RUNDIR matchingFiles = prep_marine_obs.obs_fetch(obsprepSpace) if not matchingFiles: - print("WARNING: No files found for obs source , skipping") + logger.warning("No files found for obs source , skipping") break # to next observation source in OBS_YAML obsprepSpace['input files'] = matchingFiles @@ -87,7 +84,7 @@ filesToSave.append([iodaYamlFilename, os.path.join(COMOUT_OBS, iodaYamlFilename)]) except TypeError: - print("CRITICAL: Ill-formed OBS_YAML or OBSPREP_YAML file, exiting") + logger.critical("Ill-formed OBS_YAML or OBSPREP_YAML file, exiting") raise if not os.path.exists(COMOUT_OBS):