From c740e562e9cac22dcc53feeb93861475838119bc Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Thu, 15 Feb 2024 15:22:31 -0600 Subject: [PATCH 01/24] gocart inst_aod fix --- ush/forecast_postdet.sh | 2 +- ush/forecast_predet.sh | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 052e549251..5a8496f560 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -931,7 +931,7 @@ GOCART_rc() { GOCART_postdet() { echo "SUB ${FUNCNAME[0]}: Linking output data for GOCART" - for fhr in ${FV3_OUTPUT_FH}; do + for fhr in ${GOCART_OUTPUT_FH}; do local vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) # Temporarily delete existing files due to noclobber in GOCART diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index 1aaa1a4b9d..7c6888e9a1 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -265,4 +265,6 @@ GOCART_predet(){ if [[ ! -d "${COM_CHEM_HISTORY}" ]]; then mkdir -p "${COM_CHEM_HISTORY}"; fi + GOCART_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "6" "${FHMAX}") + # TODO: AERO_HISTORY.rc has hardwired output frequency to 6 hours } From a2a9a2c5572ff6c4855fa9901f54aacd2f638d11 Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Fri, 16 Feb 2024 08:00:45 -0600 Subject: [PATCH 02/24] Missing GOCART spot --- ush/forecast_postdet.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 5a8496f560..37bc2752a7 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -952,7 +952,7 @@ GOCART_out() { # TO DO: this should be linked but there were issues where gocart was crashing if it was linked local fhr local vdate - for fhr in ${FV3_OUTPUT_FH}; do + for fhr in ${GOCART_OUTPUT_FH}; do if (( fhr == 0 )); then continue; fi vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) ${NCP} "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \ From d371fcaf602a1c4708896e0b079bf221a2c267c9 Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Fri, 16 Feb 2024 09:47:49 -0600 Subject: [PATCH 03/24] Update gdas hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index b37680be78..deac283e33 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit b37680be78560343b940352b9b2c6727190a642c +Subproject commit deac283e3337ed982f2dbb3ec7267f7db7e26800 From 24146ff2096b2870afc29a5728ebc482dd61b924 Mon Sep 17 00:00:00 2001 From: Cory Martin Date: Fri, 16 Feb 2024 09:56:18 -0600 Subject: [PATCH 04/24] modify snow DA test to include aero DA --- ci/cases/pr/{C96_atmsnowDA.yaml => C96_atm-aero-snowDA.yaml} | 4 ++-- ...owDA_defaults_ci.yaml => atm-aero-snowDA_defaults_ci.yaml} | 0 2 files changed, 2 insertions(+), 2 deletions(-) rename ci/cases/pr/{C96_atmsnowDA.yaml => C96_atm-aero-snowDA.yaml} (81%) rename ci/cases/yamls/{atmsnowDA_defaults_ci.yaml => atm-aero-snowDA_defaults_ci.yaml} (100%) diff --git a/ci/cases/pr/C96_atmsnowDA.yaml b/ci/cases/pr/C96_atm-aero-snowDA.yaml similarity index 81% rename from ci/cases/pr/C96_atmsnowDA.yaml rename to ci/cases/pr/C96_atm-aero-snowDA.yaml index 35fcc10fb2..4266f0578a 100644 --- a/ci/cases/pr/C96_atmsnowDA.yaml +++ b/ci/cases/pr/C96_atm-aero-snowDA.yaml @@ -4,7 +4,7 @@ experiment: arguments: pslot: {{ 'pslot' | getenv }} - app: ATM + app: ATMA resdetatmos: 96 comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR @@ -14,7 
+14,7 @@ arguments: nens: 0 gfs_cyc: 1 start: cold - yaml: {{ HOMEgfs }}/ci/cases/yamls/atmsnowDA_defaults_ci.yaml + yaml: {{ HOMEgfs }}/ci/cases/yamls/atm-aero-snowDA_defaults_ci.yaml skip_ci_on_hosts: - orion diff --git a/ci/cases/yamls/atmsnowDA_defaults_ci.yaml b/ci/cases/yamls/atm-aero-snowDA_defaults_ci.yaml similarity index 100% rename from ci/cases/yamls/atmsnowDA_defaults_ci.yaml rename to ci/cases/yamls/atm-aero-snowDA_defaults_ci.yaml From f7a13039d8893b4f38eaa07d15557f27e7b8767d Mon Sep 17 00:00:00 2001 From: CoryMartin-NOAA Date: Thu, 29 Feb 2024 21:33:38 +0000 Subject: [PATCH 05/24] random bugfix --- ush/python/pygfs/task/aero_analysis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py index 0e515a0df4..b6152ea945 100644 --- a/ush/python/pygfs/task/aero_analysis.py +++ b/ush/python/pygfs/task/aero_analysis.py @@ -46,7 +46,7 @@ def __init__(self, config): 'npz_anl': self.config['LEVS'] - 1, 'AERO_WINDOW_BEGIN': _window_begin, 'AERO_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H", - 'aero_bkg_fhr': map(int, self.config['aero_bkg_times'].split(',')), + 'aero_bkg_fhr': map(int, str(self.config['aero_bkg_times']).split(',')), 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", From 8be840a6f10e82aaa2cb2985d041c995efd453b4 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Fri, 8 Mar 2024 17:21:32 -0500 Subject: [PATCH 06/24] parse jediyaml only once --- ush/python/pygfs/task/analysis.py | 91 ++++++++++++++++++++------ ush/python/pygfs/task/snow_analysis.py | 7 +- 2 files changed, 75 insertions(+), 23 deletions(-) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 5709bc130e..ceabffe558 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -4,6 +4,7 @@ import glob import tarfile from logging import getLogger +from pprint import pformat from netCDF4 import Dataset from typing import List, Dict, Any, Union @@ -30,6 +31,10 @@ def __init__(self, config: Dict[str, Any]) -> None: def initialize(self) -> None: super().initialize() + + # all JEDI analyses need a JEDI config + self.task_config.jedi_config = self.get_jedi_config() + # all analyses need to stage observations obs_dict = self.get_obs_dict() FileHandler(obs_dict).sync() @@ -41,13 +46,33 @@ def initialize(self) -> None: # link jedi executable to run directory self.link_jediexe() + @logit(logger) + def get_jedi_config(self) -> Dict[str, Any]: + """Compile a dictionary of JEDI configuration from JEDIYAML template file + + Parameters + ---------- + + Returns + ---------- + jedi_config : Dict + a dictionary containing the fully rendered JEDI yaml configuration + """ + + # generate JEDI YAML file + logger.info(f"Generate JEDI YAML config: {self.task_config.jedi_yaml}") + jedi_config = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, searchpath=self.gdasapp_j2tmpl_dir) + logger.debug(f"JEDI config:\n{pformat(jedi_config)}") + + return jedi_config + @logit(logger) def get_obs_dict(self) -> Dict[str, Any]: """Compile a dictionary of observation files to copy - This method uses the OBS_LIST configuration variable to generate a dictionary - from a list of YAML files that specify what observation files are to be - 
copied to the run directory from the observation input directory + This method extracts 'observers' from the JEDI yaml and from that list, extracts a list + observation files that are to be copied to the run directory + from the observation input directory Parameters ---------- @@ -57,13 +82,13 @@ def get_obs_dict(self) -> Dict[str, Any]: obs_dict: Dict a dictionary containing the list of observation files to copy for FileHandler """ - logger.debug(f"OBS_LIST: {self.task_config['OBS_LIST']}") - obs_list_config = parse_j2yaml(self.task_config["OBS_LIST"], self.task_config, searchpath=self.gdasapp_j2tmpl_dir) - logger.debug(f"obs_list_config: {obs_list_config}") - # get observers from master dictionary - observers = obs_list_config['observers'] + + logger.info(f"Extracting a list of observation files from {self.task_config.JEDIYAML}") + observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations') + logger.debug(f"observations:\n{pformat(observations)}") + copylist = [] - for ob in observers: + for ob in observations['observers']: obfile = ob['obs space']['obsdatain']['engine']['obsfile'] basename = os.path.basename(obfile) copylist.append([os.path.join(self.task_config['COM_OBS'], basename), obfile]) @@ -77,9 +102,9 @@ def get_obs_dict(self) -> Dict[str, Any]: def get_bias_dict(self) -> Dict[str, Any]: """Compile a dictionary of observation files to copy - This method uses the OBS_LIST configuration variable to generate a dictionary - from a list of YAML files that specify what observation bias correction files - are to be copied to the run directory from the observation input directory + This method extracts 'observers' from the JEDI yaml and from that list, extracts a list + observation bias correction files that are to be copied to the run directory + from the observation input directory Parameters ---------- @@ -89,13 +114,13 @@ def get_bias_dict(self) -> Dict[str, Any]: bias_dict: Dict a dictionary containing the list of observation bias files to copy for FileHandler """ - logger.debug(f"OBS_LIST: {self.task_config['OBS_LIST']}") - obs_list_config = parse_j2yaml(self.task_config["OBS_LIST"], self.task_config, searchpath=self.gdasapp_j2tmpl_dir) - logger.debug(f"obs_list_config: {obs_list_config}") - # get observers from master dictionary - observers = obs_list_config['observers'] + + logger.info(f"Extracting a list of bias correction files from {self.task_config.JEDIYAML}") + observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations') + logger.debug(f"observations:\n{pformat(observations)}") + copylist = [] - for ob in observers: + for ob in observations['observers']: if 'obs bias' in ob.keys(): obfile = ob['obs bias']['input file'] obdir = os.path.dirname(obfile) @@ -103,7 +128,7 @@ def get_bias_dict(self) -> Dict[str, Any]: prefix = '.'.join(basename.split('.')[:-2]) for file in ['satbias.nc', 'satbias_cov.nc', 'tlapse.txt']: bfile = f"{prefix}.{file}" - copylist.append([os.path.join(self.task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)]) + copylist.append([os.path.join(self.task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)]) # TODO: Why is this specific to ATMOS? 
bias_dict = { 'mkdir': [os.path.join(self.runtime_config.DATA, 'bc')], @@ -328,3 +353,31 @@ def tgz_diags(statfile: str, diagdir: str) -> None: # Add diag files to tarball for diagfile in diags: tgz.add(diagfile, arcname=os.path.basename(diagfile)) + + +@logit(logger) +def find_value_in_nested_dict(nested_dict: Dict, target_key: str) -> Any: + """ + Recursively search through a nested dictionary and return the value for the target key. + Parameters + ---------- + nested_dict : Dict + Dictionary to search + target_key : str + Key to search for + + Returns + ------- + Any + Value of the key + + Raises + ------ + KeyError + If key is not found in dictionary + + TODO: move this to a utility module so it can be used elsewhere + """ + if not isinstance(nested_dict, dict): + return None + return nested_dict.get(target_key) or next((find_value_in_nested_dict(value, target_key) for value in nested_dict.values() if isinstance(value, dict)), None) diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index 9eee8314c3..43e9d6fe3e 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -269,11 +269,10 @@ def initialize(self) -> None: logger.info("Staging ensemble backgrounds") FileHandler(self.get_ens_bkg_dict(localconf)).sync() - # generate letkfoi YAML file - logger.info(f"Generate JEDI LETKF YAML file: {self.task_config.jedi_yaml}") - letkfoi_yaml = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, searchpath=self.gdasapp_j2tmpl_dir) - save_as_yaml(letkfoi_yaml, self.task_config.jedi_yaml) + # Write out letkfoi YAML file + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) logger.info(f"Wrote letkfoi YAML to: {self.task_config.jedi_yaml}") + # need output dir for diags and anl logger.info("Create empty output [anl, diags] directories to receive output from executable") newdirs = [ From 03fc903318b0cbbd97fe8439ad12b700be7ddb27 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Fri, 8 Mar 2024 17:32:01 -0500 Subject: [PATCH 07/24] fix pynorms --- ush/python/pygfs/task/analysis.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index ceabffe558..c7d564647e 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -128,7 +128,8 @@ def get_bias_dict(self) -> Dict[str, Any]: prefix = '.'.join(basename.split('.')[:-2]) for file in ['satbias.nc', 'satbias_cov.nc', 'tlapse.txt']: bfile = f"{prefix}.{file}" - copylist.append([os.path.join(self.task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)]) # TODO: Why is this specific to ATMOS? + copylist.append([os.path.join(self.task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)]) + # TODO: Why is this specific to ATMOS? bias_dict = { 'mkdir': [os.path.join(self.runtime_config.DATA, 'bc')], @@ -356,14 +357,14 @@ def tgz_diags(statfile: str, diagdir: str) -> None: @logit(logger) -def find_value_in_nested_dict(nested_dict: Dict, target_key: str) -> Any: - """ +def find_value_in_nested_dict(nested_dict: Dict, key: str) -> Any: + """ Recursively search through a nested dictionary and return the value for the target key. 
Parameters ---------- nested_dict : Dict Dictionary to search - target_key : str + key : str Key to search for Returns @@ -380,4 +381,4 @@ def find_value_in_nested_dict(nested_dict: Dict, target_key: str) -> Any: """ if not isinstance(nested_dict, dict): return None - return nested_dict.get(target_key) or next((find_value_in_nested_dict(value, target_key) for value in nested_dict.values() if isinstance(value, dict)), None) + return nested_dict.get(key) or next((find_value_in_nested_dict(vv, key) for vv in nested_dict.values() if isinstance(vv, dict)), None) From ef89d91cf5f2b2b017598dc59611f98720961729 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Mon, 11 Mar 2024 09:06:24 -0400 Subject: [PATCH 08/24] Update ush/python/pygfs/task/analysis.py Co-authored-by: Cory Martin --- ush/python/pygfs/task/analysis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index c7d564647e..186eb3125e 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -70,7 +70,7 @@ def get_jedi_config(self) -> Dict[str, Any]: def get_obs_dict(self) -> Dict[str, Any]: """Compile a dictionary of observation files to copy - This method extracts 'observers' from the JEDI yaml and from that list, extracts a list + This method extracts 'observers' from the JEDI yaml and from that list, extracts a list of observation files that are to be copied to the run directory from the observation input directory From 7b4d7e7e58e044fa12a1b0c35d197ebf2f1b8ecc Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Mon, 11 Mar 2024 09:06:30 -0400 Subject: [PATCH 09/24] Update ush/python/pygfs/task/analysis.py Co-authored-by: Cory Martin --- ush/python/pygfs/task/analysis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 186eb3125e..01d96441bd 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -102,7 +102,7 @@ def get_obs_dict(self) -> Dict[str, Any]: def get_bias_dict(self) -> Dict[str, Any]: """Compile a dictionary of observation files to copy - This method extracts 'observers' from the JEDI yaml and from that list, extracts a list + This method extracts 'observers' from the JEDI yaml and from that list, extracts a list of observation bias correction files that are to be copied to the run directory from the observation input directory From 3387dd289c4268776a845f8c12df09e76ec015ef Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Mon, 11 Mar 2024 09:55:42 -0400 Subject: [PATCH 10/24] Update ush/python/pygfs/task/analysis.py --- ush/python/pygfs/task/analysis.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 01d96441bd..fd5b682af3 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -104,7 +104,8 @@ def get_bias_dict(self) -> Dict[str, Any]: This method extracts 'observers' from the JEDI yaml and from that list, extracts a list of observation bias correction files that are to be copied to the run directory - from the observation input directory + from the component directory. + TODO: COM_ATMOS_ANALYSIS_PREV is hardwired here and this method is not appropriate in `analysis.py` and should be implemented in the component where this is applicable. 
Parameters ---------- From e9f78e890f4b1e7a65030d39841457b54db97786 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Mon, 11 Mar 2024 09:57:46 -0400 Subject: [PATCH 11/24] Update ush/python/pygfs/task/analysis.py --- ush/python/pygfs/task/analysis.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index fd5b682af3..87d6cae74f 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -105,7 +105,8 @@ def get_bias_dict(self) -> Dict[str, Any]: This method extracts 'observers' from the JEDI yaml and from that list, extracts a list of observation bias correction files that are to be copied to the run directory from the component directory. - TODO: COM_ATMOS_ANALYSIS_PREV is hardwired here and this method is not appropriate in `analysis.py` and should be implemented in the component where this is applicable. + TODO: COM_ATMOS_ANALYSIS_PREV is hardwired here and this method is not appropriate in + `analysis.py` and should be implemented in the component where this is applicable. Parameters ---------- From e2c9a512444ff805fd31252b244b42a0a6cc4200 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Mon, 11 Mar 2024 13:10:18 -0400 Subject: [PATCH 12/24] throw a KeyError when looking for value in a nested dict if key is not found --- ush/python/pygfs/task/analysis.py | 43 +++++++++++++++++++++++++++++-- 1 file changed, 41 insertions(+), 2 deletions(-) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 87d6cae74f..2886aef437 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -380,7 +380,46 @@ def find_value_in_nested_dict(nested_dict: Dict, key: str) -> Any: If key is not found in dictionary TODO: move this to a utility module so it can be used elsewhere + + # Example usage: +nested_dict = { + 'a': { + 'b': { + 'c': 1, + 'd': { + 'e': 2, + 'f': 3 + } + }, + 'g': 4 + }, + 'h': { + 'i': 5 + }, + 'j': { + 'k': 6 + } +} + +user_key = input("Enter the key to search for: ") +result = find_value_in_nested_dict(nested_dict, user_key) """ + if not isinstance(nested_dict, dict): - return None - return nested_dict.get(key) or next((find_value_in_nested_dict(vv, key) for vv in nested_dict.values() if isinstance(vv, dict)), None) + raise TypeError(f"Input is not of type(dict)") + + result = nested_dict.get(target_key) + if result is not None: + return result + + for value in nested_dict.values(): + if isinstance(value, dict): + try: + result = find_value_in_nested_dict(value, target_key) + if result is not None: + return result + except KeyError: + pass + + raise KeyError(f"Key '{target_key}' not found in the nested dictionary") + From fb531506e64331437876d6206f872c9abb88ef12 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Mon, 11 Mar 2024 13:12:18 -0400 Subject: [PATCH 13/24] fix pynorms --- ush/python/pygfs/task/analysis.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 2886aef437..5b46cbad46 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -422,4 +422,3 @@ def find_value_in_nested_dict(nested_dict: Dict, key: str) -> Any: pass raise KeyError(f"Key '{target_key}' not found in the nested dictionary") - From 1161b77886e6aa63109c3e0e12081e079d229fc9 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Tue, 12 Mar 2024 16:25:11 -0400 Subject: [PATCH 14/24] Update ush/python/pygfs/task/analysis.py --- 
ush/python/pygfs/task/analysis.py | 36 +++++++++++++++---------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 5b46cbad46..d762a17d68 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -382,27 +382,27 @@ def find_value_in_nested_dict(nested_dict: Dict, key: str) -> Any: TODO: move this to a utility module so it can be used elsewhere # Example usage: -nested_dict = { - 'a': { - 'b': { - 'c': 1, - 'd': { - 'e': 2, - 'f': 3 - } + nested_dict = { + 'a': { + 'b': { + 'c': 1, + 'd': { + 'e': 2, + 'f': 3 + } + }, + 'g': 4 }, - 'g': 4 - }, - 'h': { - 'i': 5 - }, - 'j': { - 'k': 6 + 'h': { + 'i': 5 + }, + 'j': { + 'k': 6 + } } -} -user_key = input("Enter the key to search for: ") -result = find_value_in_nested_dict(nested_dict, user_key) + user_key = input("Enter the key to search for: ") + result = find_value_in_nested_dict(nested_dict, user_key) """ if not isinstance(nested_dict, dict): From 40847bfc8ad71ff005a924fe8e27e5819ec4bfbf Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Tue, 12 Mar 2024 16:44:50 -0400 Subject: [PATCH 15/24] add caveat to the doc-block --- ush/python/pygfs/task/analysis.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index d762a17d68..70451181c8 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -362,6 +362,9 @@ def tgz_diags(statfile: str, diagdir: str) -> None: def find_value_in_nested_dict(nested_dict: Dict, key: str) -> Any: """ Recursively search through a nested dictionary and return the value for the target key. + This returns the first target key it finds. So if a key exists in a subsequent + nested dictionary, it will not be found. + Parameters ---------- nested_dict : Dict @@ -379,7 +382,9 @@ def find_value_in_nested_dict(nested_dict: Dict, key: str) -> Any: KeyError If key is not found in dictionary - TODO: move this to a utility module so it can be used elsewhere + TODO: if this gives issues due to landing on an incorrect key in the nested + dictionary, we will have to implement a more concrete method to search for a key + given a more complete address. 
See resolved conversations in PR 2387 # Example usage: nested_dict = { From 4120f51988e382bb6c2fe335e032a4c51105e87c Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Tue, 12 Mar 2024 17:21:20 -0400 Subject: [PATCH 16/24] parse jedi_yaml once for aerosol and atm jedi tasks --- parm/config/gfs/config.aeroanl | 12 ++++++------ parm/config/gfs/config.aeroanlfinal | 2 +- parm/config/gfs/config.aeroanlinit | 2 +- parm/config/gfs/config.aeroanlrun | 2 +- ush/python/pygfs/task/aero_analysis.py | 13 ++++++------- ush/python/pygfs/task/atm_analysis.py | 15 +++++++-------- ush/python/pygfs/task/atmens_analysis.py | 15 +++++++-------- 7 files changed, 29 insertions(+), 32 deletions(-) diff --git a/parm/config/gfs/config.aeroanl b/parm/config/gfs/config.aeroanl index cf7981f807..51c2e242f3 100644 --- a/parm/config/gfs/config.aeroanl +++ b/parm/config/gfs/config.aeroanl @@ -6,11 +6,11 @@ echo "BEGIN: config.aeroanl" export CASE_ANL=${CASE} -export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ -export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export OBS_YAML_DIR="${PARMgfs}/gdas/parm/aero/obs/config/" +export OBS_LIST="${PARMgfs}/gdas/parm/aero/obs/lists/gdas_aero_prototype.yaml" export STATICB_TYPE='identity' -export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml -export BERROR_DATA_DIR=${FIXgfs}/gdas/bump/aero/${CASE_ANL}/ +export BERROR_YAML="${PARMgfs}/gdas/aero/berror/staticb_${STATICB_TYPE}.yaml" +export BERROR_DATA_DIR="${FIXgfs}/gdas/bump/aero/${CASE_ANL}/" export BERROR_DATE="20160630.000000" export io_layout_x=@IO_LAYOUT_X@ @@ -20,10 +20,10 @@ export JEDIEXE=${EXECgfs}/fv3jedi_var.x if [[ "${DOIAU}" == "YES" ]]; then export aero_bkg_times="3,6,9" - export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_fgat_gfs_aero.yaml + export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_fgat_gfs_aero.yaml" else export aero_bkg_times="6" - export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml + export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_gfs_aero.yaml" fi echo "END: config.aeroanl" diff --git a/parm/config/gfs/config.aeroanlfinal b/parm/config/gfs/config.aeroanlfinal index 230ec5205a..34e5d8f116 100644 --- a/parm/config/gfs/config.aeroanlfinal +++ b/parm/config/gfs/config.aeroanlfinal @@ -6,5 +6,5 @@ echo "BEGIN: config.aeroanlfinal" # Get task specific resources -. $EXPDIR/config.resources aeroanlfinal +source "${EXPDIR}/config.resources" aeroanlfinal echo "END: config.aeroanlfinal" diff --git a/parm/config/gfs/config.aeroanlinit b/parm/config/gfs/config.aeroanlinit index 72175b8d0c..7036d3d27b 100644 --- a/parm/config/gfs/config.aeroanlinit +++ b/parm/config/gfs/config.aeroanlinit @@ -6,5 +6,5 @@ echo "BEGIN: config.aeroanlinit" # Get task specific resources -. $EXPDIR/config.resources aeroanlinit +source "${EXPDIR}/config.resources" aeroanlinit echo "END: config.aeroanlinit" diff --git a/parm/config/gfs/config.aeroanlrun b/parm/config/gfs/config.aeroanlrun index da13df2831..012e5b79f3 100644 --- a/parm/config/gfs/config.aeroanlrun +++ b/parm/config/gfs/config.aeroanlrun @@ -6,6 +6,6 @@ echo "BEGIN: config.aeroanlrun" # Get task specific resources -. 
$EXPDIR/config.resources aeroanlrun +source "${EXPDIR}/config.resources" aeroanlrun echo "END: config.aeroanlrun" diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py index b6152ea945..576d8c1c11 100644 --- a/ush/python/pygfs/task/aero_analysis.py +++ b/ush/python/pygfs/task/aero_analysis.py @@ -32,7 +32,7 @@ def __init__(self, config): _res = int(self.config['CASE'][1:]) _res_anl = int(self.config['CASE_ANL'][1:]) _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2) - _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml") + _jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( @@ -50,7 +50,7 @@ def __init__(self, config): 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", - 'fv3jedi_yaml': _fv3jedi_yaml, + 'jedi_yaml': _jedi_yaml, } ) @@ -93,10 +93,9 @@ def initialize(self: Analysis) -> None: FileHandler(self.get_bkg_dict(AttrDict(self.task_config, **self.task_config))).sync() # generate variational YAML file - logger.debug(f"Generate variational YAML file: {self.task_config.fv3jedi_yaml}") - varda_yaml = parse_j2yaml(self.task_config['AEROVARYAML'], self.task_config) - save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml) - logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}") + logger.debug(f"Generate variational YAML file: {self.task_config.jedi_yaml}") + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) + logger.info(f"Wrote variational YAML to: {self.task_config.jedi_yaml}") # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") @@ -114,7 +113,7 @@ def execute(self: Analysis) -> None: exec_cmd = Executable(self.task_config.APRUN_AEROANL) exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x') exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) try: logger.debug(f"Executing {exec_cmd}") diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 5a90a89e34..52c37c9a74 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -31,7 +31,7 @@ def __init__(self, config): _res = int(self.config.CASE[1:]) _res_anl = int(self.config.CASE_ANL[1:]) _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) - _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmvar.yaml") + _jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmvar.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( @@ -48,7 +48,7 @@ def __init__(self, config): 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'APREFIX': 
f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", - 'fv3jedi_yaml': _fv3jedi_yaml, + 'jedi_yaml': _jedi_yaml, } ) @@ -102,10 +102,9 @@ def initialize(self: Analysis) -> None: FileHandler(self.get_bkg_dict(AttrDict(self.task_config))).sync() # generate variational YAML file - logger.debug(f"Generate variational YAML file: {self.task_config.fv3jedi_yaml}") - varda_yaml = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, searchpath=self.gdasapp_j2tmpl_dir) - save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml) - logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}") + logger.debug(f"Generate variational YAML file: {self.task_config.jedi_yaml}") + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) + logger.info(f"Wrote variational YAML to: {self.task_config.jedi_yaml}") # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") @@ -123,7 +122,7 @@ def execute(self: Analysis) -> None: exec_cmd = Executable(self.task_config.APRUN_ATMANL) exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x') exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) try: logger.debug(f"Executing {exec_cmd}") @@ -170,7 +169,7 @@ def finalize(self: Analysis) -> None: archive.add(diaggzip, arcname=os.path.basename(diaggzip)) # copy full YAML from executable to ROTDIR - logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS}") + logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS}") src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") logger.debug(f"Copying {src} to {dest}") diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 3e2c0a233c..6b4c65a040 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -31,7 +31,7 @@ def __init__(self, config): _res = int(self.config.CASE_ENS[1:]) _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) - _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmens.yaml") + _jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmens.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( @@ -45,7 +45,7 @@ def __init__(self, config): 'OPREFIX': f"{self.config.EUPD_CYC}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", - 'fv3jedi_yaml': _fv3jedi_yaml, + 'jedi_yaml': _jedi_yaml, } ) @@ -118,10 +118,9 @@ def initialize(self: Analysis) -> None: FileHandler(self.get_fv3ens_dict(localconf)).sync() # generate ensemble da YAML file - logger.debug(f"Generate ensemble da YAML file: {self.task_config.fv3jedi_yaml}") - ensda_yaml = parse_j2yaml(self.task_config.JEDIYAML, 
self.task_config, searchpath=self.gdasapp_j2tmpl_dir) - save_as_yaml(ensda_yaml, self.task_config.fv3jedi_yaml) - logger.info(f"Wrote ensemble da YAML to: {self.task_config.fv3jedi_yaml}") + logger.debug(f"Generate ensemble da YAML file: {self.task_config.jedi_yaml}") + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) + logger.info(f"Wrote ensemble da YAML to: {self.task_config.jedi_yaml}") # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") @@ -153,7 +152,7 @@ def execute(self: Analysis) -> None: exec_cmd = Executable(self.task_config.APRUN_ATMENSANL) exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_letkf.x') exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) try: logger.debug(f"Executing {exec_cmd}") @@ -206,7 +205,7 @@ def finalize(self: Analysis) -> None: archive.add(diaggzip, arcname=os.path.basename(diaggzip)) # copy full YAML from executable to ROTDIR - logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}") + logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}") src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") logger.debug(f"Copying {src} to {dest}") From c22686e663d7088536b463910676af703ee952e4 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Tue, 12 Mar 2024 22:23:25 -0400 Subject: [PATCH 17/24] update aeroDA config file --- parm/config/gfs/config.aeroanl | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/parm/config/gfs/config.aeroanl b/parm/config/gfs/config.aeroanl index 51c2e242f3..c2769f6b91 100644 --- a/parm/config/gfs/config.aeroanl +++ b/parm/config/gfs/config.aeroanl @@ -6,24 +6,23 @@ echo "BEGIN: config.aeroanl" export CASE_ANL=${CASE} -export OBS_YAML_DIR="${PARMgfs}/gdas/parm/aero/obs/config/" -export OBS_LIST="${PARMgfs}/gdas/parm/aero/obs/lists/gdas_aero_prototype.yaml" +export OBS_LIST="${PARMgfs}/gdas/parm/aero/obs/lists/gdas_aero.yaml.j2" export STATICB_TYPE='identity' -export BERROR_YAML="${PARMgfs}/gdas/aero/berror/staticb_${STATICB_TYPE}.yaml" +export BERROR_YAML="${PARMgfs}/gdas/aero/berror/staticb_${STATICB_TYPE}.yaml.j2" export BERROR_DATA_DIR="${FIXgfs}/gdas/bump/aero/${CASE_ANL}/" export BERROR_DATE="20160630.000000" export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ -export JEDIEXE=${EXECgfs}/fv3jedi_var.x +export JEDIEXE="${EXECgfs}/fv3jedi_var.x" if [[ "${DOIAU}" == "YES" ]]; then export aero_bkg_times="3,6,9" - export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_fgat_gfs_aero.yaml" + export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_fgat_gfs_aero.yaml.j2" else export aero_bkg_times="6" - export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_gfs_aero.yaml" + export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_gfs_aero.yaml.j2" fi echo "END: config.aeroanl" From 55fc774adc112018b9d138b73639f52d0e190fc0 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Tue, 12 Mar 2024 22:46:27 -0400 Subject: [PATCH 18/24] make ush/forecast_predet.sh shellcheck compliant --- ush/forecast_predet.sh | 50 +++++++++++++++++++----------------------- 1 file changed, 22 insertions(+), 28 deletions(-) diff --git a/ush/forecast_predet.sh 
b/ush/forecast_predet.sh index 41e4f8d1b1..b5e1ad8e82 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -10,30 +10,33 @@ to_seconds() { # Function to convert HHMMSS to seconds since 00Z - local hhmmss=${1:?} - local hh=${hhmmss:0:2} - local mm=${hhmmss:2:2} - local ss=${hhmmss:4:2} - local seconds=$((10#${hh}*3600+10#${mm}*60+10#${ss})) - local padded_seconds=$(printf "%05d" "${seconds}") + local hhmmss hh mm ss seconds padded_seconds + hhmmss=${1:?} + hh=${hhmmss:0:2} + mm=${hhmmss:2:2} + ss=${hhmmss:4:2} + seconds=$((10#${hh}*3600+10#${mm}*60+10#${ss})) + padded_seconds=$(printf "%05d" "${seconds}") echo "${padded_seconds}" } middle_date(){ # Function to calculate mid-point date in YYYYMMDDHH between two dates also in YYYYMMDDHH - local date1=${1:?} - local date2=${2:?} - local date1s=$(date --utc -d "${date1:0:8} ${date1:8:2}:00:00" +%s) - local date2s=$(date --utc -d "${date2:0:8} ${date2:8:2}:00:00" +%s) - local dtsecsby2=$(( $((date2s - date1s)) / 2 )) - local mid_date=$(date --utc -d "${date1:0:8} ${date1:8:2} + ${dtsecsby2} seconds" +%Y%m%d%H%M%S) + local date1 date2 date1s date2s dtsecsby2 mid_date + date1=${1:?} + date2=${2:?} + date1s=$(date --utc -d "${date1:0:8} ${date1:8:2}:00:00" +%s) + date2s=$(date --utc -d "${date2:0:8} ${date2:8:2}:00:00" +%s) + dtsecsby2=$(( $((date2s - date1s)) / 2 )) + mid_date=$(date --utc -d "${date1:0:8} ${date1:8:2} + ${dtsecsby2} seconds" +%Y%m%d%H%M%S) echo "${mid_date:0:10}" } nhour(){ # Function to calculate hours between two dates (This replicates prod-util NHOUR) - local date1=${1:?} - local date2=${2:?} + local date1 date2 seconds1 seconds2 hours + date1=${1:?} + date2=${2:?} # Convert dates to UNIX timestamps seconds1=$(date --utc -d "${date1:0:8} ${date1:8:2}:00:00" +%s) seconds2=$(date --utc -d "${date2:0:8} ${date2:8:2}:00:00" +%s) @@ -41,21 +44,17 @@ nhour(){ echo "${hours}" } +# shellcheck disable=SC2034 common_predet(){ echo "SUB ${FUNCNAME[0]}: Defining variables for shared through model components" - # Ignore "not used" warning - # shellcheck disable=SC2034 pwd=$(pwd) CDUMP=${CDUMP:-gdas} - CASE=${CASE:-C96} CDATE=${CDATE:-"${PDY}${cyc}"} ENSMEM=${ENSMEM:-000} # Define significant cycles current_cycle="${PDY}${cyc}" previous_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - ${assim_freq} hours" +%Y%m%d%H) - # ignore errors that variable isn't used - # shellcheck disable=SC2034 next_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${assim_freq} hours" +%Y%m%d%H) forecast_end_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${FHMAX} hours" +%Y%m%d%H) @@ -89,6 +88,7 @@ common_predet(){ cd "${DATA}" || ( echo "FATAL ERROR: Unable to 'cd ${DATA}', ABORT!"; exit 8 ) } +# shellcheck disable=SC2034 FV3_predet(){ echo "SUB ${FUNCNAME[0]}: Defining variables for FV3" @@ -105,8 +105,6 @@ FV3_predet(){ fi # Convert output settings into an explicit list for FV3 - # Ignore "not used" warning - # shellcheck disable=SC2034 FV3_OUTPUT_FH="" local fhr=${FHMIN} if (( FHOUT_HF > 0 && FHMAX_HF > 0 )); then @@ -116,8 +114,6 @@ FV3_predet(){ FV3_OUTPUT_FH="${FV3_OUTPUT_FH} $(seq -s ' ' "${fhr}" "${FHOUT}" "${FHMAX}")" # Other options - # ignore errors that variable isn't used - # shellcheck disable=SC2034 MEMBER=$(( 10#${ENSMEM:-"-1"} )) # -1: control, 0: ensemble mean, >0: ensemble member $MEMBER PREFIX_ATMINC=${PREFIX_ATMINC:-""} # allow ensemble to use recentered increment @@ -169,7 +165,6 @@ FV3_predet(){ 
nstf_name=${nstf_name:-"${NST_MODEL},${NST_SPINUP},${NST_RESV},${ZSEA1},${ZSEA2}"} nst_anl=${nst_anl:-".false."} - # blocking factor used for threading and general physics performance #nyblocks=$(expr \( $npy - 1 \) \/ $layout_y ) #nxblocks=$(expr \( $npx - 1 \) \/ $layout_x \/ 32) @@ -215,6 +210,7 @@ WW3_predet(){ ${NLN} "${COM_WAVE_RESTART}" "restart_wave" } +# shellcheck disable=SC2034 CICE_predet(){ echo "SUB ${FUNCNAME[0]}: CICE before run type determination" @@ -227,12 +223,11 @@ CICE_predet(){ # CICE does not have a concept of high frequency output like FV3 # Convert output settings into an explicit list for CICE - # Ignore "not used" warning - # shellcheck disable=SC2034 CICE_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "${FHOUT_OCNICE}" "${FHMAX}") } +# shellcheck disable=SC2034 MOM6_predet(){ echo "SUB ${FUNCNAME[0]}: MOM6 before run type determination" @@ -245,8 +240,6 @@ MOM6_predet(){ # MOM6 does not have a concept of high frequency output like FV3 # Convert output settings into an explicit list for MOM6 - # Ignore "not used" warning - # shellcheck disable=SC2034 MOM6_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "${FHOUT_OCNICE}" "${FHMAX}") } @@ -260,6 +253,7 @@ CMEPS_predet(){ } +# shellcheck disable=SC2034 GOCART_predet(){ echo "SUB ${FUNCNAME[0]}: GOCART before run type determination" From c0c857387450d55665ccc5956ae4f4bb6cc83f8b Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Wed, 13 Mar 2024 10:59:53 -0400 Subject: [PATCH 19/24] update gdasapp hash for aerosol DA j2 templates --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 8a6f825f6d..82e16bbf9e 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 8a6f825f6d988c81fad11070de92a2744d5a53cc +Subproject commit 82e16bbf9eafec67e517827357ab8e1a360740fc From 8218ee78b83dd400430c744656dcc2cae58fc49e Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Wed, 13 Mar 2024 16:24:14 -0400 Subject: [PATCH 20/24] update configs and some more yamls; --- parm/config/gfs/config.aeroanl | 5 ++++- parm/config/gfs/config.atmanl | 3 +++ parm/config/gfs/config.atmensanl | 3 +++ parm/config/gfs/config.snowanl | 2 ++ parm/gdas/aero_crtm_coeff.yaml | 13 ------------- parm/gdas/aero_crtm_coeff.yaml.j2 | 13 +++++++++++++ parm/gdas/aero_jedi_fix.yaml | 11 ----------- .../{atm_jedi_fix.yaml => aero_jedi_fix.yaml.j2} | 0 ...{atm_crtm_coeff.yaml => atm_crtm_coeff.yaml.j2} | 0 parm/gdas/atm_jedi_fix.yaml.j2 | 7 +++++++ parm/gdas/snow_jedi_fix.yaml.j2 | 8 ++++---- ush/python/pygfs/task/aero_analysis.py | 14 ++++++-------- ush/python/pygfs/task/analysis.py | 8 ++++---- ush/python/pygfs/task/atm_analysis.py | 12 +++++------- ush/python/pygfs/task/atmens_analysis.py | 12 +++++------- ush/python/pygfs/task/snow_analysis.py | 5 ++--- 16 files changed, 58 insertions(+), 58 deletions(-) delete mode 100644 parm/gdas/aero_crtm_coeff.yaml create mode 100644 parm/gdas/aero_crtm_coeff.yaml.j2 delete mode 100644 parm/gdas/aero_jedi_fix.yaml rename parm/gdas/{atm_jedi_fix.yaml => aero_jedi_fix.yaml.j2} (100%) rename parm/gdas/{atm_crtm_coeff.yaml => atm_crtm_coeff.yaml.j2} (100%) create mode 100644 parm/gdas/atm_jedi_fix.yaml.j2 diff --git a/parm/config/gfs/config.aeroanl b/parm/config/gfs/config.aeroanl index c2769f6b91..972f393feb 100644 --- a/parm/config/gfs/config.aeroanl +++ b/parm/config/gfs/config.aeroanl @@ -6,12 +6,15 @@ echo "BEGIN: config.aeroanl" export CASE_ANL=${CASE} -export OBS_LIST="${PARMgfs}/gdas/parm/aero/obs/lists/gdas_aero.yaml.j2" +export 
OBS_LIST="${PARMgfs}/gdas/aero/obs/lists/gdas_aero.yaml.j2" export STATICB_TYPE='identity' export BERROR_YAML="${PARMgfs}/gdas/aero/berror/staticb_${STATICB_TYPE}.yaml.j2" export BERROR_DATA_DIR="${FIXgfs}/gdas/bump/aero/${CASE_ANL}/" export BERROR_DATE="20160630.000000" +export CRTM_FIX_YAML="${PARMgfs}/gdas/aero_crtm_coeff.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/aero_jedi_fix.yaml.j2" + export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl index 11358de8a8..7cfd0cb47f 100644 --- a/parm/config/gfs/config.atmanl +++ b/parm/config/gfs/config.atmanl @@ -19,6 +19,9 @@ else export BERROR_YAML="${PARMgfs}/gdas/atm/berror/staticb_${STATICB_TYPE}.yaml.j2" fi +export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" + export layout_x_atmanl=@LAYOUT_X_ATMANL@ export layout_y_atmanl=@LAYOUT_Y_ATMANL@ diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl index 49b903e4c0..8e824b22f6 100644 --- a/parm/config/gfs/config.atmensanl +++ b/parm/config/gfs/config.atmensanl @@ -9,6 +9,9 @@ export OBS_LIST="${PARMgfs}/gdas/atm/obs/lists/lgetkf_prototype.yaml.j2" export JEDIYAML="${PARMgfs}/gdas/atm/lgetkf/lgetkf.yaml.j2" export INTERP_METHOD='barycentric' +export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" + export layout_x_atmensanl=@LAYOUT_X_ATMENSANL@ export layout_y_atmensanl=@LAYOUT_Y_ATMENSANL@ diff --git a/parm/config/gfs/config.snowanl b/parm/config/gfs/config.snowanl index 30e6d9c07b..7b3ffa47f3 100644 --- a/parm/config/gfs/config.snowanl +++ b/parm/config/gfs/config.snowanl @@ -22,6 +22,8 @@ export BESTDDEV="30." # Background Error Std. Dev. 
for LETKFOI export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe" export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/apply_incr_nml.j2" +export JEDI_FIX_YAML="${PARMgfs}/gdas/snow_jedi_fix.yaml.j2" + export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ diff --git a/parm/gdas/aero_crtm_coeff.yaml b/parm/gdas/aero_crtm_coeff.yaml deleted file mode 100644 index 75b54c3741..0000000000 --- a/parm/gdas/aero_crtm_coeff.yaml +++ /dev/null @@ -1,13 +0,0 @@ -mkdir: -- {{ DATA }}/crtm/ -copy: -- [{{ CRTM_FIX }}/AerosolCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/CloudCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/v.viirs-m_npp.SpcCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/v.viirs-m_npp.TauCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/v.viirs-m_j1.SpcCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/v.viirs-m_j1.TauCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/NPOESS.VISice.EmisCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/NPOESS.VISland.EmisCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/NPOESS.VISsnow.EmisCoeff.bin, {{ DATA }}/crtm/] -- [{{ CRTM_FIX }}/NPOESS.VISwater.EmisCoeff.bin, {{ DATA }}/crtm/] diff --git a/parm/gdas/aero_crtm_coeff.yaml.j2 b/parm/gdas/aero_crtm_coeff.yaml.j2 new file mode 100644 index 0000000000..b48d8ff231 --- /dev/null +++ b/parm/gdas/aero_crtm_coeff.yaml.j2 @@ -0,0 +1,13 @@ +mkdir: +- '{{ DATA }}/crtm/' +copy: +- ['{{ CRTM_FIX }}/AerosolCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/CloudCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/v.viirs-m_npp.SpcCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/v.viirs-m_npp.TauCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/v.viirs-m_j1.SpcCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/v.viirs-m_j1.TauCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/NPOESS.VISice.EmisCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/NPOESS.VISland.EmisCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/NPOESS.VISsnow.EmisCoeff.bin', '{{ DATA }}/crtm/'] +- ['{{ CRTM_FIX }}/NPOESS.VISwater.EmisCoeff.bin', '{{ DATA }}/crtm/'] diff --git a/parm/gdas/aero_jedi_fix.yaml b/parm/gdas/aero_jedi_fix.yaml deleted file mode 100644 index 16cbeac6e7..0000000000 --- a/parm/gdas/aero_jedi_fix.yaml +++ /dev/null @@ -1,11 +0,0 @@ -mkdir: -- !ENV ${DATA}/fv3jedi -copy: -- - !ENV ${FIXgfs}/gdas/fv3jedi/fv3files/akbk$(npz).nc4 - - !ENV ${DATA}/fv3jedi/akbk.nc4 -- - !ENV ${FIXgfs}/gdas/fv3jedi/fv3files/fmsmpp.nml - - !ENV ${DATA}/fv3jedi/fmsmpp.nml -- - !ENV ${FIXgfs}/gdas/fv3jedi/fv3files/field_table_gfdl - - !ENV ${DATA}/fv3jedi/field_table -- - !ENV $(HOMEgfs)/sorc/gdas.cd/parm/io/fv3jedi_fieldmetadata_restart.yaml - - !ENV ${DATA}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml diff --git a/parm/gdas/atm_jedi_fix.yaml b/parm/gdas/aero_jedi_fix.yaml.j2 similarity index 100% rename from parm/gdas/atm_jedi_fix.yaml rename to parm/gdas/aero_jedi_fix.yaml.j2 diff --git a/parm/gdas/atm_crtm_coeff.yaml b/parm/gdas/atm_crtm_coeff.yaml.j2 similarity index 100% rename from parm/gdas/atm_crtm_coeff.yaml rename to parm/gdas/atm_crtm_coeff.yaml.j2 diff --git a/parm/gdas/atm_jedi_fix.yaml.j2 b/parm/gdas/atm_jedi_fix.yaml.j2 new file mode 100644 index 0000000000..69039baddf --- /dev/null +++ b/parm/gdas/atm_jedi_fix.yaml.j2 @@ -0,0 +1,7 @@ +mkdir: +- '{{ DATA }}/fv3jedi' +copy: +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/akbk{{ npz }}.nc4', '{{ DATA }}/fv3jedi/akbk.nc4'] +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/fmsmpp.nml', '{{ DATA }}/fv3jedi/fmsmpp.nml'] +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/field_table_gfdl', '{{ DATA 
}}/fv3jedi/field_table'] +- ['{{ PARMgfs }}/gdas/io/fv3jedi_fieldmetadata_restart.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml'] diff --git a/parm/gdas/snow_jedi_fix.yaml.j2 b/parm/gdas/snow_jedi_fix.yaml.j2 index 4d820a82ba..69039baddf 100644 --- a/parm/gdas/snow_jedi_fix.yaml.j2 +++ b/parm/gdas/snow_jedi_fix.yaml.j2 @@ -1,7 +1,7 @@ mkdir: - '{{ DATA }}/fv3jedi' copy: -- ['{{ HOMEgfs }}/fix/gdas/fv3jedi/fv3files/akbk{{ npz }}.nc4', '{{ DATA }}/fv3jedi/akbk.nc4'] -- ['{{ HOMEgfs }}/fix/gdas/fv3jedi/fv3files/fmsmpp.nml', '{{ DATA }}/fv3jedi/fmsmpp.nml'] -- ['{{ HOMEgfs }}/fix/gdas/fv3jedi/fv3files/field_table_gfdl', '{{ DATA }}/fv3jedi/field_table'] -- ['{{ HOMEgfs }}/sorc/gdas.cd/parm/io/fv3jedi_fieldmetadata_restart.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml'] +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/akbk{{ npz }}.nc4', '{{ DATA }}/fv3jedi/akbk.nc4'] +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/fmsmpp.nml', '{{ DATA }}/fv3jedi/fmsmpp.nml'] +- ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/field_table_gfdl', '{{ DATA }}/fv3jedi/field_table'] +- ['{{ PARMgfs }}/gdas/io/fv3jedi_fieldmetadata_restart.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml'] diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py index 576d8c1c11..a61b7c82f3 100644 --- a/ush/python/pygfs/task/aero_analysis.py +++ b/ush/python/pygfs/task/aero_analysis.py @@ -12,7 +12,7 @@ add_to_datetime, to_fv3time, to_timedelta, chdir, to_fv3time, - YAMLFile, parse_yamltmpl, parse_j2yaml, save_as_yaml, + YAMLFile, parse_j2yaml, save_as_yaml, logit, Executable, WorkflowException) @@ -73,15 +73,13 @@ def initialize(self: Analysis) -> None: super().initialize() # stage CRTM fix files - crtm_fix_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'gdas', 'aero_crtm_coeff.yaml') - logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") - crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config) + logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") + crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) FileHandler(crtm_fix_list).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'gdas', 'aero_jedi_fix.yaml') - logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() # stage berror files @@ -211,7 +209,7 @@ def _add_fms_cube_sphere_increments(self: Analysis) -> None: inc_template = os.path.join(self.task_config.DATA, 'anl', 'aeroinc.' 
+ increment_template) bkg_template = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, restart_template) # get list of increment vars - incvars_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'gdas', 'aeroanl_inc_vars.yaml') + incvars_list_path = os.path.join(self.task_config['PARMgfs'], 'gdas', 'aeroanl_inc_vars.yaml') incvars = YAMLFile(path=incvars_list_path)['incvars'] super().add_fv3_increments(inc_template, bkg_template, incvars) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 70451181c8..2221fb7b34 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -27,7 +27,7 @@ def __init__(self, config: Dict[str, Any]) -> None: super().__init__(config) self.config.ntiles = 6 # Store location of GDASApp jinja2 templates - self.gdasapp_j2tmpl_dir = os.path.join(self.config.HOMEgfs, 'parm/gdas') + self.gdasapp_j2tmpl_dir = os.path.join(self.config.PARMgfs, 'gdas') def initialize(self) -> None: super().initialize() @@ -359,7 +359,7 @@ def tgz_diags(statfile: str, diagdir: str) -> None: @logit(logger) -def find_value_in_nested_dict(nested_dict: Dict, key: str) -> Any: +def find_value_in_nested_dict(nested_dict: Dict, target_key: str) -> Any: """ Recursively search through a nested dictionary and return the value for the target key. This returns the first target key it finds. So if a key exists in a subsequent @@ -369,13 +369,13 @@ def find_value_in_nested_dict(nested_dict: Dict, key: str) -> Any: ---------- nested_dict : Dict Dictionary to search - key : str + target_key : str Key to search for Returns ------- Any - Value of the key + Value of the target key Raises ------ diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 52c37c9a74..6348bdf319 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -71,19 +71,17 @@ def initialize(self: Analysis) -> None: super().initialize() # stage CRTM fix files - crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_crtm_coeff.yaml') - logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") - crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config) + logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") + crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) FileHandler(crtm_fix_list).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_jedi_fix.yaml') - logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() # stage static background error files, otherwise it will assume ID matrix - logger.debug(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}") + logger.info(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}") FileHandler(self.get_berror_dict(self.task_config)).sync() # stage ensemble files for use in hybrid background error diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 6b4c65a040..1037b557c2 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -96,19 +96,17 @@ def initialize(self: Analysis) -> None: FileHandler({'mkdir': 
dirlist}).sync() # stage CRTM fix files - crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_crtm_coeff.yaml') - logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") - crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config) + logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") + crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) FileHandler(crtm_fix_list).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_jedi_fix.yaml') - logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() # stage backgrounds - logger.debug(f"Stage ensemble member background files") + logger.info(f"Stage ensemble member background files") localconf = AttrDict() keys = ['COM_ATMOS_RESTART_TMPL', 'previous_cycle', 'ROTDIR', 'RUN', 'NMEM_ENS', 'DATA', 'current_cycle', 'ntiles'] diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index 43e9d6fe3e..c149f140b6 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -260,9 +260,8 @@ def initialize(self) -> None: FileHandler({'mkdir': dirlist}).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'snow_jedi_fix.yaml.j2') - logger.info(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() # stage backgrounds From 320730cc18ae33f18268a3832f9cc6e826d5efda Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Wed, 13 Mar 2024 22:06:02 -0400 Subject: [PATCH 21/24] update pointer to gdasapp --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 82e16bbf9e..dd350d7e4d 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 82e16bbf9eafec67e517827357ab8e1a360740fc +Subproject commit dd350d7e4daab0977407e388711807f13b204f6f From c163a4fa070b9ee2c99bcae7c5ceaa16929bf0d6 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Thu, 14 Mar 2024 09:48:21 -0400 Subject: [PATCH 22/24] The jjob for ocean and ice pp, only defines the component specific history and grib directory. This causes an error in the exscript trying to pull keys for both ocean and ice. Fix this. 
Surprised this has not caused failures before today --- scripts/exglobal_oceanice_products.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/exglobal_oceanice_products.py b/scripts/exglobal_oceanice_products.py index 0f8e2e0d6d..615d72d1bf 100755 --- a/scripts/exglobal_oceanice_products.py +++ b/scripts/exglobal_oceanice_products.py @@ -19,8 +19,7 @@ def main(): # Pull out all the configuration keys needed to run the rest of steps keys = ['HOMEgfs', 'DATA', 'current_cycle', 'RUN', 'NET', - 'COM_OCEAN_HISTORY', 'COM_OCEAN_GRIB', - 'COM_ICE_HISTORY', 'COM_ICE_GRIB', + f'COM_{component.upper()}_HISTORY', f'COM_{component.upper()}_GRIB', 'APRUN_OCNICEPOST', 'component', 'forecast_hour', 'valid_datetime', 'avg_period', 'model_grid', 'product_grids', 'oceanice_yaml'] From 250b5e965759154892589a02ff5794bda2176f09 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Thu, 14 Mar 2024 10:45:07 -0400 Subject: [PATCH 23/24] Update scripts/exglobal_oceanice_products.py Co-authored-by: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> --- scripts/exglobal_oceanice_products.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/exglobal_oceanice_products.py b/scripts/exglobal_oceanice_products.py index 615d72d1bf..6a754ea45b 100755 --- a/scripts/exglobal_oceanice_products.py +++ b/scripts/exglobal_oceanice_products.py @@ -19,7 +19,7 @@ def main(): # Pull out all the configuration keys needed to run the rest of steps keys = ['HOMEgfs', 'DATA', 'current_cycle', 'RUN', 'NET', - f'COM_{component.upper()}_HISTORY', f'COM_{component.upper()}_GRIB', + f'COM_{ocnice.task_config.component.upper()}_HISTORY', f'COM_{ocnice.task_config.component.upper()}_GRIB', 'APRUN_OCNICEPOST', 'component', 'forecast_hour', 'valid_datetime', 'avg_period', 'model_grid', 'product_grids', 'oceanice_yaml'] From 2d8162094853b5089478c3815e9344d1ff2c861a Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Fri, 15 Mar 2024 12:48:10 -0400 Subject: [PATCH 24/24] update Jenkinsfile to add renamed and expanded test --- ci/Jenkinsfile | 4 ++-- .../pr/{C96_atm-aero-snowDA.yaml => C96_atmaerosnowDA.yaml} | 2 +- ...snowDA_defaults_ci.yaml => atmaerosnowDA_defaults_ci.yaml} | 0 3 files changed, 3 insertions(+), 3 deletions(-) rename ci/cases/pr/{C96_atm-aero-snowDA.yaml => C96_atmaerosnowDA.yaml} (84%) rename ci/cases/yamls/{atm-aero-snowDA_defaults_ci.yaml => atmaerosnowDA_defaults_ci.yaml} (100%) diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile index 34535ed608..eddedf486c 100644 --- a/ci/Jenkinsfile +++ b/ci/Jenkinsfile @@ -119,7 +119,7 @@ pipeline { axis { name 'Case' // TODO add dynamic list of cases from env vars (needs addtional plugins) - values 'C48C48_ufs_hybatmDA', 'C48_ATM', 'C48_S2SW', 'C48_S2SWA_gefs', 'C48mx500_3DVarAOWCDA', 'C96C48_hybatmDA', 'C96_atm3DVar', 'C96_atmsnowDA' + values 'C48C48_ufs_hybatmDA', 'C48_ATM', 'C48_S2SW', 'C48_S2SWA_gefs', 'C48mx500_3DVarAOWCDA', 'C96C48_hybatmDA', 'C96_atm3DVar', 'C96_atmaerosnowDA' } } stages { @@ -160,7 +160,7 @@ pipeline { for (line in lines) { echo "archiving: ${line}" archiveArtifacts artifacts: "${line}", fingerprint: true - } + } } } error("Failed to run experiments ${Case} on ${Machine}") diff --git a/ci/cases/pr/C96_atm-aero-snowDA.yaml b/ci/cases/pr/C96_atmaerosnowDA.yaml similarity index 84% rename from ci/cases/pr/C96_atm-aero-snowDA.yaml rename to ci/cases/pr/C96_atmaerosnowDA.yaml index 4266f0578a..7e22955a37 100644 --- a/ci/cases/pr/C96_atm-aero-snowDA.yaml +++ b/ci/cases/pr/C96_atmaerosnowDA.yaml @@ -14,7 +14,7 @@ 
arguments: nens: 0 gfs_cyc: 1 start: cold - yaml: {{ HOMEgfs }}/ci/cases/yamls/atm-aero-snowDA_defaults_ci.yaml + yaml: {{ HOMEgfs }}/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml skip_ci_on_hosts: - orion diff --git a/ci/cases/yamls/atm-aero-snowDA_defaults_ci.yaml b/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml similarity index 100% rename from ci/cases/yamls/atm-aero-snowDA_defaults_ci.yaml rename to ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml
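
Appended usage sketch (reviewer aid, not part of any patch above): the snippet below illustrates how the find_value_in_nested_dict helper introduced in PATCH 12/24 and finalized in PATCH 20/24 is meant to behave, mirroring the way get_obs_dict and get_bias_dict now walk the parsed JEDI YAML for the 'observations' block. It assumes ush/python is on PYTHONPATH and the workflow's Python dependencies (wxflow, netCDF4) are available; the jedi_config dictionary and the obsfile path are invented purely for illustration and are not real GDASApp configuration.

    # Stand-in for a parsed JEDI YAML; real GDASApp configs are much larger.
    from pygfs.task.analysis import find_value_in_nested_dict

    jedi_config = {
        'cost function': {
            'observations': {
                'observers': [
                    {'obs space': {'name': 'viirs_npp_aod',
                                   'obsdatain': {'engine': {'obsfile': 'obs/viirs_npp_aod.nc4'}}}},
                ]
            }
        }
    }

    # Returns the first match found while recursing through nested dictionaries,
    # which is how get_obs_dict/get_bias_dict locate the 'observers' list.
    observations = find_value_in_nested_dict(jedi_config, 'observations')
    for ob in observations['observers']:
        print(ob['obs space']['obsdatain']['engine']['obsfile'])

    # A key absent from the whole tree raises KeyError, per PATCH 12/24.
    try:
        find_value_in_nested_dict(jedi_config, 'background')
    except KeyError as err:
        print(err)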