From 6b46fa3f84f5db64872260b4ac8c22b1ad8a67ae Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Fri, 21 Oct 2022 22:20:23 +0000 Subject: [PATCH 01/19] Move config load out into function. --- ush/generate_FV3LAM_wflow.py | 2 +- ush/python_utils/config_parser.py | 2 +- ush/set_predef_grid_params.py | 33 +-- ush/setup.py | 398 +++++++++++++++--------------- 4 files changed, 206 insertions(+), 229 deletions(-) diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 11a86c9aaa..4faed8cc33 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -100,7 +100,7 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = 'log.generate_FV3LAM_wflow') -> # The setup function reads the user configuration file and fills in # non-user-specified values from config_defaults.yaml - setup() + setup(USHdir) # import all environment variables import_vars() diff --git a/ush/python_utils/config_parser.py b/ush/python_utils/config_parser.py index c1b69db5ff..1d5f26db71 100644 --- a/ush/python_utils/config_parser.py +++ b/ush/python_utils/config_parser.py @@ -377,7 +377,7 @@ def update_dict(dict_o, dict_t, provide_default=False): def check_structure_dict(dict_o, dict_t): - """Check if a dictinary's structure follows a template. + """Check if a dictionary's structure follows a template. The invalid entries are printed to the screen. 
Args: diff --git a/ush/set_predef_grid_params.py index 6b432b8f03..e6b05f6fce 100644 --- a/ush/set_predef_grid_params.py +++ b/ush/set_predef_grid_params.py @@ -17,7 +17,7 @@ ) -def set_predef_grid_params(): +def set_predef_grid_params(USHdir, fcst_config): """Sets grid parameters for the specified predfined grid Args: @@ -25,43 +25,24 @@ Returns: None """ - # import all environement variables - IMPORTS = [ - "PREDEF_GRID_NAME", - "QUILTING", - "DT_ATMOS", - "LAYOUT_X", - "LAYOUT_Y", - "BLOCKSIZE", - ] - import_vars(env_vars=IMPORTS) + predef_grid_name = fcst_config['PREDEF_GRID_NAME'] + quilting = fcst_config['QUILTING'] - USHdir = os.path.dirname(os.path.abspath(__file__)) params_dict = load_config_file(os.path.join(USHdir, "predef_grid_params.yaml")) try: - params_dict = params_dict[PREDEF_GRID_NAME] + params_dict = params_dict[predef_grid_name] except KeyError: errmsg = dedent(f''' - PREDEF_GRID_NAME = {PREDEF_GRID_NAME} not found in predef_grid_params.yaml + PREDEF_GRID_NAME = {predef_grid_name} not found in predef_grid_params.yaml Check your config file settings.''') raise Exception(errmsg) from None - - # if QUILTING = False, remove key - if not QUILTING: + # We don't need the quilting section if user wants it turned off + if not quilting: params_dict.pop("QUILTING") else: params_dict = flatten_dict(params_dict) - # take care of special vars - special_vars = ["DT_ATMOS", "LAYOUT_X", "LAYOUT_Y", "BLOCKSIZE"] - for var in special_vars: - if globals()[var] is not None: - params_dict[var] = globals()[var] - - # export variables to environment - export_vars(source_dict=params_dict) - return params_dict diff --git a/ush/setup.py index 3fa7313727..b12e47c114 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -17,6 +17,7 @@ uppercase, check_for_preexist_dir_file, flatten_dict, + check_structure_dict, update_dict, import_vars, export_vars, @@ -40,108 +41,120 @@ from set_thompson_mp_fix_files
import set_thompson_mp_fix_files -def setup(): - """Function that sets a secondary set - of parameters needed by the various scripts that are called by the - FV3-LAM rocoto community workflow. This secondary set of parameters is - calculated using the primary set of user-defined parameters in the de- - fault and custom experiment/workflow configuration scripts (whose file - names are defined below). This script then saves both sets of parame- - ters in a global variable definitions file (really a bash script) in - the experiment directory. This file then gets sourced by the various - scripts called by the tasks in the workflow. +def load_config_for_setup(default_config, user_config): + """ Load in the default, machine, and user configuration files into + Python dictionaries. Return the combined dictionary. Args: - None + default_config (str): Path to the default config YAML + user_config (str): Path to the user-provided config YAML + Returns: - None + Python dict of configuration settings from YAML files. """ - logger = getLogger(__name__) - global USHdir - USHdir = os.path.dirname(os.path.abspath(__file__)) - cd_vrfy(USHdir) - - # import all environment variables - import_vars() - - # print message - log_info( - f""" - ======================================================================== - Starting function setup() in \"{os.path.basename(__file__)}\"... - ========================================================================""" - ) - # - # ----------------------------------------------------------------------- - # - # Step-1 of config - # ================ - # Load the configuration file containing default values for the experiment. 
- # - # ----------------------------------------------------------------------- - # - EXPT_DEFAULT_CONFIG_FN = "config_defaults.yaml" - cfg_d = load_config_file(os.path.join(USHdir, EXPT_DEFAULT_CONFIG_FN)) - import_vars(dictionary=flatten_dict(cfg_d), - env_vars=["EXPT_CONFIG_FN", - "EXTRN_MDL_NAME_ICS", "EXTRN_MDL_NAME_LBCS", - "FV3GFS_FILE_FMT_ICS", "FV3GFS_FILE_FMT_LBCS"]) + # Load the default config. + cfg_d = load_config_file(default_config) # Load the user config file, then ensure all user-specified - # variables correspond to a default value. - if not os.path.exists(EXPT_CONFIG_FN): - raise FileNotFoundError(f'User config file not found: EXPT_CONFIG_FN = {EXPT_CONFIG_FN}') + # variables correspond to a default value. + if not os.path.exists(user_config): + raise FileNotFoundError(f'User config file not found: + user_config = {user_config}') try: - cfg_u = load_config_file(os.path.join(USHdir, EXPT_CONFIG_FN)) + cfg_u = load_config_file(user_config) except: errmsg = dedent(f'''\n - Could not load YAML config file: {EXPT_CONFIG_FN} + Could not load YAML config file: {user_config} Reference the above traceback for more information. ''') raise Exception(errmsg) - cfg_u = flatten_dict(cfg_u) - for key in cfg_u: - if key not in flatten_dict(cfg_d): - raise Exception(dedent(f''' - User-specified variable "{key}" in {EXPT_CONFIG_FN} is not valid. - Check {EXPT_DEFAULT_CONFIG_FN} for allowed user-specified variables.\n''')) + # Make sure the keys in user config match those in the default + # config. 
+ if not check_structure_dict(cfg_u, cfg_d): + raise Exception(dedent(f''' + User-specified variables in {user_config} are not valid + Check {default_config} for allowed user-specified variables\n''')) # Mandatory variables *must* be set in the user's config; the default value is invalid - mandatory = ['MACHINE'] + mandatory = ['user.MACHINE'] for val in mandatory: - if val not in cfg_u: - raise Exception(f'Mandatory variable "{val}" not found in user config file {EXPT_CONFIG_FN}') + sect, key = val.split('.') + user_setting = cfg_u.get(sect, {}).get(key) + if user_setting is None: + raise Exception(f'Mandatory variable "{val}" not found in ' + f'user config file {user_config}') - import_vars(dictionary=cfg_u, env_vars=["MACHINE", - "EXTRN_MDL_NAME_ICS", "EXTRN_MDL_NAME_LBCS", - "FV3GFS_FILE_FMT_ICS", "FV3GFS_FILE_FMT_LBCS"]) - # - #----------------------------------------------------------------------- - # - # Step-2 of config - # ================ - # Source machine specific config file to set default values - # - #----------------------------------------------------------------------- - # - global MACHINE, EXTRN_MDL_SYSBASEDIR_ICS, EXTRN_MDL_SYSBASEDIR_LBCS - MACHINE_FILE = os.path.join(USHdir, "machine", f"{lowercase(MACHINE)}.yaml") - if not os.path.exists(MACHINE_FILE): + # Load the machine config file + machine = cfg_u.get('user').get('MACHINE') + machine_file = os.path.join(USHdir, "machine", f"{lowercase(machine)}.yaml") + + if not os.path.exists(machine_file): raise FileNotFoundError(dedent( f""" - The machine file {MACHINE_FILE} does not exist. - Check that you have specified the correct machine ({MACHINE}) in your config file {EXPT_CONFIG_FN}""" + The machine file {machine_file} does not exist.
+ Check that you have specified the correct machine + ({machine}) in your config file {user_config}""" )) - machine_cfg = load_config_file(MACHINE_FILE) + machine_cfg = load_config_file(machine_file) + + # Load the constants file + cfg_c = load_config_file(os.path.join(USHdir, "constants.yaml")) + + # Update default config with the constants, the machine config, and + # then the user_config + update_dict(cfg_c, cfg_d) + update_dict(machine_cfg, cfg_d) + update_dict(cfg_u, cfg_d) + + return cfg_d + + +def setup(USHdir, user_config_fn="config.yaml"): + """Function that derives a secondary set of parameters needed to + configure a Rocoto-based SRW workflow. The derived parameters use a + set of required user-defined parameters defined by either + config_defaults.yaml, a user-provided configuration file + (config.yaml), or a YAML machine file. + + A set of global variable definitions is saved to the experiment + directory as a bash configure file that is sourced by scripts at run + time. + + Args: + USHdir (str): The full path of the ush/ directory where + this script is located + user_config_fn (str): The name of a user-provided config YAML + + Returns: + None + """ + + logger = getLogger(__name__) + cd_vrfy(USHdir) + + # print message + log_info( + f""" + ======================================================================== + Starting function setup() in \"{os.path.basename(__file__)}\"... + ========================================================================""" + ) + + # Create a dictionary of config options from defaults, machine, and + # user config files.
+ default_config_fp = os.path.join(USHdir, "config_defaults.yaml") + user_config_fp = os.path.join(USHdir, user_config_fn) + expt_config = load_config_for_setup(default_config_fp, user_config_fp) + # ics and lbcs - def get_location(xcs,fmt): - if ("data" in machine_cfg) and (xcs in machine_cfg["data"]): - v = machine_cfg["data"][xcs] + def get_location(xcs, fmt, expt_cfg): + if ("data" in expt_cfg) and (xcs in expt_cfg["data"]): + v = expt_cfg["data"][xcs] if not isinstance(v,dict): return v else: @@ -149,40 +162,23 @@ def get_location(xcs,fmt): else: return "" - EXTRN_MDL_SYSBASEDIR_ICS = get_location(EXTRN_MDL_NAME_ICS, FV3GFS_FILE_FMT_ICS) - EXTRN_MDL_SYSBASEDIR_LBCS = get_location(EXTRN_MDL_NAME_LBCS, FV3GFS_FILE_FMT_LBCS) + EXTRN_MDL_SYSBASEDIR_ICS = get_location( + expt_config.get('task_get_extrn_ics', {}).get('EXTRN_MDL_NAME_ICS'), + expt_config.get('task_get_extrn_ics', {}).get('FV3GFS_FILE_FMT_ICS'), + expt_config, + ) + EXTRN_MDL_SYSBASEDIR_LBCS = get_location( + expt_config.get('task_get_extrn_lbcs', {}).get('EXTRN_MDL_NAME_LBCS'), + expt_config.get('task_get_extrn_lbcs', {}).get('FV3GFS_FILE_FMT_LBCS'), + expt_config, + ) # remove the data key and provide machine specific default values for cfg_d - if "data" in machine_cfg: - machine_cfg.pop("data") - machine_cfg.update({ - "EXTRN_MDL_SYSBASEDIR_ICS": EXTRN_MDL_SYSBASEDIR_ICS, - "EXTRN_MDL_SYSBASEDIR_LBCS": EXTRN_MDL_SYSBASEDIR_LBCS, - }) - machine_cfg = flatten_dict(machine_cfg) - update_dict(machine_cfg, cfg_d) - - # - #----------------------------------------------------------------------- - # - # Step-3 of config - # ================ - # Source user config.
This overrides previous two configs - # - #----------------------------------------------------------------------- - # - update_dict(cfg_u, cfg_d) - - # Now that all 3 config files have their contribution in cfg_d - # import its content to python globals() - import_vars(dictionary=flatten_dict(cfg_d)) - - # make machine name uppercase - MACHINE = uppercase(MACHINE) + if "data" in expt_config: + expt_config.pop("data") - # Load constants file and save its contents to a variable for later - cfg_c = load_config_file(os.path.join(USHdir, CONSTANTS_FN)) - import_vars(dictionary=flatten_dict(cfg_c)) + expt_config['task_get_extrn_ics']["EXTRN_MDL_SYSBASEDIR_ICS"] = EXTRN_MDL_SYSBASEDIR_ICS + expt_config['task_get_extrn_lbcs']["EXTRN_MDL_SYSBASEDIR_LBCS"] = EXTRN_MDL_SYSBASEDIR_LBCS # # ----------------------------------------------------------------------- @@ -192,9 +188,9 @@ def get_location(xcs,fmt): # # ----------------------------------------------------------------------- # - global WORKFLOW_ID - WORKFLOW_ID = "id_" + str(int(datetime.datetime.now().timestamp())) - log_info(f"""WORKFLOW ID = {WORKFLOW_ID}""") + workflow_id = "id_" + str(int(datetime.datetime.now().timestamp())) + expt_config["workflow"]["WORKFLOW_ID"] = workflow_id + log_info(f"""WORKFLOW ID = {workflow_id}""") # # ----------------------------------------------------------------------- @@ -204,13 +200,17 @@ def get_location(xcs,fmt): # # ----------------------------------------------------------------------- # - # export env vars before calling another module - export_vars() - - if PREDEF_GRID_NAME: - set_predef_grid_params() + if expt_config.get('task_run_fcst', {}).get('PREDEF_GRID_NAME'): + grid_params = set_predef_grid_params(USHdir, expt_config['task_run_fcst']) - import_vars() + # Users like to change these variables, so don't overwrite them + special_vars = ["DT_ATMOS", "LAYOUT_X", "LAYOUT_Y", "BLOCKSIZE"] + for param, value in grid_params.items(): + if param in special_vars and \ + 
expt_config.get('task_run_fcst', {}).get(param) is not None: + continue + else: + expt_config['task_run_fcst'][param] = value # # ----------------------------------------------------------------------- @@ -219,11 +219,10 @@ def get_location(xcs,fmt): # # ----------------------------------------------------------------------- # - global VERBOSE - if DEBUG and not VERBOSE: + if expt_config.get('workflow', {}).get('DEBUG'): log_info( """ - Resetting VERBOSE to \"TRUE\" because DEBUG has been set to \"TRUE\"...""" + Setting VERBOSE to \"TRUE\" because DEBUG has been set to \"TRUE\"...""" ) VERBOSE = True @@ -238,13 +237,15 @@ def get_location(xcs,fmt): # # ----------------------------------------------------------------------- # - global SHUM_MAG, SKEB_MAG, SPPT_MAG - if not DO_SHUM: - SHUM_MAG = -999.0 - if not DO_SKEB: - SKEB_MAG = -999.0 - if not DO_SPPT: - SPPT_MAG = -999.0 + # Alias to save some space below. Also, make sure the section + # exists! + global_sect = expt_config['global'] + if not global_sect.get('DO_SHUM'): + global_sect['SHUM_MAG'] = -999.0 + if not global_sect.get('DO_SKEB'): + global_sect['SKEB_MAG'] = -999.0 + if not global_sect.get('DO_SPPT'): + global_sect['SPPT_MAG'] = -999.0 # # ----------------------------------------------------------------------- # @@ -254,10 +255,39 @@ def get_location(xcs,fmt): # # ----------------------------------------------------------------------- # - global N_VAR_SPP - N_VAR_SPP = 0 - if DO_SPP: - N_VAR_SPP = len(SPP_VAR_LIST) + if global_sect.get('DO_SPP'): + global_sect['N_VAR_SPP'] = len(global_sect['SPP_VAR_LIST']) + else: + global_sect['N_VAR_SPP'] = 0 + # + # ----------------------------------------------------------------------- + # + # If running with SPP, confirm that each SPP-related namelist value + # contains the same number of entries as N_VAR_SPP (set above to be equal + # to the number of entries in SPP_VAR_LIST). 
+ # + # ----------------------------------------------------------------------- + # + spp_vars = ['SPP_MAG_LIST', + 'SPP_LSCALE', + 'SPP_TSCALE', + 'SPP_SIGTOP1', + 'SPP_SIGTOP2', + 'SPP_STDDEV_CUTOFF', + 'ISEED_SPP', + ] + + if global_sect.get('DO_SPP'): + for spp_var in spp_vars: + if len(global_sect[spp_var]) != global_sect['N_VAR_SPP']: + raise Exception( + f''' + All MYNN PBL, MYNN SFC, GSL GWD, Thompson MP, or RRTMG SPP-related namelist + variables must be of equal length to SPP_VAR_LIST: + SPP_VAR_LIST (length {global_sect['N_VAR_SPP']}) + {spp_var} (length {len(global_sect[spp_var])}) + ''' + ) # # ----------------------------------------------------------------------- # @@ -273,50 +303,16 @@ def get_location(xcs,fmt): # # ----------------------------------------------------------------------- # - global N_VAR_LNDP, LNDP_TYPE, LNDP_MODEL_TYPE, FHCYC_LSM_SPP_OR_NOT - N_VAR_LNDP = 0 - LNDP_TYPE = 0 - LNDP_MODEL_TYPE = 0 - FHCYC_LSM_SPP_OR_NOT = 0 - if DO_LSM_SPP: - N_VAR_LNDP = len(LSM_SPP_VAR_LIST) - LNDP_TYPE = 2 - LNDP_MODEL_TYPE = 2 - FHCYC_LSM_SPP_OR_NOT = 999 - # - # ----------------------------------------------------------------------- - # - # If running with SPP, confirm that each SPP-related namelist value - # contains the same number of entries as N_VAR_SPP (set above to be equal - # to the number of entries in SPP_VAR_LIST). 
- # - # ----------------------------------------------------------------------- - # - if DO_SPP: - if ( - (len(SPP_MAG_LIST) != N_VAR_SPP) - or (len(SPP_LSCALE) != N_VAR_SPP) - or (len(SPP_TSCALE) != N_VAR_SPP) - or (len(SPP_SIGTOP1) != N_VAR_SPP) - or (len(SPP_SIGTOP2) != N_VAR_SPP) - or (len(SPP_STDDEV_CUTOFF) != N_VAR_SPP) - or (len(ISEED_SPP) != N_VAR_SPP) - ): - raise Exception( - f''' - All MYNN PBL, MYNN SFC, GSL GWD, Thompson MP, or RRTMG SPP-related namelist - variables set in {EXPT_CONFIG_FN} must be equal in number of entries to what is - found in SPP_VAR_LIST: - SPP_VAR_LIST (length {len(SPP_VAR_LIST)}) - SPP_MAG_LIST (length {len(SPP_MAG_LIST)}) - SPP_LSCALE (length {len(SPP_LSCALE)}) - SPP_TSCALE (length {len(SPP_TSCALE)}) - SPP_SIGTOP1 (length {len(SPP_SIGTOP1)}) - SPP_SIGTOP2 (length {len(SPP_SIGTOP2)}) - SPP_STDDEV_CUTOFF (length {len(SPP_STDDEV_CUTOFF)}) - ISEED_SPP (length {len(ISEED_SPP)}) - ''' - ) + if global_sect.get('DO_LSM_SPP'): + global_sect['N_VAR_LNDP'] = len(global_sect['LSM_SPP_VAR_LIST']) + global_sect['LNDP_TYPE'] = 2 + global_sect['LNDP_MODEL_TYPE'] = 2 + global_sect['FHCYC_LSM_SPP_OR_NOT'] = 999 + else + global_sect['N_VAR_LNDP'] = 0 + global_sect['LNDP_TYPE'] = 0 + global_sect['LNDP_MODEL_TYPE'] = 0 + global_sect['FHCYC_LSM_SPP_OR_NOT'] = 0 # # ----------------------------------------------------------------------- # @@ -326,26 +322,26 @@ def get_location(xcs,fmt): # # ----------------------------------------------------------------------- # - if DO_LSM_SPP: - if ( - (len(LSM_SPP_MAG_LIST) != N_VAR_LNDP) - or (len(LSM_SPP_LSCALE) != N_VAR_LNDP) - or (len(LSM_SPP_TSCALE) != N_VAR_LNDP) - ): - raise Exception( - f''' - All Noah or RUC-LSM SPP-related namelist variables (except ISEED_LSM_SPP) - set in {EXPT_CONFIG_FN} must be equal in number of entries to what is found in - SPP_VAR_LIST: - LSM_SPP_VAR_LIST (length {len(LSM_SPP_VAR_LIST)}) - LSM_SPP_MAG_LIST (length {len(LSM_SPP_MAG_LIST)}) - LSM_SPP_LSCALE (length 
{len(LSM_SPP_LSCALE)}) - LSM_SPP_TSCALE (length {len(LSM_SPP_TSCALE)}) - ''' - ) + lsm_spp_vars = ['LSM_SPP_MAG_LIST', + 'LSM_SPP_LSCALE', + 'LSM_SPP_TSCALE', + ] + if global_sect.get('DO_LSM_SPP'): + for lsm_spp_var in lsm_spp_vars: + if len(global_sect[lsm_spp_var]) != global_sect['N_VAR_LNDP']: + raise Exception( + f''' + + + All Noah or RUC-LSM SPP-related namelist variables (except ISEED_LSM_SPP) + must be of equal length to LSM_SPP_VAR_LIST: + LSM_SPP_VAR_LIST (length {global_sect['N_VAR_LNDP']}) + {lsm_spp_var} (length {len(global_sect[lsm_spp_var])}) + ''' + ) # # The current script should be located in the ush subdirectory of the - # workflow directory. Thus, the workflow directory is the one above the + # workflow directory. Thus, the SRW home directory is the one above the # directory of the current script. # HOMEdir = os.path.abspath( @@ -355,13 +351,13 @@ def get_location(xcs,fmt): # # ----------------------------------------------------------------------- # - # Set the base directories in which codes obtained from external reposi- - # tories (using the manage_externals tool) are placed. Obtain the rela- - # tive paths to these directories by reading them in from the manage_ex- - # ternals configuration file. (Note that these are relative to the lo- - # cation of the configuration file.) Then form the full paths to these - # directories. Finally, make sure that each of these directories actu- - # ally exists. + # Set the base directories in which codes obtained from external + # repositories (using the manage_externals tool) are placed. Obtain the + # relative paths to these directories by reading them in from the + # manage_externals configuration file. (Note that these are relative to the + # location of the configuration file.) Then form the full paths to these + # directories.
Finally, make sure that each of these directories actually + # exists. # # ----------------------------------------------------------------------- # @@ -385,7 +381,7 @@ def get_location(xcs,fmt): Externals configuration file {mng_extrns_cfg_fn} does not contain "{external_name}".''') raise Exception(errmsg) from None - + UFS_WTHR_MDL_DIR = os.path.join(HOMEdir, UFS_WTHR_MDL_DIR) if not os.path.exists(UFS_WTHR_MDL_DIR): From b8fe83da7e16ea809f0652823598aa76043d8216 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 7 Nov 2022 16:09:31 +0000 Subject: [PATCH 02/19] WIP --- ush/config_defaults.yaml | 219 ++- ush/constants.yaml | 11 + ush/generate_FV3LAM_wflow.py | 23 +- ush/link_fix.py | 226 ++-- ush/python_utils/config_parser.py | 63 + ush/set_extrn_mdl_params.py | 49 - ush/set_gridparams_ESGgrid.py | 46 +- ush/set_gridparams_GFDLgrid.py | 39 +- ush/set_ozone_param.py | 139 +- ush/set_thompson_mp_fix_files.py | 49 +- ush/setup.py | 2071 ++++++++++------------------- 11 files changed, 1148 insertions(+), 1787 deletions(-) delete mode 100644 ush/set_extrn_mdl_params.py diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 2c5831a4df..6e933514ef 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -56,6 +56,19 @@ user: MACHINE: "BIG_COMPUTER" ACCOUNT: "" + HOMEdir: '{{ HOMEdir }}' + USHdir: '{{ USHdir }}' + SCRIPTSdir: '{{ [HOMEdir, "scripts"]|path_join }}' + JOBSdir: '{{ [HOMEdir, "jobs"]|path_join }}' + SORCdir: '{{ [HOMEdir, "sorc"]|path_join }}' + PARMdir: '{{ [HOMEdir, "parm"]|path_join }}' + MODULESdir: '{{ [HOMEdir, "modulefiles"]|path_join }}' + EXECdir: '{{ [HOMEdir, workflow.EXEC_SUBDIR]|path_join }}' + VX_CONFIG_DIR: '{{ [HOMEdir, "parm"]|path_join }}' + METPLUS_CONF: '{{ [PARMdir, "metplus"]|path_join }}' + MET_CONFIG: '{{ [PARMdir, "met"]|path_join }}' + UFS_WTHR_MDL_DIR: '{{ UFS_WTHR_MDL_DIR }}' + #---------------------------- # PLATFORM config parameters #----------------------------- @@ -154,10 +167,10 @@ 
platform: WORKFLOW_MANAGER: "" NCORES_PER_NODE: "" LMOD_PATH: "" - BUILD_MOD_FN: "" - WFLOW_MOD_FN: "" - BUILD_VER_FN: "" - RUN_VER_FN: "" + BUILD_MOD_FN: "build_{{ user.MACHINE|lower() }}_{{ workflow.COMPILER }}" + WFLOW_MOD_FN: "wflow_{{ user.MACHINE|lower() }}" + BUILD_VER_FN: "build.ver.{{ user.MACHINE|lower() }}" + RUN_VER_FN: "run.ver.{{ user.MACHINE|lower() }}" SCHED: "" PARTITION_DEFAULT: "" QUEUE_DEFAULT: "" @@ -315,7 +328,7 @@ platform: # that will point to a subdirectory (having the name of the grid being # used) under this directory. This variable should be set to a null # string in this file, but it can be specified in the user-specified - # workflow configuration file (EXPT_CONFIG_FN). + # workflow configuration file (EXPT_CONFIG_FN). # #----------------------------------------------------------------------- # @@ -351,6 +364,25 @@ workflow: # #----------------------------------------------------------------------- #
Definitions: # # USE_CRON_TO_RELAUNCH: @@ -367,6 +399,9 @@ workflow: # USE_CRON_TO_RELAUNCH: false CRON_RELAUNCH_INTVL_MNTS: 3 + CRONTAB_LINE: "" + LOAD_MODULES_RUN_TASK_FP: "{{ [user.USHdir, 'load_modules_run_task.sh']|path_join}}" + # #----------------------------------------------------------------------- # @@ -398,6 +433,7 @@ workflow: EXPT_BASEDIR: "" EXPT_SUBDIR: "" EXEC_SUBDIR: "exec" + EXPTDIR: "{{ [EXPT_BASEDIR, EXPT_SUBDIR]|path_join}}" # #----------------------------------------------------------------------- # @@ -448,9 +484,10 @@ workflow: # directory (EXECDIR; this is set during experiment generation). # # DIAG_TABLE_TMPL_FN: - # Name of a template file that specifies the output fields of the forecast - # model (ufs-weather-model: diag_table) followed by [dot_ccpp_phys_suite]. - # Its default value is the name of the file that the ufs weather model + # Name of a template file that specifies the output fields of the + # forecast model (ufs-weather-model: diag_table) followed by the name + # of the ccpp_phys_suite. Its default value is the name of the file + # that the ufs weather model # expects to read in. 
# # FIELD_TABLE_TMPL_FN: @@ -511,20 +548,77 @@ workflow: FV3_NML_BASE_SUITE_FN: "input.nml.FV3" FV3_NML_YAML_CONFIG_FN: "FV3.input.yml" FV3_NML_BASE_ENS_FN: "input.nml.base_ens" + FV3_NML_FN: "input.nml" FV3_EXEC_FN: "ufs_model" - DATA_TABLE_TMPL_FN: "" - DIAG_TABLE_TMPL_FN: "" - FIELD_TABLE_TMPL_FN: "" - MODEL_CONFIG_TMPL_FN: "" - NEMS_CONFIG_TMPL_FN: "" - + DATA_TABLE_FN: "data_table" + DIAG_TABLE_FN: "diag_table.{{ CCPP_PHYS_SUITE }}" + FIELD_TABLE_FN: "field_table.{{ CCPP_PHYS_SUITE }}" + MODEL_CONFIG_FN: "model_configure" + NEMS_CONFIG_FN: "nems.configure" + + FV3_NML_BASE_SUITE_FP: '{{ [user.PARMdir, FV3_NML_BASE_SUITE_FN]|path_join}}' + FV3_NML_YAML_CONFIG_FP: '{{ [user.PARMdir, FV3_NML_YAML_CONFIG_FN]|path_join}}' + FV3_NML_BASE_ENS_FP: '{{ [EXPTDIR, FV3_NML_BASE_ENS_FN]|path_join}}' + DATA_TABLE_TMPL_FP: '{{ [user.PARMdir, DATA_TABLE_FN]|path_join}}' + DIAG_TABLE_TMPL_FP: '{{ [user.PARMdir, DIAG_TABLE_FN]|path_join}}' + FIELD_TABLE_TMPL_FP: '{{ [user.PARMdir, FIELD_TABLE_FN]|path_join}}' + MODEL_CONFIG_TMPL_FP: '{{ [user.PARMdir, MODEL_CONFIG_FN]|path_join}}' + NEMS_CONFIG_TMPL_FP: '{{ [user.PARMdir, NEMS_CONFIG_FN]|path_join}}' + + # These are staged in the exptdir at configuration time + DATA_TABLE_FP: '{{ [EXPTDIR, DATA_TABLE_FN]|path_join}}' + FIELD_TABLE_FP: '{{ [EXPTDIR, FIELD_TABLE_FN]|path_join}}' + NEMS_CONFIG_FP: '{{ [EXPTDIR, NEMS_CONFIG_FN]|path_join}}' + FV3_NML_FP: '{{ [EXPTDIR, FV3_NML_FN]|path_join }}' + FCST_MODEL: "ufs-weather-model" WFLOW_XML_FN: "FV3LAM_wflow.xml" GLOBAL_VAR_DEFNS_FN: "var_defns.sh" EXTRN_MDL_VAR_DEFNS_FN: "extrn_mdl_var_defns" WFLOW_LAUNCH_SCRIPT_FN: "launch_FV3LAM_wflow.sh" WFLOW_LAUNCH_LOG_FN: "log.launch_FV3LAM_wflow" + + GLOBAL_VAR_DEFNS_FP: '{{ [EXPTDIR, GLOBAL_VAR_DEFNS_FN] |path_join}}' + WFLOW_LAUNCH_SCRIPT_FP: '{{ [user.USHdir, WFLOW_LAUNCH_SCRIPT_FN] |path_join}}' + WFLOW_LAUNCH_LOG_FP: '{{ [EXPTDIR, WFLOW_LAUNCH_LOG_FN] |path_join}}' + # + 
#----------------------------------------------------------------------- + # + # Set the fix file paths + # + # FIXdir: + # Location where fix files will be stored for a given experiment + # + # FIXam: + # Directory containing the fixed files (or symlinks) for various fields on + # global grids (which are usually much coarser than the native FV3-LAM grid). + # + # FIXclim: + # Directory containing the MERRA2 aerosol climatology data file and + # lookup tables for optics properties + # + # FIXlam: + # Directory containing the fixed files (or symlinks) for the grid, + # orography, and surface climatology on the native FV3-LAM grid. + # + # THOMPSON_MP_CLIMO_FN and _FP: + # Name and path of file that contains aerosol climatology data. It can + # be used to generate approximate versions of the aerosol fields + # needed by Thompson microphysics. This file will be used to + # generate such approximate aerosol fields in the ICs and LBCs if + # Thompson MP is included in the physics suite and if the external + # model for ICs or LBCs does not already provide these fields. + # + #----------------------------------------------------------------------- + # + FIXdir: "{{ EXPTDIR if workflow_switches.RUN_TASK_MAKE_GRID else [user.HOMEdir, 'fix']|path_join }}" + FIXam: "{{ [FIXdir, 'fix_am']|path_join}}" + FIXclim: "{{ [FIXdir, 'fix_clim']|path_join}}" + FIXlam: "{{ [FIXdir, 'fix_lam']|path_join}}" + + THOMPSON_MP_CLIMO_FN: "Thompson_MP_MONTHLY_CLIMO.nc" + THOMPSON_MP_CLIMO_FP: '{{ [FIXam, THOMPSON_MP_CLIMO_FN]|path_join}}' # #----------------------------------------------------------------------- # @@ -537,9 +631,24 @@ workflow: # the XML physics suite definition file that are staged in the experiment # directory or the cycle directories under it.
# + # *_FN and *_FP variables set the name and paths to the suite + # definition files used for the experiment #----------------------------------------------------------------------- # CCPP_PHYS_SUITE: "FV3_GFS_v16" + CCPP_PHYS_SUITE_FN: "suite_{{ CCPP_PHYS_SUITE }}.xml" + CCPP_PHYS_SUITE_IN_CCPP_FP: "{{ [user.UFS_WTHR_MDL_DIR, 'FV3', 'ccpp', 'suites', CCPP_PHYS_SUITE_FN] |path_join}}" + CCPP_PHYS_SUITE_FP: "{{ [EXPTDIR, CCPP_PHYS_SUITE_FN]|path_join}}" + # + #----------------------------------------------------------------------- + # + # Set the field dictionary file name and paths. + # + #----------------------------------------------------------------------- + # + FIELD_DICT_FN: "fd_nems.yaml" + FIELD_DICT_IN_UWM_FP: "{{ [user.UFS_WTHR_MDL_DIR, 'tests', 'parm', FIELD_DICT_FN]|path_join}}" + FIELD_DICT_FP: "{{ [EXPTDIR, FIELD_DICT_FN]|path_join}}" # #----------------------------------------------------------------------- # @@ -701,6 +810,9 @@ nco: # # OPSROOT: # The operations root directory in NCO mode. + # + # LOGDIR: + # Directory in which the log files from the workflow tasks will be placed.
# # For more information on NCO standards # @@ -712,7 +824,32 @@ nco: NET: "rrfs" RUN: "rrfs" model_ver: "v1.0.0" - OPSROOT: "" + OPSROOT: "{{ workflow.EXPT_BASEDIR }}/../nco_dirs" + COMROOT: "{{ OPSROOT }}/com" + PACKAGEROOT: "{{ OPSROOT }}/packages" + DATAROOT: "{{ OPSROOT }}/tmp" + DCOMROOT: "{{ OPSROOT }}/dcom" + LOGDIR: "{{ OPSROOT }}/output" + COMIN_BASEDIR: "{{ COMROOT }}/{{ NET }}/{{ model_ver }}" + COMOUT_BASEDIR: "{{ COMROOT }}/{{ NET }}/{{ model_ver }}" + + # + #----------------------------------------------------------------------- + # + # The following are also described in the NCO doc above + # + #----------------------------------------------------------------------- + # + DBNROOT: "" + SENDECF: false + SENDDBN: false + SENDDBN_NTC: false + SENDCOM: false + SENDWEB: false + KEEPDATA: true + MAILTO: "" + MAILCC: "" + #---------------------------- # WORKFLOW SWITCHES config parameters @@ -814,7 +951,7 @@ task_make_grid: # #----------------------------------------------------------------------- # - GRID_DIR: "" + GRID_DIR: "{{ [workflow.EXPTDIR, 'grid']|path_join}}" # #----------------------------------------------------------------------- # @@ -1095,7 +1232,7 @@ task_make_orog: KMP_AFFINITY_MAKE_OROG: "disabled" OMP_NUM_THREADS_MAKE_OROG: 6 OMP_STACKSIZE_MAKE_OROG: "2048m" - OROG_DIR: "" + OROG_DIR: "{{ [workflow.EXPTDIR, 'orog']|path_join}}" TOPO_DIR: "" #---------------------------- @@ -1110,7 +1247,7 @@ task_make_sfc_climo: KMP_AFFINITY_MAKE_SFC_CLIMO: "scatter" OMP_NUM_THREADS_MAKE_SFC_CLIMO: 1 OMP_STACKSIZE_MAKE_SFC_CLIMO: "1024m" - SFC_CLIMO_DIR: "{{ [workflow.EXPTDIR, 'sfc_climo']|path_join}}" + SFC_CLIMO_DIR: "{{ [workflow.EXPTDIR, 'sfc_climo']|path_join}}" SFC_CLIMO_INPUT_DIR: "" #---------------------------- @@ -1262,9 +1399,8 @@ task_get_extrn_lbcs: # the external model started than when the FV3 forecast configured here # should start. For example, the forecast should use lateral boundary # conditions from the GFS started 6 hours earlier, then - # EXTRN_MDL_LBCS_OFFSET_HRS=6.
- # Note: the default value is model-dependent and set in - # set_extrn_mdl_params.sh + # EXTRN_MDL_LBCS_OFFSET_HRS=6. Defaults to 0 except for RAP, which + # uses a 3 hour offset. # # FV3GFS_FILE_FMT_LBCS: # If using the FV3GFS model as the source of the LBCs (i.e. if @@ -1275,7 +1411,7 @@ task_get_extrn_lbcs: # EXTRN_MDL_NAME_LBCS: "FV3GFS" LBC_SPEC_INTVL_HRS: 6 - EXTRN_MDL_LBCS_OFFSET_HRS: "" + EXTRN_MDL_LBCS_OFFSET_HRS: "{{ 3 if EXTRN_MDL_NAME_LBCS == 'RAP' else 0 }}" FV3GFS_FILE_FMT_LBCS: "nemsio" #----------------------------------------------------------------------- # @@ -1379,17 +1515,18 @@ task_make_lbcs: KMP_AFFINITY_MAKE_LBCS: "scatter" OMP_NUM_THREADS_MAKE_LBCS: 1 OMP_STACKSIZE_MAKE_LBCS: "1024m" - LBC_SPEC_FCST_HRS: [] + LBC_SPEC_FCST_HRS: "{% for h in range(task_get_extrn_lbcs.LBC_SPEC_INTVL_HRS, task_get_extrn_lbcs.LBC_SPEC_INTVL_HRS + workflow.FCST_LEN_HRS, task_get_extrn_lbcs.LBC_SPEC_INTVL_HRS %}{{ h }}{% endfor %}" #---------------------------- # FORECAST config parameters #----------------------------- task_run_fcst: RUN_FCST_TN: "run_fcst" - NNODES_RUN_FCST: "" # This is calculated in the workflow generation scripts, so no need to set here. 
- PPN_RUN_FCST: "" # will be calculated from NCORES_PER_NODE and OMP_NUM_THREADS in setup.sh + NNODES_RUN_FCST: "{{ (PE_MEMBER01 + PPN_RUN_FCST - 1) // PPN_RUN_FCST }}" + PPN_RUN_FCST: "{{ platform.NCORES_PER_NODE // OMP_NUM_THREADS_RUN_FCST }}" WTIME_RUN_FCST: 04:30:00 MAXTRIES_RUN_FCST: 1 + FV3_EXEC_FP: '{{ [user.EXECdir, workflow.FV3_EXEC_FN]|path_join}}' # #----------------------------------------------------------------------- # @@ -1520,6 +1657,8 @@ task_run_fcst: # QUILTING: true PRINT_ESMF: false + + PE_MEMBER01: '{{ LAYOUT_Y * LAYOUT_X + WRTCMP_write_groups * WRTCMP_write_tasks_per_group if QUILTING else LAYOUT_Y * LAYOUT_X}}' WRTCMP_write_groups: 1 WRTCMP_write_tasks_per_group: 20 @@ -1580,7 +1719,7 @@ task_run_fcst: # #----------------------------------------------------------------------- # - PREDEF_GRID_NAME: "" + PREDEF_GRID_NAME: "" # #----------------------------------------------------------------------- # @@ -1589,7 +1728,7 @@ task_run_fcst: # #----------------------------------------------------------------------- # - USE_MERRA_CLIMO: false + USE_MERRA_CLIMO: "{{ workflow.CCPP_PHYS_SUITE == 'FV3_GFS_v15_thompson_mynn_lam3km' }}" # #----------------------------------------------------------------------- # @@ -1849,7 +1988,7 @@ task_run_post: # USE_CUSTOM_POST_CONFIG_FILE: false CUSTOM_POST_CONFIG_FP: "" - POST_OUTPUT_DOMAIN_NAME: "" + POST_OUTPUT_DOMAIN_NAME: "{{ task_run_fcst.PREDEF_GRID_NAME }}" #---------------------------- # GET OBS CCPA config parameters @@ -2131,21 +2270,21 @@ global: # # NUM_ENS_MEMBERS: # The number of ensemble members to run if DO_ENSEMBLE is set to true. - # This variable also controls the naming of the ensemble member directories. - # For example, if this is set to "8", the member directories will be named - # mem1, mem2, ..., mem8. If it is set to "08" (note the leading zero), - # the member directories will be named mem01, mem02, ..., mem08. 
Note, - # however, that after reading in the number of characters in this string - # (in order to determine how many leading zeros, if any, should be placed - # in the names of the member directories), the workflow generation scripts - # strip away those leading zeros. Thus, in the variable definitions file - # (GLOBAL_VAR_DEFNS_FN), this variable appear with its leading zeros - # stripped. This variable is not used if DO_ENSEMBLE is not set to true. - # + # + # ENSMEM_NAMES: + # A list of names for the ensemble member names following the format + # mem001, mem002, etc. + # + # FV3_NML_ENSMEM_FPS: + # Paths to the ensemble member corresponding namelists in the + # experiment directory #----------------------------------------------------------------------- # DO_ENSEMBLE: false - NUM_ENS_MEMBERS: 1 + NUM_ENS_MEMBERS: 0 + ENSMEM_NAMES: "{% for m in range(NUM_ENS_MEMBERS) %}{{ 'mem%03d, ' % m }}{% endfor %}" + FV3_NML_ENSMEM_FPS: "{% for mem in ENSMEM_NAMES %}{{ [EXPTDIR, '%s_%s' % (FV3_NML_FN, mem)]|path_join }}{% endfor %}" + # #----------------------------------------------------------------------- # diff --git a/ush/constants.yaml b/ush/constants.yaml index cd4867a728..c7ee96005e 100644 --- a/ush/constants.yaml +++ b/ush/constants.yaml @@ -30,6 +30,15 @@ constants: # the boundary of the domain. # # Note that the regional grid is referred to as "tile 7" in the code. 
+ # Therefore, we will set the constants to a regional, tile 7 with + # these config options: + # + # GTYPE: + # Grid type string, set to regional for SRW + # + # TILE_RGNL: 7 + # Tile number set ot 7 for a regional grid in SRW + # # We will let: # # * NH0 denote the width (in units of number of cells on tile 7) of @@ -45,6 +54,8 @@ constants: # #----------------------------------------------------------------------- # + GTYPE: regional + TILE_RGNL: 7 NH0: 0 NH3: 3 NH4: 4 diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 4faed8cc33..b87ccdd19f 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -100,10 +100,7 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = 'log.generate_FV3LAM_wflow') -> # The setup function reads the user configuration file and fills in # non-user-specified values from config_defaults.yaml - setup(USHdir) - - # import all environment variables - import_vars() + expt_config = setup(USHdir) # # ----------------------------------------------------------------------- @@ -114,8 +111,11 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = 'log.generate_FV3LAM_wflow') -> # # ----------------------------------------------------------------------- # - WFLOW_XML_FP = os.path.join(EXPTDIR, WFLOW_XML_FN) - + wflow_xml_fn = expt_config['workflow']['WFLOW_XML_FN'] + wflow_xml_fp = os.path.join( + expt_config['workflow']['EXPTDIR'], + wflow_xml_fn, + ) # # ----------------------------------------------------------------------- # @@ -127,22 +127,25 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = 'log.generate_FV3LAM_wflow') -> # # ----------------------------------------------------------------------- # - if WORKFLOW_MANAGER == "rocoto": + if expt_config['platform']['WORKFLOW_MANAGER'] == "rocoto": - template_xml_fp = os.path.join(PARMdir, WFLOW_XML_FN) + template_xml_fp = os.path.join( + expt_config['user']['PARMdir'], + wflow_xml_fn, + ) log_info( f''' Creating rocoto workflow XML file 
(WFLOW_XML_FP) from jinja template XML file (template_xml_fp): template_xml_fp = \"{template_xml_fp}\" - WFLOW_XML_FP = \"{WFLOW_XML_FP}\"''' + WFLOW_XML_FP = \"{wflow_xml_fp}\"''' ) ensmem_indx_name = "" uscore_ensmem_name = "" slash_ensmem_subdir = "" - if DO_ENSEMBLE: + if expt_config['global']['DO_ENSEMBLE']: ensmem_indx_name = "mem" uscore_ensmem_name = f"_mem#{ensmem_indx_name}#" slash_ensmem_subdir = f"/mem#{ensmem_indx_name}#" diff --git a/ush/link_fix.py b/ush/link_fix.py index 752fb7743f..89d7a526a6 100755 --- a/ush/link_fix.py +++ b/ush/link_fix.py @@ -4,6 +4,7 @@ import os import sys import argparse +import re import glob from python_utils import ( @@ -23,11 +24,34 @@ ) -def link_fix(verbose, file_group): - """This file defines a function that ... +def link_fix(verbose, + file_group, + source_dir, + target_dir, + ccpp_phys_suite, + constants, + dot_or_underscore, + nhw, + run_task, + sfc_climo_fields, + **kwargs): + """This file defines a function that links fix files to the target + directory for a given SRW experiment. Only links files for one group + at a time. + Args: verbose: True or False file_group: could be on of ["grid", "orog", "sfc_climo"] + source_dir: the path to directory where the file_group fix files + are linked from + target_dir: the directory where the fix files should be linked to + dot_or_underscore: str containing either a dot or an underscore + nhw: grid parameter setting + constants: dict containing the constants used by SRW + run_task: boolean value indicating whether the task is to be run + in the experiment + climo_fields: list of fields needed for climo + Returns: a string: resolution """ @@ -37,15 +61,18 @@ def link_fix(verbose, file_group): valid_vals_file_group = ["grid", "orog", "sfc_climo"] check_var_valid_value(file_group, valid_vals_file_group) - # import all environement variables - import_vars() + # Decompress the constants needed below. 
+ nh0 = constants['NH0'] + nh3 = constants['NH3'] + nh4 = constants['NH4'] + tile_rgnl = constants['TILE_RGNL'] # # ----------------------------------------------------------------------- # - # Create symlinks in the FIXlam directory pointing to the grid files. - # These symlinks are needed by the make_orog, make_sfc_climo, make_ic, - # make_lbc, and/or run_fcst tasks. + # Create symlinks in the target_dir pointing to the fix files. + # These symlinks are needed by the make_orog, make_sfc_climo, + # make_ic, make_lbc, and/or run_fcst tasks. # # Note that we check that each target file exists before attempting to # create symlinks. This is because the "ln" command will create sym- @@ -55,7 +82,7 @@ def link_fix(verbose, file_group): # ----------------------------------------------------------------------- # print_info_msg( - f"Creating links in the FIXlam directory to the grid files...", verbose=verbose + f"Creating links in the {target_dir} directory to the grid files...", verbose=verbose ) # # ----------------------------------------------------------------------- @@ -166,44 +193,37 @@ def link_fix(verbose, file_group): # if file_group == "grid": fns = [ - f"C*{DOT_OR_USCORE}mosaic.halo{NHW}.nc", - f"C*{DOT_OR_USCORE}mosaic.halo{NH4}.nc", - f"C*{DOT_OR_USCORE}mosaic.halo{NH3}.nc", - f"C*{DOT_OR_USCORE}grid.tile{TILE_RGNL}.halo{NHW}.nc", - f"C*{DOT_OR_USCORE}grid.tile{TILE_RGNL}.halo{NH3}.nc", - f"C*{DOT_OR_USCORE}grid.tile{TILE_RGNL}.halo{NH4}.nc", + f"C*{dot_or_uscore}mosaic.halo{nhw}.nc", + f"C*{dot_or_uscore}mosaic.halo{nh4}.nc", + f"C*{dot_or_uscore}mosaic.halo{nh3}.nc", + f"C*{dot_or_uscore}grid.tile{tile_rgnl}.halo{nhw}.nc", + f"C*{dot_or_uscore}grid.tile{tile_rgnl}.halo{nh3}.nc", + f"C*{dot_or_uscore}grid.tile{tile_rgnl}.halo{nh4}.nc", ] - fps = [os.path.join(GRID_DIR, itm) for itm in fns] - run_task = f"{RUN_TASK_MAKE_GRID}" - # + elif file_group == "orog": fns = [ - f"C*{DOT_OR_USCORE}oro_data.tile{TILE_RGNL}.halo{NH0}.nc", - 
f"C*{DOT_OR_USCORE}oro_data.tile{TILE_RGNL}.halo{NH4}.nc", + f"C*{dot_or_uscore}oro_data.tile{tile_rgnl}.halo{nh0}.nc", + f"C*{dot_or_uscore}oro_data.tile{tile_rgnl}.halo{nh4}.nc", ] - if CCPP_PHYS_SUITE == "FV3_HRRR": + if ccpp_phys_suite == "FV3_HRRR": fns += [ - f"C*{DOT_OR_USCORE}oro_data_ss.tile{TILE_RGNL}.halo{NH0}.nc", - f"C*{DOT_OR_USCORE}oro_data_ls.tile{TILE_RGNL}.halo{NH0}.nc", + f"C*{dot_or_uscore}oro_data_ss.tile{tile_rgnl}.halo{nh0}.nc", + f"C*{dot_or_uscore}oro_data_ls.tile{tile_rgnl}.halo{nh0}.nc", ] - fps = [os.path.join(OROG_DIR, itm) for itm in fns] - run_task = f"{RUN_TASK_MAKE_OROG}" + # # The following list of symlinks (which have the same names as their - # target files) need to be created made in order for the make_ics and - # make_lbcs tasks (i.e. tasks involving chgres_cube) to work. + # target files) need to be created for the make_ics and make_lbcs + # tasks (i.e. tasks involving chgres_cube) to work. # elif file_group == "sfc_climo": - num_fields = len(SFC_CLIMO_FIELDS) - fns = [None] * (2 * num_fields) - for i in range(num_fields): - ii = 2 * i - fns[ii] = f"C*.{SFC_CLIMO_FIELDS[i]}.tile{TILE_RGNL}.halo{NH0}.nc" - fns[ii + 1] = f"C*.{SFC_CLIMO_FIELDS[i]}.tile{TILE_RGNL}.halo{NH4}.nc" - fps = [os.path.join(SFC_CLIMO_DIR, itm) for itm in fns] - run_task = f"{RUN_TASK_MAKE_SFC_CLIMO}" - # + fns = [] + for sfc_climo_field in sfc_climo_fields: + fns.append(f"C*.{sfc_climo_field}.tile{tile_rgnl}.halo{nh0}.nc") + fns.append(f"C*.{sfc_climo_field}.tile{tile_rgnl}.halo{nh4}.nc") + fps = [os.path.join(source_dir, itm) for itm in fns] # # ----------------------------------------------------------------------- # @@ -270,31 +290,25 @@ def link_fix(verbose, file_group): # # ----------------------------------------------------------------------- # - SAVE_DIR = os.getcwd() - cd_vrfy(FIXlam) + save_dir = os.getcwd() + cd_vrfy(target_dir) # # ----------------------------------------------------------------------- # # Use the set of full file paths 
generated above as the link targets to - # create symlinks to these files in the FIXlam directory. + # create symlinks to these files in the target directory. # # ----------------------------------------------------------------------- # - # If the task in consideration (which will be one of the pre-processing - # tasks MAKE_GRID_TN, MAKE_OROG_TN, and MAKE_SFC_CLIMO_TN) was run, then - # the target files will be located under the experiment directory. In - # this case, we use relative symlinks in order the experiment directory - # more portable and the symlinks more readable. However, if the task - # was not run, then pregenerated grid, orography, or surface climatology - # files will be used, and those will be located in an arbitrary directory - # (specified by the user) that is somwehere outside the experiment - # directory. Thus, in this case, there isn't really an advantage to using - # relative symlinks, so we use symlinks with absolute paths. + # If the task in consideration (one of the pre-processing tasks + # MAKE_GRID_TN, MAKE_OROG_TN, and MAKE_SFC_CLIMO_TN) was run, then + # the source location of the fix files will be located under the + # experiment directory. In this case, we use relative symlinks for + # portability and readability. Make absolute links otherwise. 
# + relative_link_flag = False if run_task: relative_link_flag = True - else: - relative_link_flag = False for fp in fps: fn = os.path.basename(fp) @@ -319,55 +333,36 @@ def link_fix(verbose, file_group): # ----------------------------------------------------------------------- # if file_group == "grid": - target = f"{cres}{DOT_OR_USCORE}grid.tile{TILE_RGNL}.halo{NH4}.nc" - symlink = f"{cres}{DOT_OR_USCORE}grid.tile{TILE_RGNL}.nc" + target = f"{cres}{dot_or_uscore}grid.tile{tile_rgnl}.halo{nh4}.nc" + symlink = f"{cres}{dot_or_uscore}grid.tile{tile_rgnl}.nc" create_symlink_to_file(target, symlink, True) # # ----------------------------------------------------------------------- # # If considering surface climatology files, create symlinks to the surface # climatology files that do not contain the halo size in their names. - # These are needed by the task that generates the initial condition files. + # These are needed by the make_ics task. + # + # The forecat model needs sfc climo files to be named without the + # tile7 and halo references, and with only "tile1" in the name. # # ----------------------------------------------------------------------- # if file_group == "sfc_climo": - tmp = [f"{cres}.{itm}" for itm in SFC_CLIMO_FIELDS] - fns_sfc_climo_with_halo_in_fn = [ - f"{itm}.tile{TILE_RGNL}.halo{NH4}.nc" for itm in tmp - ] - fns_sfc_climo_no_halo_in_fn = [f"{itm}.tile{TILE_RGNL}.nc" for itm in tmp] - - for i in range(num_fields): - target = f"{fns_sfc_climo_with_halo_in_fn[i]}" - symlink = f"{fns_sfc_climo_no_halo_in_fn[i]}" - create_symlink_to_file(target, symlink, True) - # - # In order to be able to specify the surface climatology file names in - # the forecast model's namelist file, in the FIXlam directory a symlink - # must be created for each surface climatology field that has "tile1" in - # its name (and no "halo") and which points to the corresponding "tile7.halo0" - # file. 
- # - tmp = [f"{cres}.{itm}" for itm in SFC_CLIMO_FIELDS] - fns_sfc_climo_tile7_halo0_in_fn = [ - f"{itm}.tile{TILE_RGNL}.halo{NH0}.nc" for itm in tmp - ] - fns_sfc_climo_tile1_no_halo_in_fn = [f"{itm}.tile1.nc" for itm in tmp] + for field in sfc_climo_fields: + + # Create links without "halo" in the name + halo = f"{cres}.{field}.tile{tile_rgnl}.halo{nh4}.nc" + no_halo = re.sub(f".halo{nh4}", "", halo) + create_symlink_to_file(halo, no_halo, True) + + # Create links without halo and tile7, and with "tile1" + halo_tile = f"{cres}.{field}.tile{tile_rgnl}.halo{nh0}.nc" + no_halo_tile = re.sub(f"tile{tile_rgnl}.halo{nh0}", "tile1", halo_tile) - for i in range(num_fields): - target = f"{fns_sfc_climo_tile7_halo0_in_fn[i]}" - symlink = f"{fns_sfc_climo_tile1_no_halo_in_fn[i]}" - create_symlink_to_file(target, symlink, True) - # - # ----------------------------------------------------------------------- - # # Change directory back to original one. - # - # ----------------------------------------------------------------------- - # - cd_vrfy(SAVE_DIR) + cd_vrfy(save_dir) return res @@ -389,7 +384,7 @@ parser.add_argument( "-p", "--path-to-defns", - dest="path_to_defns", + dest="path_to_defns", required=True, help="Path to var_defns file.", ) @@ -400,32 +395,49 @@ if __name__ == "__main__": args = parse_args(sys.argv[1:]) cfg = load_shell_config(args.path_to_defns) - cfg = flatten_dict(cfg) - import_vars(dictionary=cfg) - link_fix(VERBOSE, args.file_group) + link_fix(verbose=cfg['workflow']['VERBOSE'], + file_group=args.file_group, + source_dir=cfg[f'task_make_{args.file_group}'][f"{args.file_group.upper()}_DIR"], + target_dir=cfg['workflow']['FIXlam'], + ccpp_phys_suite=cfg['workflow']['CCPP_PHYS_SUITE'], + constants=cfg['constants'], + dot_or_underscore=cfg['workflow']['DOT_OR_USCORE'], + nhw=cfg['grid_params']['NHW'], + run_task=True, + sfc_climo_fields=cfg['task_run_fcst']['SFC_CLIMO_FIELDS'], + ) class Testing(unittest.TestCase): def 
test_link_fix(self): - res = link_fix(verbose=True, file_group="grid") + res = link_fix(verbose=True, + file_group="grid", + source_dir=self.task_dir, + target_dir=self.FIXlam, + ccpp_phys_suite=self.cfg['CCPP_PHYS_SUITE'], + constants=self.cfg['constants'], + dot_or_underscore=self.cfg['DOT_OR_USCORE'], + nhw=self.cfg['NHW'], + run_task=False, + sfc_climo_fields=['foo', 'bar'] + ) self.assertTrue(res == "3357") def setUp(self): define_macos_utilities() TEST_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "test_data") - FIXlam = os.path.join(TEST_DIR, "expt", "fix_lam") - mkdir_vrfy("-p", FIXlam) - set_env_var("FIXlam", FIXlam) - set_env_var("DOT_OR_USCORE", "_") - set_env_var("TILE_RGNL", 7) - set_env_var("NH0", 0) - set_env_var("NHW", 6) - set_env_var("NH4", 4) - set_env_var("NH3", 3) - set_env_var("GRID_DIR", TEST_DIR + os.sep + "RRFS_CONUS_3km") - set_env_var("RUN_TASK_MAKE_GRID", "FALSE") - set_env_var("OROG_DIR", TEST_DIR + os.sep + "RRFS_CONUS_3km") - set_env_var("RUN_TASK_MAKE_OROG", "FALSE") - set_env_var("SFC_CLIMO_DIR", TEST_DIR + os.sep + "RRFS_CONUS_3km") - set_env_var("RUN_TASK_MAKE_SFC_CLIMO", "FALSE") - set_env_var("CCPP_PHYS_SUITE", "FV3_GSD_SAR") + self.FIXlam = os.path.join(TEST_DIR, "expt", "fix_lam") + self.task_dir = os.path.join(TEST_DIR, "RRFS_CONUS_3km") + mkdir_vrfy("-p", self.FIXlam) + + self.cfg = { + "DOT_OR_USCORE": "_", + "NHW": 6, + "CCPP_PHYS_SUITE": "FV3_GSD_SAR", + "constants": { + "NH0": 0, + "NH4": 4, + "NH3": 3, + "TILE_RGNL": 7, + }, + } diff --git a/ush/python_utils/config_parser.py b/ush/python_utils/config_parser.py index 1d5f26db71..046c305af8 100644 --- a/ush/python_utils/config_parser.py +++ b/ush/python_utils/config_parser.py @@ -88,6 +88,69 @@ def join_str(loader, node): except NameError: pass +def extend_yaml(yaml_dict, full_dict=None): + + ''' + Updates yaml_dict inplace by rendering any existing Jinja2 templates + that exist in a value. 
+ ''' + + if full_dict is None: + full_dict = yaml_dict + + if not isinstance(yaml_dict, dict): + return + + for k, v in yaml_dict.items(): + + if isinstance(v, dict): + extend_yaml(v, full_dict) + else: + + # Save a bit of compute and only do this part for strings that + # contain the jinja double brackets. + v_str = str(v.text) if isinstance(v, ET.Element) else str(v) + is_a_template = any((ele for ele in ['{{', '{%'] if ele in v_str)) + if is_a_template: + + # Find expressions first, and process them as a single template + # if they exist + # Find individual double curly brace template in the string + # otherwise. We need one substitution template at a time so that + # we can opt to leave some un-filled when they are not yet set. + # For example, we can save cycle-dependent templates to fill in + # at run time. + if '{%' in v: + templates = [v_str] + else: + templates = re.findall(r'{{[^}]*}}|\S', v_str) + data = [] + for template in templates: + j2env = jinja2.Environment(loader=jinja2.BaseLoader, + undefined=jinja2.StrictUndefined) + j2env.filters['path_join'] = path_join + j2tmpl = j2env.from_string(template) + try: + # Fill in a template that has the appropriate variables + # set. 
+ template = j2tmpl.render(env=os.environ, **full_dict) + except jinja2.exceptions.UndefinedError as e: + # Leave a templated field as-is in the resulting dict + print(f'Error: {e}') + print(f'Preserved template: {k}: {template}') + for a, b in full_dict.items(): + print(f' {a}: {b}') + + data.append(template) + + if isinstance(v, ET.Element): + v.text = ''.join(data) + else: + # Put the full template line back together as it was, + # filled or not + yaml_dict[k] = ''.join(data) + + ########## # JSON ########## diff --git a/ush/set_extrn_mdl_params.py b/ush/set_extrn_mdl_params.py deleted file mode 100644 index 7d52055031..0000000000 --- a/ush/set_extrn_mdl_params.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python3 - -import unittest - -from python_utils import import_vars, export_vars, set_env_var, get_env_var - - -def set_extrn_mdl_params(): - """Sets parameters associated with the external model used for initial - conditions (ICs) and lateral boundary conditions (LBCs). - Args: - None - Returns: - None - """ - - # import all env variables - import_vars() - - global EXTRN_MDL_LBCS_OFFSET_HRS - - # - # ----------------------------------------------------------------------- - # - # Set EXTRN_MDL_LBCS_OFFSET_HRS, which is the number of hours to shift - # the starting time of the external model that provides lateral boundary - # conditions. 
- # - # ----------------------------------------------------------------------- - # - if EXTRN_MDL_NAME_LBCS == "RAP": - EXTRN_MDL_LBCS_OFFSET_HRS = EXTRN_MDL_LBCS_OFFSET_HRS or "3" - else: - EXTRN_MDL_LBCS_OFFSET_HRS = EXTRN_MDL_LBCS_OFFSET_HRS or "0" - - # export values we set above - env_vars = ["EXTRN_MDL_LBCS_OFFSET_HRS"] - export_vars(env_vars=env_vars) - - -class Testing(unittest.TestCase): - def test_extrn_mdl_params(self): - set_extrn_mdl_params() - EXTRN_MDL_LBCS_OFFSET_HRS = get_env_var("EXTRN_MDL_LBCS_OFFSET_HRS") - self.assertEqual(EXTRN_MDL_LBCS_OFFSET_HRS, 3) - - def setUp(self): - set_env_var("EXTRN_MDL_NAME_LBCS", "RAP") - set_env_var("EXTRN_MDL_LBCS_OFFSET_HRS", None) diff --git a/ush/set_gridparams_ESGgrid.py b/ush/set_gridparams_ESGgrid.py index ce8cd02ffe..02e6e5b542 100644 --- a/ush/set_gridparams_ESGgrid.py +++ b/ush/set_gridparams_ESGgrid.py @@ -13,7 +13,8 @@ ) -def set_gridparams_ESGgrid(lon_ctr, lat_ctr, nx, ny, halo_width, delx, dely, pazi): +def set_gridparams_ESGgrid(lon_ctr, lat_ctr, nx, ny, halo_width, delx, + dely, pazi, constants): """Sets the parameters for a grid that is to be generated using the "ESGgrid" grid generation method (i.e. GRID_GEN_METHOD set to "ESGgrid"). 
@@ -26,6 +27,7 @@ def set_gridparams_ESGgrid(lon_ctr, lat_ctr, nx, ny, halo_width, delx, dely, paz delx dely pazi + constants: dictionary of SRW constants Returns: Tuple of inputs, and 4 outputs (see return statement) """ @@ -33,10 +35,8 @@ def set_gridparams_ESGgrid(lon_ctr, lat_ctr, nx, ny, halo_width, delx, dely, paz print_input_args(locals()) # get constants - IMPORTS = ["RADIUS_EARTH", "DEGS_PER_RADIAN"] - USHdir = os.path.dirname(os.path.abspath(__file__)) - constants_cfg = load_config_file(os.path.join(USHdir,"constants.yaml")) - import_vars(dictionary=flatten_dict(constants_cfg), env_vars=IMPORTS) + RADIUS_EARTH = constants['RADIUS_EARTH'] + DEGS_PER_RADIAN = constants['DEGS_PER_RADIAN'] # # ----------------------------------------------------------------------- @@ -60,28 +60,6 @@ def set_gridparams_ESGgrid(lon_ctr, lat_ctr, nx, ny, halo_width, delx, dely, paz # It turns out that the program will work if we set stretch_factor to a # value that is not exactly 1. This is what we do below. # - # ----------------------------------------------------------------------- - # - stretch_factor = 0.999 # Check whether the orography program has been fixed so that we can set this to 1... - # - # ----------------------------------------------------------------------- - # - # Set parameters needed as inputs to the regional_grid grid generation - # code. - # - # ----------------------------------------------------------------------- - # - del_angle_x_sg = (delx / (2.0 * RADIUS_EARTH)) * DEGS_PER_RADIAN - del_angle_y_sg = (dely / (2.0 * RADIUS_EARTH)) * DEGS_PER_RADIAN - neg_nx_of_dom_with_wide_halo = -(nx + 2 * halo_width) - neg_ny_of_dom_with_wide_halo = -(ny + 2 * halo_width) - # - # ----------------------------------------------------------------------- - # - # return output variables. 
- # - # ----------------------------------------------------------------------- - # return { "LON_CTR": lon_ctr, "LAT_CTR": lat_ctr, @@ -89,11 +67,11 @@ def set_gridparams_ESGgrid(lon_ctr, lat_ctr, nx, ny, halo_width, delx, dely, paz "NY": ny, "PAZI": pazi, "NHW": halo_width, - "STRETCH_FAC": stretch_factor, - "DEL_ANGLE_X_SG": del_angle_x_sg, - "DEL_ANGLE_Y_SG": del_angle_y_sg, - "NEG_NX_OF_DOM_WITH_WIDE_HALO": int(neg_nx_of_dom_with_wide_halo), - "NEG_NY_OF_DOM_WITH_WIDE_HALO": int(neg_ny_of_dom_with_wide_halo), + "STRETCH_FAC": 0.999, + "DEL_ANGLE_X_SG": (delx / (2.0 * RADIUS_EARTH)) * DEGS_PER_RADIAN, + "DEL_ANGLE_Y_SG": (dely / (2.0 * RADIUS_EARTH)) * DEGS_PER_RADIAN, + "NEG_NX_OF_DOM_WITH_WIDE_HALO": int(-(nx + 2 * halo_width)), + "NEG_NY_OF_DOM_WITH_WIDE_HALO": int(-(ny + 2 * halo_width)), } @@ -109,6 +87,10 @@ def test_set_gridparams_ESGgrid(self): halo_width=6, delx=3000.0, dely=3000.0, + constants=dict( + RADIUS_EARTH=6371200.0, + DEGS_PER_RADIAN=57.2957795131, + ) ) self.assertEqual( diff --git a/ush/set_gridparams_GFDLgrid.py b/ush/set_gridparams_GFDLgrid.py index 47253a1c73..838c2f550e 100644 --- a/ush/set_gridparams_GFDLgrid.py +++ b/ush/set_gridparams_GFDLgrid.py @@ -38,6 +38,9 @@ def set_gridparams_GFDLgrid( iend_of_t7_on_t6g, jstart_of_t7_on_t6g, jend_of_t7_on_t6g, + verbose, + nh4, + run_envir, ): """Sets the parameters for a grid that is to be generated using the "GFDLgrid" grid generation method (i.e. GRID_GEN_METHOD set to "ESGgrid"). 
@@ -51,21 +54,16 @@ def set_gridparams_GFDLgrid( istart_of_t7_on_t6g iend_of_t7_on_t6g jstart_of_t7_on_t6g - jend_of_t7_on_t6g): + jend_of_t7_on_t6g + verbose + nh4 + run_envir Returns: Tuple of inputs and outputs (see return statement) """ print_input_args(locals()) - # get needed environment variables - IMPORTS = ["VERBOSE", "RUN_ENVIR"] - import_vars(env_vars=IMPORTS) - IMPORTS = ["NH4"] - USHdir = os.path.dirname(os.path.abspath(__file__)) - constants_cfg = load_config_file(os.path.join(USHdir,"constants.yaml")) - import_vars(dictionary=flatten_dict(constants_cfg), env_vars=IMPORTS) - # # ----------------------------------------------------------------------- # @@ -94,7 +92,7 @@ def set_gridparams_GFDLgrid( # This if-statement can hopefully be removed once EMC agrees to make their # GFDLgrid type grids (tile 7) symmetric about tile 6. - if RUN_ENVIR != "nco": + if run_envir != "nco": if num_left_margin_cells_on_t6g != num_right_margin_cells_on_t6g: print_err_msg_exit( f""" @@ -120,7 +118,7 @@ def set_gridparams_GFDLgrid( # This if-statement can hopefully be removed once EMC agrees to make their # GFDLgrid type grids (tile 7) symmetric about tile 6. 
- if RUN_ENVIR != "nco": + if run_envir != "nco": if num_bot_margin_cells_on_t6g != num_top_margin_cells_on_t6g: print_err_msg_exit( f""" @@ -259,7 +257,7 @@ def set_gridparams_GFDLgrid( # # ----------------------------------------------------------------------- # - halo_width_on_t7g = NH4 + 1 + halo_width_on_t7g = nh4 + 1 halo_width_on_t6sg = ( 2 * halo_width_on_t7g + refine_ratio_t6g_to_t7g - 1 ) / refine_ratio_t6g_to_t7g @@ -335,7 +333,7 @@ def set_gridparams_GFDLgrid( tile 7 grid are: halo_width_on_t6sg = {halo_width_on_t6sg} halo_width_on_t7g = {halo_width_on_t7g}""", - verbose=VERBOSE, + verbose=verbose, ) halo_width_on_t6sg = istart_of_t7_on_t6sg - istart_of_t7_with_halo_on_t6sg @@ -348,7 +346,7 @@ def set_gridparams_GFDLgrid( AFTER adjustments are: halo_width_on_t6sg = {halo_width_on_t6sg} halo_width_on_t7g = {halo_width_on_t7g}""", - verbose=VERBOSE, + verbose=verbose, ) # # ----------------------------------------------------------------------- @@ -421,7 +419,7 @@ def set_gridparams_GFDLgrid( determining an MPI task layout): prime_factors_nx_of_t7_on_t7g: {prime_factors_nx_of_t7_on_t7g} prime_factors_ny_of_t7_on_t7g: {prime_factors_ny_of_t7_on_t7g}""", - verbose=VERBOSE, + verbose=verbose, ) # # ----------------------------------------------------------------------- @@ -450,7 +448,7 @@ def set_gridparams_GFDLgrid( nx_of_t7_with_halo_on_t7g = {nx_of_t7_with_halo_on_t7g} (istart_of_t7_with_halo_on_t6sg = {istart_of_t7_with_halo_on_t6sg}, iend_of_t7_with_halo_on_t6sg = {iend_of_t7_with_halo_on_t6sg})""", - verbose=VERBOSE, + verbose=verbose, ) print_info_msg( @@ -458,7 +456,7 @@ def set_gridparams_GFDLgrid( ny_of_t7_with_halo_on_t7g = {ny_of_t7_with_halo_on_t7g} (jstart_of_t7_with_halo_on_t6sg = {jstart_of_t7_with_halo_on_t6sg}, jend_of_t7_with_halo_on_t6sg = {jend_of_t7_with_halo_on_t6sg})""", - verbose=VERBOSE, + verbose=verbose, ) # # ----------------------------------------------------------------------- @@ -493,6 +491,9 @@ def 
test_set_gridparams_GFDLgrid(self): iend_of_t7_on_t6g=84, jstart_of_t7_on_t6g=17, jend_of_t7_on_t6g=80, + run_envir="community", + verbose=True, + nh4=4, ) self.assertEqual( @@ -501,6 +502,4 @@ ) def setUp(self): - set_env_var("DEBUG", True) - set_env_var("VERBOSE", True) - set_env_var("NH4", 4) + pass diff --git a/ush/set_ozone_param.py b/ush/set_ozone_param.py index 5ed4449fe1..e9a0298924 100644 --- a/ush/set_ozone_param.py +++ b/ush/set_ozone_param.py @@ -1,5 +1,6 @@ #!/usr/bin/env python3 +import copy import os import unittest from textwrap import dedent @@ -17,7 +18,7 @@ find_pattern_in_str, ) -def set_ozone_param(ccpp_phys_suite_fp): +def set_ozone_param(ccpp_phys_suite_fp, link_mappings): """Function that does the following: (1) Determines the ozone parameterization being used by checking in the CCPP physics suite XML. @@ -25,31 +26,24 @@ (2) Sets the name of the global ozone production/loss file in the FIXgsm FIXgsm system directory to copy to the experiment's FIXam directory. - (3) Resets the last element of the workflow array variable - FIXgsm_FILES_TO_COPY_TO_FIXam that contains the files to copy from - FIXgsm to FIXam (this last element is initially set to a dummy - value) to the name of the ozone production/loss file set in the - previous step. - - (4) Resets the element of the workflow array variable - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING (this array contains the - mapping between the symlinks to create in any cycle directory and - the files in the FIXam directory that are their targets) that - specifies the mapping for the ozone symlink/file such that the - target FIXam file name is set to the name of the ozone production/ - loss file set above. + (3) Updates the symlink for the ozone file provided in link_mappings + list to include the name of global ozone production/loss file. 
Args: ccpp_phys_suite_fp: full path to CCPP physics suite + link_mappings: list of mappings between symlinks and their + target files for this experiment Returns: ozone_param: a string + fixgsm_ozone_fn: a path to a fix file that should be used with + this experiment + ozone_link_mappings: a list of mappings for the files needed for + this experiment + """ print_input_args(locals()) - # import all environment variables - import_vars() - # # ----------------------------------------------------------------------- # @@ -95,17 +89,6 @@ def set_ozone_param(ccpp_phys_suite_fp): # # ----------------------------------------------------------------------- # - # Set the last element of the array FIXgsm_FILES_TO_COPY_TO_FIXam to the - # name of the ozone production/loss file to copy from the FIXgsm to the - # FIXam directory. - # - # ----------------------------------------------------------------------- - # - i = len(FIXgsm_FILES_TO_COPY_TO_FIXam) - 1 - FIXgsm_FILES_TO_COPY_TO_FIXam[i] = f"{fixgsm_ozone_fn}" - # - # ----------------------------------------------------------------------- - # # Set the element in the array CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING that # specifies the mapping between the symlink for the ozone production/loss # file that must be created in each cycle directory and its target in the @@ -113,50 +96,30 @@ def set_ozone_param(ccpp_phys_suite_fp): # the target is not because it depends on the ozone parameterization that # the physics suite uses. Since we determined the ozone parameterization # above, we now set the target of the symlink accordingly. + + # + # ----------------------------------------------------------------------- + # + # Set the mapping between the symlink and the target file we just + # found. The link name is already in the list, but the target file + # is not. 
# # ----------------------------------------------------------------------- # + ozone_symlink = "global_o3prdlos.f77" fixgsm_ozone_fn_is_set = False - regex_search = "^[ ]*([^| ]*)[ ]*[|][ ]*([^| ]*)[ ]*$" - num_symlinks = len(CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING) - - for i in range(num_symlinks): - mapping = CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING[i] - symlink = find_pattern_in_str(regex_search, mapping) - if symlink is not None: - symlink = symlink[0] - if symlink == ozone_symlink: - regex_search = "^[ ]*([^| ]+[ ]*)[|][ ]*([^| ]*)[ ]*$" - mapping_ozone = find_pattern_in_str(regex_search, mapping)[0] - mapping_ozone = f"{mapping_ozone}| {fixgsm_ozone_fn}" - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING[i] = f"{mapping_ozone}" + + ozone_link_mappings = copy.deepcopy(link_mappings) + for i, mapping in enumerate(ozone_link_mappings): + symlink = mapping.split('|')[0] + if symlink.strip() == ozone_symlink: + ozone_link_mappings[i] = f"{symlink}| {fixgsm_ozone_fn}" fixgsm_ozone_fn_is_set = True break - # - # ----------------------------------------------------------------------- - # - # If fixgsm_ozone_fn_is_set is set to True, then the appropriate element - # of the array CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING was set successfully. - # In this case, print out the new version of this array. Otherwise, print - # out an error message and exit. 
- # - # ----------------------------------------------------------------------- - # - if fixgsm_ozone_fn_is_set: - log_info( - f""" - After setting the file name of the ozone production/loss file in the - FIXgsm directory (based on the ozone parameterization specified in the - CCPP suite definition file), the array specifying the mapping between - the symlinks that need to be created in the cycle directories and the - files in the FIXam directory is: - """, verbose=VERBOSE) - log_info(f""" - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = {list_to_str(CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING)} - """, verbose=VERBOSE, dedent_=False) - else: + # Make sure the list has been updated + if not fixgsm_ozone_fn_is_set: raise Exception( f''' @@ -166,10 +129,7 @@ def set_ozone_param(ccpp_phys_suite_fp): fixgsm_ozone_fn_is_set = \"{fixgsm_ozone_fn_is_set}\"''' ) - EXPORTS = ["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING", "FIXgsm_FILES_TO_COPY_TO_FIXam"] - export_vars(env_vars=EXPORTS) - - return ozone_param + return ozone_param, fixgsm_ozone_fn, ozone_link_mappings class Testing(unittest.TestCase): @@ -179,15 +139,14 @@ def test_set_ozone_param(self): "ozphys_2015", set_ozone_param( ccpp_phys_suite_fp=f"{USHdir}{os.sep}test_data{os.sep}suite_FV3_GSD_SAR.xml" + self.CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING, ), ) def setUp(self): define_macos_utilities() - set_env_var("DEBUG", True) - set_env_var("VERBOSE", True) - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = [ + self.CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = [ "aerosol.dat | global_climaeropac_global.txt", "co2historicaldata_2010.txt | fix_co2_proj/global_co2historicaldata_2010.txt", "co2historicaldata_2011.txt | fix_co2_proj/global_co2historicaldata_2011.txt", @@ -209,41 +168,3 @@ def setUp(self): "solarconstant_noaa_an.txt | global_solarconstant_noaa_an.txt", "global_o3prdlos.f77 | ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77", ] - FIXgsm_FILES_TO_COPY_TO_FIXam = [ - "global_glacier.2x2.grb", - "global_maxice.2x2.grb", - 
"RTGSST.1982.2012.monthly.clim.grb", - "global_snoclim.1.875.grb", - "CFSR.SEAICE.1982.2012.monthly.clim.grb", - "global_soilmgldas.t126.384.190.grb", - "seaice_newland.grb", - "global_climaeropac_global.txt", - "fix_co2_proj/global_co2historicaldata_2010.txt", - "fix_co2_proj/global_co2historicaldata_2011.txt", - "fix_co2_proj/global_co2historicaldata_2012.txt", - "fix_co2_proj/global_co2historicaldata_2013.txt", - "fix_co2_proj/global_co2historicaldata_2014.txt", - "fix_co2_proj/global_co2historicaldata_2015.txt", - "fix_co2_proj/global_co2historicaldata_2016.txt", - "fix_co2_proj/global_co2historicaldata_2017.txt", - "fix_co2_proj/global_co2historicaldata_2018.txt", - "fix_co2_proj/global_co2historicaldata_2019.txt", - "fix_co2_proj/global_co2historicaldata_2020.txt", - "fix_co2_proj/global_co2historicaldata_2021.txt", - "global_co2historicaldata_glob.txt", - "co2monthlycyc.txt", - "global_h2o_pltc.f77", - "global_hyblev.l65.txt", - "global_zorclim.1x1.grb", - "global_sfc_emissivity_idx.txt", - "global_solarconstant_noaa_an.txt", - "geo_em.d01.lat-lon.2.5m.HGT_M.nc", - "HGT.Beljaars_filtered.lat-lon.30s_res.nc", - "ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77", - ] - - set_env_var( - "CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING", - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING, - ) - set_env_var("FIXgsm_FILES_TO_COPY_TO_FIXam", FIXgsm_FILES_TO_COPY_TO_FIXam) diff --git a/ush/set_thompson_mp_fix_files.py b/ush/set_thompson_mp_fix_files.py index 93dc3c5de6..69a2d6b204 100644 --- a/ush/set_thompson_mp_fix_files.py +++ b/ush/set_thompson_mp_fix_files.py @@ -17,7 +17,8 @@ ) -def set_thompson_mp_fix_files(ccpp_phys_suite_fp, thompson_mp_climo_fn): +def set_thompson_mp_fix_files(ccpp_phys_suite_fp, thompson_mp_climo_fn, + link_thompson_climo): """Function that first checks whether the Thompson microphysics parameterization is being called by the selected physics suite. 
If not, it sets the output variable whose name is specified by @@ -31,15 +32,13 @@ def set_thompson_mp_fix_files(ccpp_phys_suite_fp, thompson_mp_climo_fn): Args: ccpp_phys_suite_fp: full path to CCPP physics suite thompson_mp_climo_fn: netcdf file for thompson microphysics + link_thompson_climo: whether to use the thompson climo file Returns: boolean: sdf_uses_thompson_mp """ print_input_args(locals()) - # import all environment variables - import_vars() - # # ----------------------------------------------------------------------- # @@ -78,48 +77,14 @@ def set_thompson_mp_fix_files(ccpp_phys_suite_fp, thompson_mp_climo_fn): "qr_acr_qsV2.dat", ] - if (EXTRN_MDL_NAME_ICS != "HRRR" and EXTRN_MDL_NAME_ICS != "RAP") or ( - EXTRN_MDL_NAME_LBCS != "HRRR" and EXTRN_MDL_NAME_LBCS != "RAP" - ): + if link_thompson_climo: thompson_mp_fix_files.append(thompson_mp_climo_fn) - FIXgsm_FILES_TO_COPY_TO_FIXam.extend(thompson_mp_fix_files) - + mapping = [] for fix_file in thompson_mp_fix_files: - mapping = f"{fix_file} | {fix_file}" - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING.append(mapping) - - log_info( - f""" - Since the Thompson microphysics parameterization is being used by this - physics suite (CCPP_PHYS_SUITE), the names of the fixed files needed by - this scheme have been appended to the array FIXgsm_FILES_TO_COPY_TO_FIXam, - and the mappings between these files and the symlinks that need to be - created in the cycle directories have been appended to the array - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING. 
After these modifications, the - values of these parameters are as follows: - - CCPP_PHYS_SUITE = \"{CCPP_PHYS_SUITE}\" - """ - ) - log_info( - f""" - FIXgsm_FILES_TO_COPY_TO_FIXam = {list_to_str(FIXgsm_FILES_TO_COPY_TO_FIXam)} - """ - ) - log_info( - f""" - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = {list_to_str(CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING)} - """ - ) - - EXPORTS = [ - "CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING", - "FIXgsm_FILES_TO_COPY_TO_FIXam", - ] - export_vars(env_vars=EXPORTS) + mapping.append(f"{fix_file} | {fix_file}") - return sdf_uses_thompson_mp + return sdf_uses_thompson_mp, mapping, thompson_mp_fix_files class Testing(unittest.TestCase): diff --git a/ush/setup.py b/ush/setup.py index b12e47c114..a17990debc 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -28,6 +28,7 @@ load_shell_config, load_ini_config, get_ini_value, + str_to_list, ) from set_cycle_dates import set_cycle_dates @@ -41,11 +42,12 @@ from set_thompson_mp_fix_files import set_thompson_mp_fix_files -def load_config_for_setup(default_config, user_config): +def load_config_for_setup(ushdir, default_config, user_config): """ Load in the default, machine, and user configuration files into - Python dictionaries. Return the combined dictionary. + Python dictionaries. Return the combined experiment dictionary. Args: + ushdir (str): Path to the ush directory for SRW default_config (str): Path to the default config YAML user_config (str): Path to the user-provided config YAML @@ -56,7 +58,6 @@ def load_config_for_setup(default_config, user_config): # Load the default config. cfg_d = load_config_file(default_config) - # Load the user config file, then ensure all user-specified # variables correspond to a default value. 
if not os.path.exists(user_config): @@ -90,7 +91,7 @@ def load_config_for_setup(default_config, user_config): # Load the machine config file machine = cfg_u.get('user').get('MACHINE') - machine_file = os.path.join(USHdir, "machine", f"{lowercase(MACHINE)}.yaml") + machine_file = os.path.join(ushdir, "machine", f"{lowercase(machine)}.yaml") if not os.path.exists(machine_file): raise FileNotFoundError(dedent( @@ -102,23 +103,139 @@ def load_config_for_setup(default_config, user_config): machine_cfg = load_config_file(machine_file) # Load the constants file - cfg_c = load_config_file(os.path.join(USHdir, "constants.yaml")) + cfg_c = load_config_file(os.path.join(ushdir, "constants.yaml")) # Update default config with the constants, the machine config, and # then the user_config + # Recall: update_dict updates the second dictionary with the first, + # and so, we update the default config settings in place with all + # the others. + + # Constants update_dict(cfg_c, cfg_d) + + # Machine settings update_dict(machine_cfg, cfg_d) + + # User settings (take precedence over all others) update_dict(cfg_u, cfg_d) + # Do any conversions of data types + for sect, settings in cfg_d.items(): + for k, v in settings.items(): + if not (v is None or v == ""): + cfg_d[sect][k] = str_to_list(v) + + # Mandatory variables *must* be set in the user's config or the machine file; the default value is invalid + mandatory = ['NCORES_PER_NODE', 'FIXgsm', 'FIXaer', 'FIXlut', 'TOPO_DIR', 'SFC_CLIMO_INPUT_DIR'] + for val in mandatory: + if not cfg_d.get('task_run_fcst', {}).get('val'): + raise Exception(dedent(f''' + Mandatory variable "{val}" not found in: + user config file {user_config} + OR + machine file {machine_file} + ''')) + + # Check that input dates are in a date format + dates = ['DATE_FIRST_CYCL', 'DATE_LAST_CYCL'] + for val in dates: + if not isinstance(cfg_d['user'][val], datetime.date): + raise Exception(dedent(f''' + Date variable {val}={cfg_d['user'][val]} is not in a valid date 
format. + + For examples of valid formats, see the Users' Guide. + ''')) + + # Check to make sure mandatory workflow variables are set. + vlist = ['EXPT_SUBDIR'] + for val in vlist: + if not cfg_d['task_run_fcst'].get('val') + raise Exception(f"\nMandatory variable '{val}' has not been set\n") + + # Check to make sure that mandatory forecast variables are set. + vlist = ['DT_ATMOS', + 'LAYOUT_X', + 'LAYOUT_Y', + 'BLOCKSIZE', + ] + for val in vlist: + if not cfg_d['task_run_fcst'].get('val') + raise Exception(f"\nMandatory variable '{val}' has not been set\n") + + return cfg_d +def set_srw_paths(ushdir, expt_config): + + """ + Generate a dictionary of directories that describe the SRW + structure, i.e., where SRW is installed, and the paths to + external repositories managed via the manage_externals tool. + + Other paths for SRW are set as defaults in config_defaults.yaml + + Args: + ushdir: (str) path to the system location of the ush/ directory + under the SRW clone + expt_config: (dict) contains the configuration settings for the + user-defined experiment + + Returns: + dictionary of config settings and system paths as keys/values + """ + + # HOMEdir is the location of the SRW clone, one directory above ush/ + home_dir = os.path.abspath( + os.path.dirname(__file__) + os.sep + os.pardir + ) + + # Read Externals.cfg + mng_extrns_cfg_fn = os.path.join(home_dir, "Externals.cfg") + try: + mng_extrns_cfg_fn = os.readlink(mng_extrns_cfg_fn) + except: + pass + cfg = load_ini_config(mng_extrns_cfg_fn) + + # Get the base directory of the FV3 forecast model code. 
+ external_name = expt_config.get('workflow', {}).get('FCST_MODEL') + property_name = "local_path" + + try: + ufs_wthr_mdl_dir = get_ini_value(cfg, external_name, property_name) + except KeyError: + errmsg = dedent(f''' + Externals configuration file {mng_extrns_cfg_fn} + does not contain "{external_name}".''') + raise Exception(errmsg) from None + + + # Check that the model code has been downloaded + ufs_wthr_mdl_dir = os.path.join(home_dir, ufs_wthr_mdl_dir) + if not os.path.exists(ufs_wthr_mdl_dir): + raise FileNotFoundError(dedent( + f""" + The base directory in which the FV3 source code should be located + (UFS_WTHR_MDL_DIR) does not exist: + UFS_WTHR_MDL_DIR = \"{ufs_wthr_mdl_dir}\" + Please clone the external repository containing the code in this directory, + build the executable, and then rerun the workflow.""" + )) + + return dict( + HOMEdir = home_dir, + USHdir = ushdir, + UFS_WTHR_MDL_DIR = ufs_wthr_mdl_dir, + ) + def setup(USHdir, user_config_fn="config.yaml"): - """Function that derives a secondary set of parameters needed to - configure a Rocoto-based SRW workflow. The derived parameters use a - set of required user-defined parameters defined by either - config_defaults.yaml, a user-provided configuration file - (config.yaml), or a YAML machine file. + """Function that validates user-provided configuration, and derives + a secondary set of parameters needed to configure a Rocoto-based SRW + workflow. The derived parameters use a set of required user-defined + parameters defined by either config_defaults.yaml, a user-provided + configuration file (config.yaml), or a YAML machine file. A set of global variable definitions is saved to the experiment directory as a bash configure file that is sourced by scripts at run @@ -148,83 +265,299 @@ def setup(USHdir, user_config_fn="config.yaml"): # user config files. 
default_config_fp = os.path.join(USHdir, "config_defaults.yaml") user_config_fp = os.path.join(USHdir, user_config_fn) - expt_config = load_config_for_setup(default_config_fp, user_config_fp) - - - # ics and lbcs - def get_location(xcs, fmt, expt_cfg): - if ("data" in expt_cfg) and (xcs in expt_cfg["data"]): - v = expt_cfg["data"][xcs] - if not isinstance(v,dict): - return v - else: - return v[fmt] - else: - return "" - - EXTRN_MDL_SYSBASEDIR_ICS = get_location( - expt_config.get('task_get_extrn_ics', {}).get('EXTRN_MDL_NAME_ICS'), - expt_config.get('task_get_extrn_ics', {}).get('FV3GFS_FILE_FMT_ICS'), - expt_cfg, - ) - EXTRN_MDL_SYSBASEDIR_LBCS = get_location( - expt_config.get('task_get_extrn_lbcs', {}).get('EXTRN_MDL_NAME_LBCS'), - expt_config.get('task_get_extrn_lbcs', {}).get('FV3GFS_FILE_FMT_LBCS'), - expt_cfg, - ) + expt_config = load_config_for_setup(USHdir, default_config_fp, user_config_fp) - # remove the data key and provide machine specific default values for cfg_d - if "data" in expt_config: - expt_config.pop("data") + # Set up some paths relative to the SRW clone + expt_config['user'].update(set_srw_paths(USHdir, expt_config)) - expt_config['task_get_extrn_ics']["EXTRN_MDL_SYSBASEDIR_ICS"] = EXTRN_MDL_SYSBASEDIR_ICS - expt_config['task_get_extrn_lbcs']["EXTRN_MDL_SYSBASEDIR_LBCS"] = EXTRN_MDL_SYSBASEDIR_LBCS # # ----------------------------------------------------------------------- # - # Generate a unique number for this workflow run. This may be used to - # get unique log file names for example + # Validate the experiment configuration starting with the workflow, + # then in rough order of the tasks in the workflow # # ----------------------------------------------------------------------- # + + + # Workflow + workflow_config = expt_config["workflow"] + + # Generate a unique number for this workflow run. 
This may be used to + # get unique log file names for example workflow_id = "id_" + str(int(datetime.datetime.now().timestamp())) - expt_config["workflow"]["WORKFLOW_ID"] = workflow_id + workflow_config["WORKFLOW_ID"] = workflow_id log_info(f"""WORKFLOW ID = {workflow_id}""") + debug = workflow_config.get('DEBUG') + if debug: + log_info( + """ + Setting VERBOSE to \"TRUE\" because DEBUG has been set to \"TRUE\"...""" + ) + workflow_config['VERBOSE'] = True + + verbose = workflow_config['VERBOSE'] + + # The forecast length (in integer hours) cannot contain more than 3 characters. + # Thus, its maximum value is 999. + fcst_len_hrs_max = 999 + fcst_len_hrs = workflow_config.get('FCST_LEN_HRS') + if fcst_len_hrs > fcst_len_hrs_max: + raise ValueError( + f""" + Forecast length is greater than maximum allowed length: + FCST_LEN_HRS = {fcst_len_hrs} + fcst_len_hrs_max = {fcst_len_hrs_max}""" + ) + + # + # ----------------------------------------------------------------------- + # + # If the base directory (EXPT_BASEDIR) in which the experiment subdirectory + # (EXPT_SUBDIR) will be located does not start with a "/", then it is + # either set to a null string or contains a relative directory. In both + # cases, prepend to it the absolute path of the default directory under + # which the experiment directories are placed. If EXPT_BASEDIR was set + # to a null string, it will get reset to this default experiment directory, + # and if it was set to a relative directory, it will get reset to an + # absolute directory that points to the relative directory under the + # default experiment directory. Then create EXPT_BASEDIR if it doesn't + # already exist. 
+ # + # ----------------------------------------------------------------------- + # + expt_basedir = workflow_config.get('EXPT_BASEDIR') + if (not expt_basedir) or (expt_basedir[0] != "/"): + if not expt_basedir: + expt_basedir = "" + expt_basedir = os.path.join(home_dir, "..", "expt_dirs", expt_basedir) + try: + expt_basedir = os.path.realpath(expt_basedir) + except: + pass + expt_basedir = os.path.abspath(expt_basedir) + + mkdir_vrfy(f' -p "{expt_basedir}"') + workflow_config['EXPT_BASEDIR'] = expt_basedir + + # + # ----------------------------------------------------------------------- + # + # Set the full path to the experiment directory. Then check if it already + # exists and if so, deal with it as specified by PREEXISTING_DIR_METHOD. + # + # ----------------------------------------------------------------------- + # + + expt_subdir = workflow_config.get('EXPT_SUBDIR', '') + exptdir = workflow_config['EXPTDIR'] + preexisting_dir_method = workflow_config.get('PREEXISTING_DIR_METHOD', '') + try: + check_for_preexist_dir_file(exptdir, preexisting_dir_method) + except ValueError: + logger.exception(f''' + Check that the following values are valid: + EXPTDIR {exptdir} + PREEXISTING_DIR_METHOD {preexisting_dir_method} + ''') + raise + except FileExistsError: + errmsg = dedent(f''' + EXPTDIR ({exptdir}) already exists, and PREEXISTING_DIR_METHOD = {preexisting_dir_method} + + To ignore this error, delete the directory, or set + PREEXISTING_DIR_METHOD = delete, or + PREEXISTING_DIR_METHOD = rename + in your config file. + ''') + raise FileExistsError(errmsg) from None + + + # + # ----------------------------------------------------------------------- + # + # Set cron table entry for relaunching the workflow if + # USE_CRON_TO_RELAUNCH is set to TRUE. 
+ # + # ----------------------------------------------------------------------- + # + if workflow_config.get('USE_CRON_TO_RELAUNCH'): + intvl_mnts = workflow_config.get('CRON_RELAUNCH_INTVL_MNTS') + launch_script_fn = workflow_config.get('WFLOW_LAUNCH_SCRIPT_FN') + launch_log_fn = workflow_config.get('WFLOW_LAUNCH_LOG_FN') + workflow_config['CRONTAB_LINE'] = ( + f"""*/{intvl_mnts} * * * * cd {exptdir} && """ + f"""./{launch_script_fn} called_from_cron="TRUE" >> ./{launch_log_fn} 2>&1""" + ) + # + # ----------------------------------------------------------------------- + # + # Check user settings against platform settings + # + # ----------------------------------------------------------------------- + # + + # Necessary tasks are turned on + pregen_basedir = expt_config['platform'].get('DOMAIN_PREGEN_BASEDIR') + if pregen_basedir is None and not \ + (run_task_make_grid and run_task_make_orog and run_task_make_sfc_climo): + raise Exception( + f""" + DOMAIN_PREGEN_BASEDIR must be set when any of the following + tasks are turned off: + RUN_TASK_MAKE_GRID = {run_task_make_grid} + RUN_TASK_MAKE_OROG = {run_task_make_orog} + RUN_TASK_MAKE_SFC_CLIMO = {run_task_make_sfc_climo}""" + ) + + # A batch system account is specified + if expt_config['platform'].get('WORKFLOW_MANAGER') is not None: + if not expt.get('user').get('ACCOUNT'): + raise Exception(dedent(f''' + ACCOUNT must be specified in config or machine file if using a workflow manager. 
+ WORKFLOW_MANAGER = {expt_config["platform"].get("WORKFLOW_MANAGER")}\n''' + )) + + + + # + # ----------------------------------------------------------------------- + # + # ICS and LBCS settings and validation + # + # ----------------------------------------------------------------------- + # + def get_location(xcs, fmt, expt_cfg): + if ("data" in expt_cfg) and (xcs in expt_cfg["data"]): + v = expt_cfg["data"][xcs] + if not isinstance(v,dict): + return v + else: + return v[fmt] + else: + return "" + + # Get the paths to any platform-supported data streams + get_extrn_ics = expt_config.get('task_get_extrn_ics', {}) + extrn_mdl_sysbasedir_ics = get_location( + get_extrn_ics.get('EXTRN_MDL_NAME_ICS'), + get_extrn_ics.get('FV3GFS_FILE_FMT_ICS'), + expt_config, + ) + get_extrn_ics["EXTRN_MDL_SYSBASEDIR_ICS"] = extrn_mdl_sysbasedir_ics + + get_extrn_lbcs = expt_config.get('task_get_extrn_lbcs', {}) + extrn_mdl_sysbasedir_lbcs = get_location( + get_extrn_lbcs.get('EXTRN_MDL_NAME_LBCS'), + get_extrn_lbcs.get('FV3GFS_FILE_FMT_LBCS'), + expt_config, + ) + get_extrn_lbcs["EXTRN_MDL_SYSBASEDIR_LBCS"] = extrn_mdl_sysbasedir_lbcs + + # remove the data key -- it's not needed beyond this point + if "data" in expt_config: + expt_config.pop("data") + + # Check for the user-specified directories for external model files if + # USE_USER_STAGED_EXTRN_FILES is set to TRUE + task_keys = zip( + [get_extrn_ics, get_extrn_lbcs], + ['EXTRN_MDL_SOURCE_BASEDIR_ICS', 'EXTRN_MDL_SOURCE_BASEDIR_LBCS'], + ) + + for task, data_key in task_keys: + use_staged_extrn_files = task.get('USE_USER_STAGED_EXTRN_FILES') + if use_staged_extrn_files: + basedir = task[data_key] + # Check for the base directory up to the first templated field. + idx = basedir.find("$") + if idx == -1: + idx = len(basedir) + + if not os.path.exists(basedir[:idx]): + raise FileNotFoundError( + f''' + The user-staged-data directory does not exist. + Please point to the correct path where your external + model files are stored. 
+ {data_key} = \"{basedir}\"''' + ) + # # ----------------------------------------------------------------------- # - # If PREDEF_GRID_NAME is set to a non-empty string, set or reset parameters - # according to the predefined domain specified. + # Forecast settings # # ----------------------------------------------------------------------- # - if expt_config.get('task_run_fcst', {}).get('PREDEF_GRID_NAME'): - grid_params = set_predef_grid_params(USHdir, expt_config['task_run_fcst']) + + # Gather the pre-defined grid parameters, if needed + fcst_config = expt_config['task_run_fcst'] + grid_config = expt_confg['task_make_grid'] + if fcst_config.get('PREDEF_GRID_NAME'): + grid_params = set_predef_grid_params(USHdir, fcst_config) # Users like to change these variables, so don't overwrite them special_vars = ["DT_ATMOS", "LAYOUT_X", "LAYOUT_Y", "BLOCKSIZE"] for param, value in grid_params.items(): if param in special_vars and - expt_config.get('task_run_fcst', {}).get(param) is not None: + fcst_config.get(param) is not None: continue + elif param.startswith('WRTCMP'): + fcst_config[param] = value else: - expt_config['task_run_fcst'][param] = value + grid_config[param] = value # # ----------------------------------------------------------------------- # - # Make sure different variables are set to their corresponding valid value + # Set parameters according to the type of horizontal grid generation + # method specified. 
# # ----------------------------------------------------------------------- # - if expt_config.get('workflow', {}).get('DEBUG'): - log_info( - """ - Setting VERBOSE to \"TRUE\" because DEBUG has been set to \"TRUE\"...""" + grid_gen_method = workflow_config['GRID_GEN_METHOD'] + if grid_gen_method == "GFDLgrid": + grid_params = set_gridparams_GFDLgrid( + lon_of_t6_ctr=grid_config['GFDLgrid_LON_T6_CTR'], + lat_of_t6_ctr=grid_config['GFDLgrid_LAT_T6_CTR'], + res_of_t6g=grid_config['GFDLgrid_NUM_CELLS'], + stretch_factor=grid_config['GFDLgrid_STRETCH_FAC'], + refine_ratio_t6g_to_t7g=grid_config['GFDLgrid_REFINE_RATIO'], + istart_of_t7_on_t6g=grid_config['GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G'], + iend_of_t7_on_t6g=grid_config['GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G'], + jstart_of_t7_on_t6g=grid_config['GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G'], + jend_of_t7_on_t6g=grid_config['GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G'], + verbose=verbose, + nh4=expt_config['constants']['NH4'], + run_envir=run_envir, ) - VERBOSE = True + elif grid_gen_method == "ESGgrid": + grid_params = set_gridparams_ESGgrid( + lon_ctr=grid_config['ESGgrid_LON_CTR'], + lat_ctr=grid_config['ESGgrid_LAT_CTR'], + nx=grid_config['ESGgrid_NX'], + ny=grid_config['ESGgrid_NY'], + pazi=grid_config['ESGgrid_PAZI'], + halo_width=grid_config['ESGgrid_WIDE_HALO_WIDTH'], + delx=grid_config['ESGgrid_DELX'], + dely=grid_config['ESGgrid_DELY'], + constants=expt_config['constants'], + ) + else: + grid_params = { + "LON_CTR": LON_CTR, + "LAT_CTR": LAT_CTR, + "NX": NX, + "NY": NY, + "NHW": NHW, + "STRETCH_FAC": STRETCH_FAC, + } + + # Add a grid parameter section to the experiment config + expt_config["grid_params"] = grid_params # # ----------------------------------------------------------------------- @@ -237,8 +570,6 @@ def get_location(xcs, fmt, expt_cfg): # # ----------------------------------------------------------------------- # - # Alias to save some space below. Also, make sure the section - # exists! 
global_sect = expt_config['global'] if not global_sect.get('DO_SHUM'): global_sect['SHUM_MAG'] = -999.0 @@ -339,544 +670,204 @@ def get_location(xcs, fmt, expt_cfg): {lsm_spp_var} (length {len(global_sect[lsm_spp_var])} ''' ) - # - # The current script should be located in the ush subdirectory of the - # workflow directory. Thus, the SRW home directory is the one above the - # directory of the current script. - # - HOMEdir = os.path.abspath( - os.path.dirname(__file__) + os.sep + os.pardir - ) - - # - # ----------------------------------------------------------------------- - # - # Set the base directories in which codes obtained from external - # repositories (using the manage_externals tool) are placed. Obtain the - # rela- tive paths to these directories by reading them in from the - # manage_externals configuration file. (Note that these are relative to the - # lo- cation of the configuration file.) Then form the full paths to these - # directories. Finally, make sure that each of these directories actually - # exists. - # - # ----------------------------------------------------------------------- - # - mng_extrns_cfg_fn = os.path.join(HOMEdir, "Externals.cfg") - try: - mng_extrns_cfg_fn = os.readlink(mng_extrns_cfg_fn) - except: - pass - cfg = load_ini_config(mng_extrns_cfg_fn) - # - # Get the base directory of the FV3 forecast model code. 
- # - external_name = FCST_MODEL - property_name = "local_path" - try: - UFS_WTHR_MDL_DIR = get_ini_value(cfg, external_name, property_name) - except KeyError: - errmsg = dedent(f''' - Externals configuration file {mng_extrns_cfg_fn} - does not contain "{external_name}".''') - raise Exception(errmsg) from None + # Make sure RESTART_INTERVAL is set to an integer value + restart_interval = fcst_config.get('RESTART_INTERVAL') + if not isinstance(restart_interval, int): + try: + fcst_config['RESTART_INTERVAL'] = int(restart_interval) + except ValueError: + raise ValueError(f"\nRESTART_INTERVAL = {restart_interval}, must be an integer value\n") + # Check whether the forecast length (FCST_LEN_HRS) is evenly divisible + # by the BC update interval (LBC_SPEC_INTVL_HRS). If so, generate an + # array of forecast hours at which the boundary values will be updated. - UFS_WTHR_MDL_DIR = os.path.join(HOMEdir, UFS_WTHR_MDL_DIR) - if not os.path.exists(UFS_WTHR_MDL_DIR): - raise FileNotFoundError(dedent( + lbc_spec_intvl_hrs = get_extrn_lbcs.get('LBC_SPEC_INTVL_HRS') + rem = fcst_len_hrs % lbc_spec_intvl_hrs + if rem != 0: + raise Exception( f""" - The base directory in which the FV3 source code should be located - (UFS_WTHR_MDL_DIR) does not exist: - UFS_WTHR_MDL_DIR = \"{UFS_WTHR_MDL_DIR}\" - Please clone the external repository containing the code in this directory, - build the executable, and then rerun the workflow.""" - )) - # - # Define some other useful paths - # - global SCRIPTSdir, JOBSdir, SORCdir, PARMdir, MODULESdir - global EXECdir, PARMdir, FIXdir, VX_CONFIG_DIR, METPLUS_CONF, MET_CONFIG - - SCRIPTSdir = os.path.join(HOMEdir, "scripts") - JOBSdir = os.path.join(HOMEdir, "jobs") - SORCdir = os.path.join(HOMEdir, "sorc") - PARMdir = os.path.join(HOMEdir, "parm") - MODULESdir = os.path.join(HOMEdir, "modulefiles") - EXECdir = os.path.join(HOMEdir, EXEC_SUBDIR) - VX_CONFIG_DIR = PARMdir - METPLUS_CONF = os.path.join(PARMdir, "metplus") - MET_CONFIG = 
os.path.join(PARMdir, "met") + The forecast length (FCST_LEN_HRS) is not evenly divisible by the lateral + boundary conditions update interval (LBC_SPEC_INTVL_HRS): + FCST_LEN_HRS = {fcst_len_hrs} + LBC_SPEC_INTVL_HRS = {lbc_spec_intvl_hrs} + rem = FCST_LEN_HRS%%LBC_SPEC_INTVL_HRS = {rem}""" + ) # # ----------------------------------------------------------------------- # - # Source the machine config file containing architechture information, - # queue names, and supported input file paths. + # Post-processing validation and settings # # ----------------------------------------------------------------------- # - global FIXgsm, FIXaer, FIXlut, TOPO_DIR, SFC_CLIMO_INPUT_DIR, DOMAIN_PREGEN_BASEDIR - global RELATIVE_LINK_FLAG, WORKFLOW_MANAGER, NCORES_PER_NODE, SCHED, QUEUE_DEFAULT - global QUEUE_HPSS, QUEUE_FCST, PARTITION_DEFAULT, PARTITION_HPSS, PARTITION_FCST - - RELATIVE_LINK_FLAG = "--relative" - - # Mandatory variables *must* be set in the user's config or the machine file; the default value is invalid - mandatory = ['NCORES_PER_NODE', 'FIXgsm', 'FIXaer', 'FIXlut', 'TOPO_DIR', 'SFC_CLIMO_INPUT_DIR'] - globalvars = globals() - for val in mandatory: - # globals() returns dictionary of global variables - if not globalvars[val]: - raise Exception(dedent(f''' - Mandatory variable "{val}" not found in: - user config file {EXPT_CONFIG_FN} - OR - machine file {MACHINE_FILE} - ''')) - - # - # ----------------------------------------------------------------------- - # - # Set the names of the build and workflow module files (if not - # already specified by the user). These are the files that need to be - # sourced before building the component SRW App codes and running various - # workflow scripts, respectively. 
- # - # ----------------------------------------------------------------------- - # - global WFLOW_MOD_FN, BUILD_MOD_FN, BUILD_VER_FN, RUN_VER_FN - machine = lowercase(MACHINE) - WFLOW_MOD_FN = WFLOW_MOD_FN or f"wflow_{machine}" - BUILD_MOD_FN = BUILD_MOD_FN or f"build_{machine}_{COMPILER}" - BUILD_VER_FN = BUILD_VER_FN or f"build.ver.{machine}" - RUN_VER_FN = RUN_VER_FN or f"run.ver.{machine}" - # - # ----------------------------------------------------------------------- - # - # Calculate a default value for the number of processes per node for the - # RUN_FCST_TN task. Then set PPN_RUN_FCST to this default value if - # PPN_RUN_FCST is not already specified by the user. - # - # ----------------------------------------------------------------------- - # - global PPN_RUN_FCST - ppn_run_fcst_default = NCORES_PER_NODE // OMP_NUM_THREADS_RUN_FCST - PPN_RUN_FCST = PPN_RUN_FCST or ppn_run_fcst_default - # - # ----------------------------------------------------------------------- - # - # If we are using a workflow manager check that the ACCOUNT variable is - # not empty. - # - # ----------------------------------------------------------------------- - # - if WORKFLOW_MANAGER is not None: - if not ACCOUNT: - raise Exception(dedent(f''' - ACCOUNT must be specified in config or machine file if using a workflow manager. - WORKFLOW_MANAGER = {WORKFLOW_MANAGER}\n''' - )) - # - # ----------------------------------------------------------------------- - # - # Set the grid type (GTYPE). In general, in the FV3 code, this can take - # on one of the following values: "global", "stretch", "nest", and "re- - # gional". The first three values are for various configurations of a - # global grid, while the last one is for a regional grid. Since here we - # are only interested in a regional grid, GTYPE must be set to "region- - # al". 
- # - # ----------------------------------------------------------------------- - # - global TILE_RGNL, GTYPE - GTYPE = "regional" - TILE_RGNL = "7" - - # USE_MERRA_CLIMO must be True for the physics suite FV3_GFS_v15_thompson_mynn_lam3km" - global USE_MERRA_CLIMO - if CCPP_PHYS_SUITE == "FV3_GFS_v15_thompson_mynn_lam3km": - USE_MERRA_CLIMO = True - # - # ----------------------------------------------------------------------- - # - # Set CPL to TRUE/FALSE based on FCST_MODEL. - # - # ----------------------------------------------------------------------- - # - global CPL - if FCST_MODEL == "ufs-weather-model": - CPL = False - elif FCST_MODEL == "fv3gfs_aqm": - CPL = True - else: - raise Exception( - f''' - The coupling flag CPL has not been specified for this value of FCST_MODEL: - FCST_MODEL = \"{FCST_MODEL}\"''' - ) - - # Make sure RESTART_INTERVAL is set to an integer value - if not isinstance(RESTART_INTERVAL, int): - raise Exception(f"\nRESTART_INTERVAL = {RESTART_INTERVAL}, must be an integer value\n") - - # Check that input dates are in a date format - - # get dictionary of all variables - allvars = dict(globals()) - allvars.update(locals()) - dates = ['DATE_FIRST_CYCL', 'DATE_LAST_CYCL'] - for val in dates: - if not isinstance(allvars[val], datetime.date): - raise Exception(dedent(f''' - Date variable {val}={allvars[val]} is not in a valid date format - - For examples of valid formats, see the users guide. - ''')) # If using a custom post configuration file, make sure that it exists. 
-    if USE_CUSTOM_POST_CONFIG_FILE:
+    post_config = expt_config['task_run_post']
+    if post_config.get('USE_CUSTOM_POST_CONFIG_FILE'):
+        custom_post_config_fp = post_config.get('CUSTOM_POST_CONFIG_FP')
         try:
-            #os.path.exists returns exception if passed an empty string or None, so use "try/except" as a 2-for-1 error catch
-            if not os.path.exists(CUSTOM_POST_CONFIG_FP):
-                raise
-        except:
-            raise FileNotFoundError(dedent(
-                f'''
-                USE_CUSTOM_POST_CONFIG_FILE has been set, but the custom post configuration file
-                CUSTOM_POST_CONFIG_FP = {CUSTOM_POST_CONFIG_FP}
-                could not be found.'''
-            )) from None
+            # os.path.exists returns exception if passed None, so use
+            # "try/except" to catch it and the non-existence of a
+            # provided path
+            if not os.path.exists(custom_post_config_fp):
+                raise FileNotFoundError(dedent(
+                    f'''
+                    USE_CUSTOM_POST_CONFIG_FILE has been set, but the custom post configuration file
+                    CUSTOM_POST_CONFIG_FP = {custom_post_config_fp}
+                    could not be found.'''
+                )) from None
+        except TypeError:
+            raise TypeError(dedent(
+                f"""
+                USE_CUSTOM_POST_CONFIG_FILE has been set, but the custom
+                post configuration file path (CUSTOM_POST_CONFIG_FP) is
+                None.
+                """)) from None
+        except FileNotFoundError:
+            raise
+
     # If using external CRTM fix files to allow post-processing of synthetic
     # satellite products from the UPP, make sure the CRTM fix file directory exists.
-    if USE_CRTM:
+    if global_sect.get('USE_CRTM'):
+        crtm_dir = global_sect.get('CRTM_DIR')
         try:
-            #os.path.exists returns exception if passed an empty string or None, so use "try/except" as a 2-for-1 error catch
-            if not os.path.exists(CRTM_DIR):
-                raise
-        except:
-            raise FileNotFoundError(dedent(
-                f'''
-                USE_CRTM has been set, but the external CRTM fix file directory:
-                CRTM_DIR = {CRTM_DIR}
-                could not be found.'''
-            )) from None
-
-    # The forecast length (in integer hours) cannot contain more than 3 characters.
-    # Thus, its maximum value is 999.
- fcst_len_hrs_max = 999 - if FCST_LEN_HRS > fcst_len_hrs_max: - raise ValueError( - f""" - Forecast length is greater than maximum allowed length: - FCST_LEN_HRS = {FCST_LEN_HRS} - fcst_len_hrs_max = {fcst_len_hrs_max}""" - ) - # - # ----------------------------------------------------------------------- - # - # Check whether the forecast length (FCST_LEN_HRS) is evenly divisible - # by the BC update interval (LBC_SPEC_INTVL_HRS). If so, generate an - # array of forecast hours at which the boundary values will be updated. - # - # ----------------------------------------------------------------------- - # - rem = FCST_LEN_HRS % LBC_SPEC_INTVL_HRS - - if rem != 0: - raise Exception( - f""" - The forecast length (FCST_LEN_HRS) is not evenly divisible by the lateral - boundary conditions update interval (LBC_SPEC_INTVL_HRS): - FCST_LEN_HRS = {FCST_LEN_HRS} - LBC_SPEC_INTVL_HRS = {LBC_SPEC_INTVL_HRS} - rem = FCST_LEN_HRS%%LBC_SPEC_INTVL_HRS = {rem}""" - ) - # - # ----------------------------------------------------------------------- - # - # Set the array containing the forecast hours at which the lateral - # boundary conditions (LBCs) need to be updated. Note that this array - # does not include the 0-th hour (initial time). - # - # ----------------------------------------------------------------------- - # - global LBC_SPEC_FCST_HRS - LBC_SPEC_FCST_HRS = [ - i - for i in range( - LBC_SPEC_INTVL_HRS, LBC_SPEC_INTVL_HRS + FCST_LEN_HRS, LBC_SPEC_INTVL_HRS - ) - ] - # - # ----------------------------------------------------------------------- - # - # Check to make sure that various computational parameters needed by the - # forecast model are set to non-empty values. At this point in the - # experiment generation, all of these should be set to valid (non-empty) - # values. 
- # - # ----------------------------------------------------------------------- - # - # get dictionary of all variables - allvars = dict(globals()) - allvars.update(locals()) - vlist = ['DT_ATMOS', - 'LAYOUT_X', - 'LAYOUT_Y', - 'BLOCKSIZE', - 'EXPT_SUBDIR'] - for val in vlist: - if not allvars[val]: - raise Exception(f"\nMandatory variable '{val}' has not been set\n") + # os.path.exists returns exception if passed None, so use + # "try/except" to catch it and the non-existence of a + # provided path + if not os.path.exists(crtm_dir): + raise FileNotFoundError(dedent( + f''' + USE_CRTM has been set, but the external CRTM fix file directory: + CRTM_DIR = {crtm_dir} + could not be found.''' + )) from None + except TypeError: + raise TypeError(dedent( + f""" + USE_CRTM has been set, but the external CRTM fix file + directory (CRTM_DIR) is None. + """)) from None + except FileNotFoundError: + raise - # - # ----------------------------------------------------------------------- - # # If performing sub-hourly model output and post-processing, check that # the output interval DT_SUBHOURLY_POST_MNTS (in minutes) is specified # correctly. - # - # ----------------------------------------------------------------------- - # - global SUB_HOURLY_POST + if post_config.get('SUB_HOURLY_POST'): - if SUB_HOURLY_POST: - # - # Check that DT_SUBHOURLY_POST_MNTS is between 0 and 59, inclusive. - # - if DT_SUBHOURLY_POST_MNTS < 0 or DT_SUBHOURLY_POST_MNTS > 59: - raise ValueError( - f''' - When performing sub-hourly post (i.e. SUB_HOURLY_POST set to \"TRUE\"), - DT_SUBHOURLY_POST_MNTS must be set to an integer between 0 and 59, - inclusive but in this case is not: - SUB_HOURLY_POST = \"{SUB_HOURLY_POST}\" - DT_SUBHOURLY_POST_MNTS = \"{DT_SUBHOURLY_POST_MNTS}\"''' - ) - # - # Check that DT_SUBHOURLY_POST_MNTS (after converting to seconds) is - # evenly divisible by the forecast model's main time step DT_ATMOS. 
- # - rem = DT_SUBHOURLY_POST_MNTS * 60 % DT_ATMOS - if rem != 0: - raise ValueError( - f""" - When performing sub-hourly post (i.e. SUB_HOURLY_POST set to \"TRUE\"), - the time interval specified by DT_SUBHOURLY_POST_MNTS (after converting - to seconds) must be evenly divisible by the time step DT_ATMOS used in - the forecast model, i.e. the remainder (rem) must be zero. In this case, - it is not: - SUB_HOURLY_POST = \"{SUB_HOURLY_POST}\" - DT_SUBHOURLY_POST_MNTS = \"{DT_SUBHOURLY_POST_MNTS}\" - DT_ATMOS = \"{DT_ATMOS}\" - rem = (DT_SUBHOURLY_POST_MNTS*60) %% DT_ATMOS = {rem} - Please reset DT_SUBHOURLY_POST_MNTS and/or DT_ATMOS so that this remainder - is zero.""" - ) - # - # If DT_SUBHOURLY_POST_MNTS is set to 0 (with SUB_HOURLY_POST set to - # True), then we're not really performing subhourly post-processing. - # In this case, reset SUB_HOURLY_POST to False and print out an - # informational message that such a change was made. - # - if DT_SUBHOURLY_POST_MNTS == 0: + # Subhourly post should be set with minutes between 1 and 59 for + # real subhourly post to be performed. + dt_subhourly_post_mnts = post_config.get('DT_SUBHOURLY_POST_MNTS') + if dt_subhourly_post_mnts == 0: logger.warning( f""" When performing sub-hourly post (i.e. SUB_HOURLY_POST set to \"TRUE\"), DT_SUBHOURLY_POST_MNTS must be set to a value greater than 0; otherwise, sub-hourly output is not really being performed: - SUB_HOURLY_POST = \"{SUB_HOURLY_POST}\" DT_SUBHOURLY_POST_MNTS = \"{DT_SUBHOURLY_POST_MNTS}\" Resetting SUB_HOURLY_POST to \"FALSE\". If you do not want this, you must set DT_SUBHOURLY_POST_MNTS to something other than zero.""" ) - SUB_HOURLY_POST = False - # - # ----------------------------------------------------------------------- - # - # If the base directory (EXPT_BASEDIR) in which the experiment subdirectory - # (EXPT_SUBDIR) will be located does not start with a "/", then it is - # either set to a null string or contains a relative directory. 
In both
-    # cases, prepend to it the absolute path of the default directory under
-    # which the experiment directories are placed.  If EXPT_BASEDIR was set
-    # to a null string, it will get reset to this default experiment directory,
-    # and if it was set to a relative directory, it will get reset to an
-    # absolute directory that points to the relative directory under the
-    # default experiment directory.  Then create EXPT_BASEDIR if it doesn't
-    # already exist.
-    #
-    # -----------------------------------------------------------------------
-    #
-    global EXPT_BASEDIR
-    if (not EXPT_BASEDIR) or (EXPT_BASEDIR[0] != "/"):
-        if not EXPT_BASEDIR:
-            EXPT_BASEDIR = ""
-        EXPT_BASEDIR = os.path.join(HOMEdir, "..", "expt_dirs", EXPT_BASEDIR)
-    try:
-        EXPT_BASEDIR = os.path.realpath(EXPT_BASEDIR)
-    except:
-        pass
-    EXPT_BASEDIR = os.path.abspath(EXPT_BASEDIR)
+            post_config['SUB_HOURLY_POST'] = False

-    mkdir_vrfy(f' -p "{EXPT_BASEDIR}"')
+        if dt_subhourly_post_mnts < 0 or dt_subhourly_post_mnts > 59:
+            raise ValueError(
+                f'''
+                When SUB_HOURLY_POST is set to \"TRUE\",
+                DT_SUBHOURLY_POST_MNTS must be set to an integer between 0 and 59,
+                inclusive but:
+                DT_SUBHOURLY_POST_MNTS = \"{dt_subhourly_post_mnts}\"'''
+            )

-    #
-    # -----------------------------------------------------------------------
-    #
-    # Set the full path to the experiment directory.  Then check if it already
-    # exists and if so, deal with it as specified by PREEXISTING_DIR_METHOD.
- # - # ----------------------------------------------------------------------- - # - global EXPTDIR - EXPTDIR = os.path.join(EXPT_BASEDIR, EXPT_SUBDIR) - try: - check_for_preexist_dir_file(EXPTDIR, PREEXISTING_DIR_METHOD) - except ValueError: - logger.exception(f''' - Check that the following values are valid: - EXPTDIR {EXPTDIR} - PREEXISTING_DIR_METHOD {PREEXISTING_DIR_METHOD} - ''') - raise - except FileExistsError: - errmsg = dedent(f''' - EXPTDIR ({EXPTDIR}) already exists, and PREEXISTING_DIR_METHOD = {PREEXISTING_DIR_METHOD} + # Check that DT_SUBHOURLY_POST_MNTS (after converting to seconds) is + # evenly divisible by the forecast model's main time step DT_ATMOS. + dt_atmos = fcst_config['DT_ATMOS'] + rem = dt_subhourly_post_mnts * 60 % dt_atmos + if rem != 0: + raise ValueError( + f""" + When SUB_HOURLY_POST is set to \"TRUE\") the post + processing interval in seconds must be evenly divisible + by the time step DT_ATMOS used in the forecast model, + i.e. the remainder must be zero. In this case, it is + not: + + DT_SUBHOURLY_POST_MNTS = \"{dt_subhourly_post_mnts}\" + DT_ATMOS = \"{dt_atmos}\" + remainder = (DT_SUBHOURLY_POST_MNTS*60) %% DT_ATMOS = {rem} + + Please reset DT_SUBHOURLY_POST_MNTS and/or DT_ATMOS so + that this remainder is zero.""" + ) - To ignore this error, delete the directory, or set - PREEXISTING_DIR_METHOD = delete, or - PREEXISTING_DIR_METHOD = rename - in your config file. - ''') - raise FileExistsError(errmsg) from None + # Make sure the post output domain is set + predef_grid_name = fcst_config.get('PREDEF_GRID_NAME') + post_output_domain_name = post_config.get('POST_OUTPUT_DOMAIN_NAME') + + if not post_output_domain_name: + if not predef_grid_name: + raise Exception( + f""" + The domain name used in naming the run_post output files + (POST_OUTPUT_DOMAIN_NAME) has not been set: + POST_OUTPUT_DOMAIN_NAME = \"{post_output_domain_name}\" + If this experiment is not using a predefined grid (i.e. 
if
+                PREDEF_GRID_NAME is set to a null string), POST_OUTPUT_DOMAIN_NAME
+                must be set in the configuration file (\"{user_config}\"). """
+            )
+        post_output_domain_name = predef_grid_name
+
+    if not isinstance(post_output_domain_name, int):
+        post_output_domain_name = lowercase(post_output_domain_name)
     #
     # -----------------------------------------------------------------------
     #
-    # Set other directories, some of which may depend on EXPTDIR (depending
-    # on whether we're running in NCO or community mode, i.e. whether RUN_ENVIR
-    # is set to "nco" or "community").  Definitions:
-    #
-    # LOGDIR:
-    # Directory in which the log files from the workflow tasks will be placed.
-    #
-    # FIXam:
-    # This is the directory that will contain the fixed files or symlinks to
-    # the fixed files containing various fields on global grids (which are
-    # usually much coarser than the native FV3-LAM grid).
-    #
-    # FIXclim:
-    # This is the directory that will contain the MERRA2 aerosol climatology
-    # data file and lookup tables for optics properties
-    #
-    # FIXlam:
-    # This is the directory that will contain the fixed files or symlinks to
-    # the fixed files containing the grid, orography, and surface climatology
-    # on the native FV3-LAM grid.
-    #
-    # POST_OUTPUT_DOMAIN_NAME:
-    # The PREDEF_GRID_NAME is set by default.
+    # Set the output directory locations
     #
     # -----------------------------------------------------------------------
     #
-    global LOGDIR, FIXam, FIXclim, FIXlam
-    global POST_OUTPUT_DOMAIN_NAME
-    global COMIN_BASEDIR, COMOUT_BASEDIR
-    global OPSROOT, COMROOT, PACKAGEROOT, DATAROOT, DCOMROOT, DBNROOT
-    global SENDECF, SENDDBN, SENDDBN_NTC, SENDCOM, SENDWEB
-    global KEEPDATA, MAILTO, MAILCC
+    run_envir = expt_config['user'].get('RUN_ENVIR', "")

-    # Main directory locations
-    if RUN_ENVIR == "nco":
+    # These NCO variables need to be set based on the user's specified
+    # run environment. The default is set in config_defaults for nco. 
If
+    # running in community mode, we set these paths to the experiment
+    # directory.
+    nco_vars = [
+        'opsroot',
+        'comroot',
+        'packageroot',
+        'dataroot',
+        'dcomroot',
+        'comin_basedir',
+        'comout_basedir',
+    ]

-        try: OPSROOT = os.path.abspath(f"{EXPT_BASEDIR}{os.sep}..{os.sep}nco_dirs") \
-                if OPSROOT is None else OPSROOT
-        except NameError: OPSROOT = EXPTDIR
-        try: COMROOT
-        except NameError: COMROOT = os.path.join(OPSROOT, "com")
-        try: PACKAGEROOT
-        except NameError: PACKAGEROOT = os.path.join(OPSROOT, "packages")
-        try: DATAROOT
-        except NameError: DATAROOT = os.path.join(OPSROOT, "tmp")
-        try: DCOMROOT
-        except NameError: DCOMROOT = os.path.join(OPSROOT, "dcom")
-
-        COMIN_BASEDIR = os.path.join(COMROOT, NET, model_ver)
-        COMOUT_BASEDIR = os.path.join(COMROOT, NET, model_ver)
-
-        LOGDIR = os.path.join(OPSROOT,"output")
-
-    else:
-
-        COMIN_BASEDIR = EXPTDIR
-        COMOUT_BASEDIR = EXPTDIR
-        OPSROOT = EXPTDIR
-        COMROOT = EXPTDIR
-        PACKAGEROOT = EXPTDIR
-        DATAROOT = EXPTDIR
-        DCOMROOT = EXPTDIR
-
-        LOGDIR = os.path.join(EXPTDIR, "log")
-
-    try: DBNROOT
-    except NameError: DBNROOT = None
-    try: SENDECF
-    except NameError: SENDECF = False
-    try: SENDDBN
-    except NameError: SENDDBN = False
-    try: SENDDBN_NTC
-    except NameError: SENDDBN_NTC = False
-    try: SENDCOM
-    except NameError: SENDCOM = False
-    try: SENDWEB
-    except NameError: SENDWEB = False
-    try: KEEPDATA
-    except NameError: KEEPDATA = True
-    try: MAILTO
-    except NameError: MAILTO = None
-    try: MAILCC
-    except NameError: MAILCC = None
+    nco_config = expt_config['nco']
+    if run_envir != "nco":
+        # Put the variables in config dict. 
+ for nco_var in nco_vars: + nco_config[nco_var.upper()] = exptdir # create NCO directories - if RUN_ENVIR == "nco": - mkdir_vrfy(f' -p "{OPSROOT}"') - mkdir_vrfy(f' -p "{COMROOT}"') - mkdir_vrfy(f' -p "{PACKAGEROOT}"') - mkdir_vrfy(f' -p "{DATAROOT}"') - mkdir_vrfy(f' -p "{DCOMROOT}"') - if DBNROOT is not None: - mkdir_vrfy(f' -p "{DBNROOT}"') + if run_envir == "nco": + mkdir_vrfy(f' -p "{nco_config.get("OPSROOT")}"') + mkdir_vrfy(f' -p "{nco_config.get("COMROOT")}"') + mkdir_vrfy(f' -p "{nco_config.get("PACKAGEROOT")}"') + mkdir_vrfy(f' -p "{nco_config.get("DATAROOT")}"') + mkdir_vrfy(f' -p "{nco_config.get("DCOMROOT")}"') + mkdir_vrfy(f' -p "{nco_config.get("LOGDIR")}"') + if nco_config['DBNROOT']: + mkdir_vrfy(f' -p "{nco_config["DBNROOT"]}"') - # - # ----------------------------------------------------------------------- - # - # - # If POST_OUTPUT_DOMAIN_NAME has not been specified by the user, set it - # to PREDEF_GRID_NAME (which won't be empty if using a predefined grid). - # Then change it to lowercase. Finally, ensure that it does not end up - # getting set to an empty string. - # - # ----------------------------------------------------------------------- - # - POST_OUTPUT_DOMAIN_NAME = POST_OUTPUT_DOMAIN_NAME or PREDEF_GRID_NAME + # create experiment dir + mkdir_vrfy(f' -p "{exptdir}"') - if type(POST_OUTPUT_DOMAIN_NAME) != int: - POST_OUTPUT_DOMAIN_NAME = lowercase(POST_OUTPUT_DOMAIN_NAME) - if POST_OUTPUT_DOMAIN_NAME is None: - if PREDEF_GRID_NAME is None: - raise Exception( - f""" - The domain name used in naming the run_post output files - (POST_OUTPUT_DOMAIN_NAME) has not been set: - POST_OUTPUT_DOMAIN_NAME = \"{POST_OUTPUT_DOMAIN_NAME}\" - If this experiment is not using a predefined grid (i.e. if - PREDEF_GRID_NAME is set to a null string), POST_OUTPUT_DOMAIN_NAME - must be set in the configuration file (\"{EXPT_CONFIG_FN}\"). 
""" - ) # # ----------------------------------------------------------------------- # @@ -915,626 +906,179 @@ def get_location(xcs, fmt, expt_cfg): # # ----------------------------------------------------------------------- # - global DATA_TABLE_TMPL_FN, DIAG_TABLE_TMPL_FN, FIELD_TABLE_TMPL_FN, MODEL_CONFIG_TMPL_FN, NEMS_CONFIG_TMPL_FN - global DATA_TABLE_TMPL_FP, DIAG_TABLE_TMPL_FP, FIELD_TABLE_TMPL_FP, MODEL_CONFIG_TMPL_FP, NEMS_CONFIG_TMPL_FP - global FV3_NML_BASE_SUITE_FP, FV3_NML_YAML_CONFIG_FP, FV3_NML_BASE_ENS_FP - - dot_ccpp_phys_suite_or_null = f".{CCPP_PHYS_SUITE}" - - # Names of input files that the forecast model (ufs-weather-model) expects - # to read in. These should only be changed if the input file names in the - # forecast model code are changed. - # ---------------------------------- - DATA_TABLE_FN = "data_table" - DIAG_TABLE_FN = "diag_table" - FIELD_TABLE_FN = "field_table" - MODEL_CONFIG_FN = "model_configure" - NEMS_CONFIG_FN = "nems.configure" - # ---------------------------------- - - DATA_TABLE_TMPL_FN = DATA_TABLE_TMPL_FN or DATA_TABLE_FN - DIAG_TABLE_TMPL_FN = ( - f"{DIAG_TABLE_TMPL_FN or DIAG_TABLE_FN}{dot_ccpp_phys_suite_or_null}" - ) - FIELD_TABLE_TMPL_FN = ( - f"{FIELD_TABLE_TMPL_FN or FIELD_TABLE_FN}{dot_ccpp_phys_suite_or_null}" - ) - MODEL_CONFIG_TMPL_FN = MODEL_CONFIG_TMPL_FN or MODEL_CONFIG_FN - NEMS_CONFIG_TMPL_FN = NEMS_CONFIG_TMPL_FN or NEMS_CONFIG_FN - - DATA_TABLE_TMPL_FP = os.path.join(PARMdir, DATA_TABLE_TMPL_FN) - DIAG_TABLE_TMPL_FP = os.path.join(PARMdir, DIAG_TABLE_TMPL_FN) - FIELD_TABLE_TMPL_FP = os.path.join(PARMdir, FIELD_TABLE_TMPL_FN) - FV3_NML_BASE_SUITE_FP = os.path.join(PARMdir, FV3_NML_BASE_SUITE_FN) - FV3_NML_YAML_CONFIG_FP = os.path.join(PARMdir, FV3_NML_YAML_CONFIG_FN) - FV3_NML_BASE_ENS_FP = os.path.join(EXPTDIR, FV3_NML_BASE_ENS_FN) - MODEL_CONFIG_TMPL_FP = os.path.join(PARMdir, MODEL_CONFIG_TMPL_FN) - NEMS_CONFIG_TMPL_FP = os.path.join(PARMdir, NEMS_CONFIG_TMPL_FN) - # - # 
----------------------------------------------------------------------- - # - # Set: - # - # 1) the variable CCPP_PHYS_SUITE_FN to the name of the CCPP physics - # suite definition file. - # 2) the variable CCPP_PHYS_SUITE_IN_CCPP_FP to the full path of this - # file in the forecast model's directory structure. - # 3) the variable CCPP_PHYS_SUITE_FP to the full path of this file in - # the experiment directory. - # - # Note that the experiment/workflow generation scripts will copy this - # file from CCPP_PHYS_SUITE_IN_CCPP_FP to CCPP_PHYS_SUITE_FP. Then, for - # each cycle, the forecast launch script will create a link in the cycle - # run directory to the copy of this file at CCPP_PHYS_SUITE_FP. - # - # ----------------------------------------------------------------------- - # - global CCPP_PHYS_SUITE_FN, CCPP_PHYS_SUITE_IN_CCPP_FP, CCPP_PHYS_SUITE_FP - CCPP_PHYS_SUITE_FN = f"suite_{CCPP_PHYS_SUITE}.xml" - CCPP_PHYS_SUITE_IN_CCPP_FP = os.path.join( - UFS_WTHR_MDL_DIR, "FV3", "ccpp", "suites", CCPP_PHYS_SUITE_FN - ) - CCPP_PHYS_SUITE_FP = os.path.join(EXPTDIR, CCPP_PHYS_SUITE_FN) - if not os.path.exists(CCPP_PHYS_SUITE_IN_CCPP_FP): + + # Check for the CCPP_PHYSICS suite xml file + ccpp_phys_suite_in_ccpp_fp = workflow_config['CCPP_PHYS_SUITE_IN_CCPP_FP'] + if not os.path.exists(ccpp_phys_suite_in_ccpp_fp): raise FileNotFoundError( f''' The CCPP suite definition file (CCPP_PHYS_SUITE_IN_CCPP_FP) does not exist in the local clone of the ufs-weather-model: - CCPP_PHYS_SUITE_IN_CCPP_FP = \"{CCPP_PHYS_SUITE_IN_CCPP_FP}\"''' + CCPP_PHYS_SUITE_IN_CCPP_FP = \"{ccpp_phys_suite_in_ccpp_fp}\"''' ) - # - # ----------------------------------------------------------------------- - # - # Set: - # - # 1) the variable FIELD_DICT_FN to the name of the field dictionary - # file. - # 2) the variable FIELD_DICT_IN_UWM_FP to the full path of this - # file in the forecast model's directory structure. 
- # 3) the variable FIELD_DICT_FP to the full path of this file in - # the experiment directory. - # - # ----------------------------------------------------------------------- - # - global FIELD_DICT_FN, FIELD_DICT_IN_UWM_FP, FIELD_DICT_FP - FIELD_DICT_FN = "fd_nems.yaml" - FIELD_DICT_IN_UWM_FP = os.path.join( - UFS_WTHR_MDL_DIR, "tests", "parm", FIELD_DICT_FN - ) - FIELD_DICT_FP = os.path.join(EXPTDIR, FIELD_DICT_FN) - if not os.path.exists(FIELD_DICT_IN_UWM_FP): + + # Check for the field dict file + field_dict_in_uwm_fp = workflow_config['FIELD_DICT_IN_UWM_FP'] + if not os.path.exists(field_dict_in_uwm_fp): raise FileNotFoundError( f''' The field dictionary file (FIELD_DICT_IN_UWM_FP) does not exist in the local clone of the ufs-weather-model: - FIELD_DICT_IN_UWM_FP = \"{FIELD_DICT_IN_UWM_FP}\"''' + FIELD_DICT_IN_UWM_FP = \"{field_dict_in_uwm_fp}\"''' ) - # - # ----------------------------------------------------------------------- - # - # Call the function that sets the ozone parameterization being used and - # modifies associated parameters accordingly. - # - # ----------------------------------------------------------------------- - # - # export env vars before calling another module - export_vars() + # Set the appropriate ozone production/loss file paths and symlinks + ozone_param, fixgsm_ozone_fn, ozone_link_mappings = set_ozone_param( + ccpp_phys_suite_in_ccpp_fp, + fcst_config['CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING'], + ) - OZONE_PARAM = set_ozone_param(CCPP_PHYS_SUITE_IN_CCPP_FP) + # Reset the dummy value saved in the last list item to the ozone + # file name + fcst_config['FIXgsm_FILES_TO_COPY_TO_FIXam'][-1] = fixgsm_ozone_fn - IMPORTS = ["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING", "FIXgsm_FILES_TO_COPY_TO_FIXam"] - import_vars(env_vars=IMPORTS) - # - # ----------------------------------------------------------------------- - # - # Set the full paths to those forecast model input files that are cycle- - # independent, i.e. 
they don't include information about the cycle's - # starting day/time. These are: - # - # * The data table file [(1) in the list above)] - # * The field table file [(3) in the list above)] - # * The FV3 namelist file [(4) in the list above)] - # * The NEMS configuration file [(6) in the list above)] - # - # Since they are cycle-independent, the experiment/workflow generation - # scripts will place them in the main experiment directory (EXPTDIR). - # The script that runs each cycle will then create links to these files - # in the run directories of the individual cycles (which are subdirecto- - # ries under EXPTDIR). - # - # The remaining two input files to the forecast model, i.e. - # - # * The diagnostics table file [(2) in the list above)] - # * The model configuration file [(5) in the list above)] - # - # contain parameters that depend on the cycle start date. Thus, custom - # versions of these two files must be generated for each cycle and then - # placed directly in the run directories of the cycles (not EXPTDIR). - # For this reason, the full paths to their locations vary by cycle and - # cannot be set here (i.e. they can only be set in the loop over the - # cycles in the rocoto workflow XML file). - # - # ----------------------------------------------------------------------- - # - global DATA_TABLE_FP, FIELD_TABLE_FP, FV3_NML_FN, FV3_NML_FP, NEMS_CONFIG_FP - DATA_TABLE_FP = os.path.join(EXPTDIR, DATA_TABLE_FN) - FIELD_TABLE_FP = os.path.join(EXPTDIR, FIELD_TABLE_FN) - FV3_NML_FN = os.path.splitext(FV3_NML_BASE_SUITE_FN)[0] - FV3_NML_FP = os.path.join(EXPTDIR, FV3_NML_FN) - NEMS_CONFIG_FP = os.path.join(EXPTDIR, NEMS_CONFIG_FN) - # - # ----------------------------------------------------------------------- - # - # If USE_USER_STAGED_EXTRN_FILES is set to TRUE, make sure that the user- - # specified directories under which the external model files should be - # located actually exist. 
- # - # ----------------------------------------------------------------------- - # - if USE_USER_STAGED_EXTRN_FILES: - # Check for the base directory up to the first templated field. - idx = EXTRN_MDL_SOURCE_BASEDIR_ICS.find("$") - if idx == -1: - idx = len(EXTRN_MDL_SOURCE_BASEDIR_ICS) + # Reset the experiment config list with the update list + fcst_config['CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING'] = ozone_link_mappings - if not os.path.exists(EXTRN_MDL_SOURCE_BASEDIR_ICS[:idx]): - raise FileNotFoundError( - f''' - The directory (EXTRN_MDL_SOURCE_BASEDIR_ICS) in which the user-staged - external model files for generating ICs should be located does not exist: - EXTRN_MDL_SOURCE_BASEDIR_ICS = \"{EXTRN_MDL_SOURCE_BASEDIR_ICS}\"''' - ) + log_info( + f""" + The ozone parameter used for this experiment is {ozone_param}. + """ + ) - idx = EXTRN_MDL_SOURCE_BASEDIR_LBCS.find("$") - if idx == -1: - idx = len(EXTRN_MDL_SOURCE_BASEDIR_LBCS) + log_info( + f""" + The list that sets the mapping between symlinks in the cycle + directory, and the files in the FIXam directory has been updated + to include the ozone production/loss file.""", verbose=verbose) - if not os.path.exists(EXTRN_MDL_SOURCE_BASEDIR_LBCS[:idx]): - raise FileNotFoundError( - f''' - The directory (EXTRN_MDL_SOURCE_BASEDIR_LBCS) in which the user-staged - external model files for generating LBCs should be located does not exist: - EXTRN_MDL_SOURCE_BASEDIR_LBCS = \"{EXTRN_MDL_SOURCE_BASEDIR_LBCS}\"''' - ) - # - # ----------------------------------------------------------------------- - # - # If DO_ENSEMBLE, set the names of the ensemble members; these will be - # used to set the ensemble member directories. Also, set the full path - # to the FV3 namelist file corresponding to each ensemble member. 
- # - # ----------------------------------------------------------------------- - # - global NDIGITS_ENSMEM_NAMES, ENSMEM_NAMES, FV3_NML_ENSMEM_FPS, NUM_ENS_MEMBERS - NDIGITS_ENSMEM_NAMES = 0 - ENSMEM_NAMES = [] - FV3_NML_ENSMEM_FPS = [] - if DO_ENSEMBLE: - NDIGITS_ENSMEM_NAMES = len(str(NUM_ENS_MEMBERS)) - fmt = f"0{NDIGITS_ENSMEM_NAMES}d" - for i in range(NUM_ENS_MEMBERS): - ENSMEM_NAMES.append(f"mem{fmt}".format(i + 1)) - FV3_NML_ENSMEM_FPS.append( - os.path.join(EXPTDIR, f"{FV3_NML_FN}_{ENSMEM_NAMES[i]}") - ) + log_info( + f""" + CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = {list_to_str(ozone_link_mappings)} + """, verbose=verbose, _dedent=False) - # Set the full path to the forecast model executable. - global FV3_EXEC_FP - FV3_EXEC_FP = os.path.join(EXECdir, FV3_EXEC_FN) # # ----------------------------------------------------------------------- # - # Set the full path to the script that can be used to (re)launch the - # workflow. Also, if USE_CRON_TO_RELAUNCH is set to TRUE, set the line - # to add to the cron table to automatically relaunch the workflow every - # CRON_RELAUNCH_INTVL_MNTS minutes. Otherwise, set the variable con- - # taining this line to a null string. + # Check that the set of tasks to run in the workflow is internally + # consistent. # # ----------------------------------------------------------------------- # - global WFLOW_LAUNCH_SCRIPT_FP, WFLOW_LAUNCH_LOG_FP, CRONTAB_LINE - WFLOW_LAUNCH_SCRIPT_FP = os.path.join(USHdir, WFLOW_LAUNCH_SCRIPT_FN) - WFLOW_LAUNCH_LOG_FP = os.path.join(EXPTDIR, WFLOW_LAUNCH_LOG_FN) - if USE_CRON_TO_RELAUNCH: - CRONTAB_LINE = ( - f"""*/{CRON_RELAUNCH_INTVL_MNTS} * * * * cd {EXPTDIR} && """ - f"""./{WFLOW_LAUNCH_SCRIPT_FN} called_from_cron="TRUE" >> ./{WFLOW_LAUNCH_LOG_FN} 2>&1""" - ) - else: - CRONTAB_LINE = "" - # - # ----------------------------------------------------------------------- - # - # Set the full path to the script that, for a given task, loads the - # necessary module files and runs the tasks. 
- # - # ----------------------------------------------------------------------- - # - global LOAD_MODULES_RUN_TASK_FP - LOAD_MODULES_RUN_TASK_FP = os.path.join(USHdir, "load_modules_run_task.sh") - - global RUN_TASK_MAKE_GRID, RUN_TASK_MAKE_OROG, RUN_TASK_MAKE_SFC_CLIMO - global RUN_TASK_VX_GRIDSTAT, RUN_TASK_VX_POINTSTAT, RUN_TASK_VX_ENSGRID, RUN_TASK_VX_ENSPOINT - - # Fix file location - if RUN_TASK_MAKE_GRID: - FIXdir = EXPTDIR - else: - FIXdir = os.path.join(HOMEdir, "fix") - - FIXam = os.path.join(FIXdir, "fix_am") - FIXclim = os.path.join(FIXdir, "fix_clim") - FIXlam = os.path.join(FIXdir, "fix_lam") + workflow_switches = expt_config['workflow_switches'] # Ensemble verification can only be run in ensemble mode - if (not DO_ENSEMBLE) and (RUN_TASK_VX_ENSGRID or RUN_TASK_VX_ENSPOINT): + do_ensemble = global_sect['DO_ENSEMBLE'] + run_task_vx_ensgrid = workflow_switches['RUN_TASK_VX_ENSGRID'] + run_task_vx_enspoint = workflow_switches['RUN_TASK_VX_ENSPOINT'] + if (not do_ensemble) and (run_task_vx_ensgrid or run_task_vx_enspoint): raise Exception( f''' Ensemble verification can not be run unless running in ensemble mode: - DO_ENSEMBLE = \"{DO_ENSEMBLE}\" - RUN_TASK_VX_ENSGRID = \"{RUN_TASK_VX_ENSGRID}\" - RUN_TASK_VX_ENSPOINT = \"{RUN_TASK_VX_ENSPOINT}\"''' + DO_ENSEMBLE = \"{do_ensemble}\" + RUN_TASK_VX_ENSGRID = \"{run_task_vx_ensgrid}\" + RUN_TASK_VX_ENSPOINT = \"{run_task_vx_enspoint}\"''' ) # - # ----------------------------------------------------------------------- - # - # Define the various work subdirectories under the main work directory. - # Each of these corresponds to a different step/substep/task in the pre- - # processing, as follows: - # - # GRID_DIR: - # Directory in which the grid files will be placed (if RUN_TASK_MAKE_GRID - # is set to True) or searched for (if RUN_TASK_MAKE_GRID is set to - # False). 
- # - # OROG_DIR: - # Directory in which the orography files will be placed (if RUN_TASK_MAKE_OROG - # is set to True) or searched for (if RUN_TASK_MAKE_OROG is set to - # False). - # - # SFC_CLIMO_DIR: - # Directory in which the surface climatology files will be placed (if - # RUN_TASK_MAKE_SFC_CLIMO is set to True) or searched for (if - # RUN_TASK_MAKE_SFC_CLIMO is set to False). - # - # ---------------------------------------------------------------------- - # - global GRID_DIR, OROG_DIR, SFC_CLIMO_DIR - - if DOMAIN_PREGEN_BASEDIR is None: - RUN_TASK_MAKE_GRID = True - RUN_TASK_MAKE_OROG = True - RUN_TASK_MAKE_SFC_CLIMO = True - - # - # If RUN_TASK_MAKE_GRID is set to False, the workflow will look for - # the pregenerated grid files in GRID_DIR. In this case, make sure that - # GRID_DIR exists. Otherwise, set it to a predefined location under the - # experiment directory (EXPTDIR). + # Use the pregenerated domain files if the RUN_TASK_MAKE* tasks are + # turned off. Link the files, and check that they all contain the + # same resolution input. # - if not RUN_TASK_MAKE_GRID: - if (GRID_DIR is None): - GRID_DIR = os.path.join(DOMAIN_PREGEN_BASEDIR, PREDEF_GRID_NAME) + prep_tasks = ['GRID', 'OROG', 'SFC_CLIMO'] + res_in_fixlam_filenames = None + for prep_task in prep_tasks: + res_in_fns = "" + switch = f'RUN_TASK_MAKE_{prep_task}' + # If the user doesn't want to run the given task, link the fix + # file + if not workflow_switches[switch]: + task_dir = os.path.join(pregen_basedir, predef_grid_name) + sect_key = f"task_make_{prep_task.lower()}" + dir_key = f"{prep_task}_DIR" + expt_config[sect_key][dir_key] = task_dir msg = dedent(f""" - GRID_DIR not specified! - Setting GRID_DIR = {GRID_DIR} + {dir_key} not specified! 
+                Setting {dir_key} = {task_dir}
                """)
            logger.warning(msg)
-        if not os.path.exists(GRID_DIR):
-            raise FileNotFoundError(
-                f'''
-                The directory (GRID_DIR) that should contain the pregenerated grid files
-                does not exist:
-                    GRID_DIR = \"{GRID_DIR}\"'''
-            )
-    else:
-        GRID_DIR = os.path.join(EXPTDIR, "grid")
-    #
-    # If RUN_TASK_MAKE_OROG is set to False, the workflow will look for
-    # the pregenerated orography files in OROG_DIR. In this case, make sure
-    # that OROG_DIR exists. Otherwise, set it to a predefined location under
-    # the experiment directory (EXPTDIR).
-    #
-    if not RUN_TASK_MAKE_OROG:
-        if (OROG_DIR is None):
-            OROG_DIR = os.path.join(DOMAIN_PREGEN_BASEDIR, PREDEF_GRID_NAME)
-
-            msg = dedent(f"""
-                OROG_DIR not specified!
-                Setting OROG_DIR = {OROG_DIR}
-                """)
-            logger.warning(msg)
+            # Link the fix files and check that their resolution is
+            # consistent
+            res_in_fns = link_fix(
+                verbose=verbose,
+                file_group=prep_task.lower(),
+                source_dir=task_dir,
+                target_dir=workflow_config['FIXlam'],
+                ccpp_phys_suite=workflow_config['CCPP_PHYS_SUITE'],
+                constants=expt_config['constants'],
+                dot_or_underscore=workflow_config['DOT_OR_USCORE'],
+                nhw=grid_params['NHW'],
+                run_task=False,
+                sfc_climo_fields=expt_config['task_run_fcst']['SFC_CLIMO_FIELDS'],
+            )
+            if res_in_fixlam_filenames is None:
+                res_in_fixlam_filenames = res_in_fns
+            else:
+                if res_in_fixlam_filenames != res_in_fns:
+                    raise Exception(dedent(
+                        f"""
+                        The resolution of the pregenerated files for
+                        {prep_task} do not match those that were already
+                        set:

-        if not os.path.exists(OROG_DIR):
-            raise FileNotFoundError(
-                f'''
-                The directory (OROG_DIR) that should contain the pregenerated orography
-                files does not exist:
-                    OROG_DIR = \"{OROG_DIR}\"'''
-            )
-    else:
-        OROG_DIR = os.path.join(EXPTDIR, "orog")
-    #
-    # If RUN_TASK_MAKE_SFC_CLIMO is set to False, the workflow will look
-    # for the pregenerated surface climatology files in SFC_CLIMO_DIR. In
-    # this case, make sure that SFC_CLIMO_DIR exists. 
Otherwise, set it to
-    # a predefined location under the experiment directory (EXPTDIR).
-    #
-    if not RUN_TASK_MAKE_SFC_CLIMO:
-        if (SFC_CLIMO_DIR is None):
-            SFC_CLIMO_DIR = os.path.join(DOMAIN_PREGEN_BASEDIR, PREDEF_GRID_NAME)
+                        Resolution in {prep_task}: {res_in_fns}
+                        Resolution expected: {res_in_fixlam_filenames}
+                        """
+                    ))

-            msg = dedent(f"""
-                SFC_CLIMO_DIR not specified!
-                Setting SFC_CLIMO_DIR ={SFC_CLIMO_DIR}
-                """)
-            logger.warning(msg)
-            if not os.path.exists(SFC_CLIMO_DIR):
+            if not os.path.exists(task_dir):
                 raise FileNotFoundError(
                     f'''
-                    The directory (SFC_CLIMO_DIR) that should contain the pregenerated surface
-                    climatology files does not exist:
-                    SFC_CLIMO_DIR = \"{SFC_CLIMO_DIR}\"'''
+                    The directory ({dir_key}) that should contain the pregenerated
+                    {prep_task.lower()} files does not exist:
+                    {dir_key} = \"{task_dir}\"'''
                 )
-        else:
-            SFC_CLIMO_DIR = os.path.join(EXPTDIR, "sfc_climo")
-
-    # -----------------------------------------------------------------------
-    #
-    # Set cycle-independent parameters associated with the external models
-    # from which we will obtain the ICs and LBCs.
-    #
-    # -----------------------------------------------------------------------
-    #
-
-    # export env vars before calling another module
-    export_vars()
-
-    set_extrn_mdl_params()
-
-    IMPORTS = ["EXTRN_MDL_LBCS_OFFSET_HRS"]
-    import_vars(env_vars=IMPORTS)
-    #
-    # -----------------------------------------------------------------------
-    #
-    # Set parameters according to the type of horizontal grid generation
-    # method specified. First consider GFDL's global-parent-grid based
-    # method. 
-    #
-    # -----------------------------------------------------------------------
-    #
-    global LON_CTR, LAT_CTR, NX, NY, NHW, STRETCH_FAC
-
-    if GRID_GEN_METHOD == "GFDLgrid":
-        grid_params = set_gridparams_GFDLgrid(
-            lon_of_t6_ctr=GFDLgrid_LON_T6_CTR,
-            lat_of_t6_ctr=GFDLgrid_LAT_T6_CTR,
-            res_of_t6g=GFDLgrid_NUM_CELLS,
-            stretch_factor=GFDLgrid_STRETCH_FAC,
-            refine_ratio_t6g_to_t7g=GFDLgrid_REFINE_RATIO,
-            istart_of_t7_on_t6g=GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G,
-            iend_of_t7_on_t6g=GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G,
-            jstart_of_t7_on_t6g=GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G,
-            jend_of_t7_on_t6g=GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G,
-        )
-    #
-    # -----------------------------------------------------------------------
-    #
-    # Now consider Jim Purser's map projection/grid generation method.
-    #
-    # -----------------------------------------------------------------------
-    #
-    elif GRID_GEN_METHOD == "ESGgrid":
-        grid_params = set_gridparams_ESGgrid(
-            lon_ctr=ESGgrid_LON_CTR,
-            lat_ctr=ESGgrid_LAT_CTR,
-            nx=ESGgrid_NX,
-            ny=ESGgrid_NY,
-            pazi=ESGgrid_PAZI,
-            halo_width=ESGgrid_WIDE_HALO_WIDTH,
-            delx=ESGgrid_DELX,
-            dely=ESGgrid_DELY,
-        )
-    #
-    # -----------------------------------------------------------------------
-    #
-    # Otherwise
-    #
-    # -----------------------------------------------------------------------
-    #
-    else:
-        grid_params = {
-            "LON_CTR": LON_CTR,
-            "LAT_CTR": LAT_CTR,
-            "NX": NX,
-            "NY": NY,
-            "NHW": NHW,
-            "STRETCH_FAC": STRETCH_FAC,
-        }
-
-    # Extract the basic grid params from the dictionary
-    (LON_CTR, LAT_CTR, NX, NY, NHW, STRETCH_FAC) = (
-        grid_params[k] for k in ["LON_CTR", "LAT_CTR", "NX", "NY", "NHW", "STRETCH_FAC"]
-    )
+    workflow_config['RES_IN_FIXLAM_FILENAMES'] = res_in_fixlam_filenames
+    workflow_config['CRES'] = f"C{res_in_fixlam_filenames}"

-    #
-    # -----------------------------------------------------------------------
-    #
-    # Create a new experiment directory. 
For platforms with no workflow - # manager we need to create LOGDIR as well, since it won't be created - # later at runtime. - # - # ----------------------------------------------------------------------- - # - mkdir_vrfy(f' -p "{EXPTDIR}"') - mkdir_vrfy(f' -p "{LOGDIR}"') # # ----------------------------------------------------------------------- # NOTE: currently this is executed no matter what, should it be dependent on the logic described below?? # If not running the MAKE_GRID_TN, MAKE_OROG_TN, and/or MAKE_SFC_CLIMO # tasks, create symlinks under the FIXlam directory to pregenerated grid, - # orography, and surface climatology files. In the process, also set - # RES_IN_FIXLAM_FILENAMES, which is the resolution of the grid (in units - # of number of grid points on an equivalent global uniform cubed-sphere - # grid) used in the names of the fixed files in the FIXlam directory. + # orography, and surface climatology files. # # ----------------------------------------------------------------------- # mkdir_vrfy(f' -p "{FIXlam}"') - RES_IN_FIXLAM_FILENAMES = "" - # - # ----------------------------------------------------------------------- - # - # If the grid file generation task in the workflow is going to be skipped - # (because pregenerated files are available), create links in the FIXlam - # directory to the pregenerated grid files. - # - # ----------------------------------------------------------------------- - # - - # export env vars - export_vars() - # link fix files - res_in_grid_fns = "" - if not RUN_TASK_MAKE_GRID: - - res_in_grid_fns = link_fix(verbose=VERBOSE, file_group="grid") - - RES_IN_FIXLAM_FILENAMES = res_in_grid_fns # # ----------------------------------------------------------------------- # - # If the orography file generation task in the workflow is going to be - # skipped (because pregenerated files are available), create links in - # the FIXlam directory to the pregenerated orography files. 
+ # Turn off post task if it's not consistent with the forecast's + # user-setting of WRITE_DOPOST # # ----------------------------------------------------------------------- # - res_in_orog_fns = "" - if not RUN_TASK_MAKE_OROG: - - res_in_orog_fns = link_fix(verbose=VERBOSE, file_group="orog") - - if not RES_IN_FIXLAM_FILENAMES and (res_in_orog_fns != RES_IN_FIXLAM_FILENAMES): - raise Exception( - f""" - The resolution extracted from the orography file names (res_in_orog_fns) - does not match the resolution in other groups of files already consi- - dered (RES_IN_FIXLAM_FILENAMES): - res_in_orog_fns = {res_in_orog_fns} - RES_IN_FIXLAM_FILENAMES = {RES_IN_FIXLAM_FILENAMES}""" - ) - else: - RES_IN_FIXLAM_FILENAMES = res_in_orog_fns - # - # ----------------------------------------------------------------------- - # - # If the surface climatology file generation task in the workflow is - # going to be skipped (because pregenerated files are available), create - # links in the FIXlam directory to the pregenerated surface climatology - # files. - # - # ----------------------------------------------------------------------- - # - res_in_sfc_climo_fns = "" - if not RUN_TASK_MAKE_SFC_CLIMO: - - res_in_sfc_climo_fns = link_fix(verbose=VERBOSE, file_group="sfc_climo") - - if RES_IN_FIXLAM_FILENAMES and res_in_sfc_climo_fns != RES_IN_FIXLAM_FILENAMES: - raise Exception( - f""" - The resolution extracted from the surface climatology file names (res_- - in_sfc_climo_fns) does not match the resolution in other groups of files - already considered (RES_IN_FIXLAM_FILENAMES): - res_in_sfc_climo_fns = {res_in_sfc_climo_fns} - RES_IN_FIXLAM_FILENAMES = {RES_IN_FIXLAM_FILENAMES}""" - ) - else: - RES_IN_FIXLAM_FILENAMES = res_in_sfc_climo_fns - # - # ----------------------------------------------------------------------- - # - # The variable CRES is needed in constructing various file names. If - # not running the make_grid task, we can set it here. 
Otherwise, it - # will get set to a valid value by that task. - # - # ----------------------------------------------------------------------- - # - global CRES - CRES = "" - if not RUN_TASK_MAKE_GRID: - CRES = f"C{RES_IN_FIXLAM_FILENAMES}" - - global RUN_TASK_RUN_POST - if WRITE_DOPOST: + if fcst_config['WRITE_DOPOST']: # Turn off run_post - if RUN_TASK_RUN_POST: + if workflow_switches['RUN_TASK_RUN_POST']: logger.warning(dedent(f""" Inline post is turned on, deactivating post-processing tasks: RUN_TASK_RUN_POST = False """)) - RUN_TASK_RUN_POST = False + workflow_switches['RUN_TASK_RUN_POST'] = False # Check if SUB_HOURLY_POST is on - if SUB_HOURLY_POST: + if expt_config['task_run_post']['SUB_HOURLY_POST']: raise Exception( f""" SUB_HOURLY_POST is NOT available with Inline Post yet.""" ) - # - # ----------------------------------------------------------------------- - # - # Calculate PE_MEMBER01. This is the number of MPI tasks used for the - # forecast, including those for the write component if QUILTING is set - # to True. - # - # ----------------------------------------------------------------------- - # - global PE_MEMBER01 - PE_MEMBER01 = LAYOUT_X * LAYOUT_Y - if QUILTING: - PE_MEMBER01 = PE_MEMBER01 + WRTCMP_write_groups * WRTCMP_write_tasks_per_group - - if VERBOSE: - log_info( - f""" - The number of MPI tasks for the forecast (including those for the write - component if it is being used) are: - PE_MEMBER01 = {PE_MEMBER01}""", - verbose=VERBOSE, - ) - # - # ----------------------------------------------------------------------- - # - # Calculate the number of nodes (NNODES_RUN_FCST) to request from the job - # scheduler for the forecast task (RUN_FCST_TN). This is just PE_MEMBER01 - # dividied by the number of processes per node we want to request for this - # task (PPN_RUN_FCST), then rounded up to the nearest integer, i.e. - # - # NNODES_RUN_FCST = ceil(PE_MEMBER01/PPN_RUN_FCST) - # - # where ceil(...) is the ceiling function, i.e. 
it rounds its floating - # point argument up to the next larger integer. Since in bash, division - # of two integers returns a truncated integer, and since bash has no - # built-in ceil(...) function, we perform the rounding-up operation by - # adding the denominator (of the argument of ceil(...) above) minus 1 to - # the original numerator, i.e. by redefining NNODES_RUN_FCST to be - # - # NNODES_RUN_FCST = (PE_MEMBER01 + PPN_RUN_FCST - 1)/PPN_RUN_FCST - # - # ----------------------------------------------------------------------- - # - global NNODES_RUN_FCST - NNODES_RUN_FCST = (PE_MEMBER01 + PPN_RUN_FCST - 1) // PPN_RUN_FCST - # # ----------------------------------------------------------------------- # @@ -1544,297 +1088,69 @@ def get_location(xcs, fmt, expt_cfg): # # ----------------------------------------------------------------------- # - global SDF_USES_RUC_LSM - SDF_USES_RUC_LSM = check_ruc_lsm(ccpp_phys_suite_fp=CCPP_PHYS_SUITE_IN_CCPP_FP) - # - # ----------------------------------------------------------------------- - # - # Set the name of the file containing aerosol climatology data that, if - # necessary, can be used to generate approximate versions of the aerosol - # fields needed by Thompson microphysics. This file will be used to - # generate such approximate aerosol fields in the ICs and LBCs if Thompson - # MP is included in the physics suite and if the exteranl model for ICs - # or LBCs does not already provide these fields. Also, set the full path - # to this file. 
-    #
-    # -----------------------------------------------------------------------
-    #
-    THOMPSON_MP_CLIMO_FN = "Thompson_MP_MONTHLY_CLIMO.nc"
-    THOMPSON_MP_CLIMO_FP = os.path.join(FIXam, THOMPSON_MP_CLIMO_FN)
+    workflow_config['SDF_USES_RUC_LSM'] = check_ruc_lsm(
+        ccpp_phys_suite_fp=CCPP_PHYS_SUITE_IN_CCPP_FP
+    )
     #
     # -----------------------------------------------------------------------
     #
-    # Call the function that, if the Thompson microphysics parameterization
-    # is being called by the physics suite, modifies certain workflow arrays
-    # to ensure that fixed files needed by this parameterization are copied
-    # to the FIXam directory and appropriate symlinks to them are created in
-    # the run directories. This function also sets the workflow variable
-    # SDF_USES_THOMPSON_MP that indicates whether Thompson MP is called by
+    # Check if the Thompson microphysics parameterization is being
+    # called by the physics suite and modify certain workflow arrays to
+    # ensure that fixed files needed by this parameterization are copied
+    # to the FIXam directory and appropriate symlinks to them are
+    # created in the run directories. Set the boolean flag
+    # SDF_USES_THOMPSON_MP to indicate whether Thompson MP is called by
     # the physics suite. 
# # ----------------------------------------------------------------------- # - SDF_USES_THOMPSON_MP = set_thompson_mp_fix_files( + + link_thompson_climo = (get_extrn_ics['EXTRN_MDL_NAME_ICS'] not in ["HRRR", "RAP"]) \ + or (get_extrn_lbcs['EXTRN_MDL_NAME_LBCS'] not in ["HRRR", "RAP"]) + use_thompson, mapping, fix_files = set_thompson_mp_fix_files( ccpp_phys_suite_fp=CCPP_PHYS_SUITE_IN_CCPP_FP, thompson_mp_climo_fn=THOMPSON_MP_CLIMO_FN, + link_thompson_climo=link_thompson_climo, ) - IMPORTS = ["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING", "FIXgsm_FILES_TO_COPY_TO_FIXam"] - import_vars(env_vars=IMPORTS) - - # - # ----------------------------------------------------------------------- - # - # Generate the shell script that will appear in the experiment directory - # (EXPTDIR) and will contain definitions of variables needed by the va- - # rious scripts in the workflow. We refer to this as the experiment/ - # workflow global variable definitions file. We will create this file - # by: - # - # 1) Copying the default workflow/experiment configuration file (speci- - # fied by EXPT_DEFAULT_CONFIG_FN and located in the shell script di- - # rectory specified by USHdir) to the experiment directory and rena- - # ming it to the name specified by GLOBAL_VAR_DEFNS_FN. - # - # 2) Resetting the default variable values in this file to their current - # values. This is necessary because these variables may have been - # reset by the user-specified configuration file (if one exists in - # USHdir) and/or by this setup script, e.g. because predef_domain is - # set to a valid non-empty value. - # - # 3) Appending to the variable definitions file any new variables intro- - # duced in this setup script that may be needed by the scripts that - # perform the various tasks in the workflow (and which source the va- - # riable defintions file). - # - # First, set the full path to the variable definitions file and copy the - # default configuration script into it. 
- # - # ----------------------------------------------------------------------- - # - - # global variable definition file path - global GLOBAL_VAR_DEFNS_FP - GLOBAL_VAR_DEFNS_FP = os.path.join(EXPTDIR, GLOBAL_VAR_DEFNS_FN) - - # update dictionary with globals() values - update_dict(globals(), cfg_d) + workflow_config['SDF_USES_THOMPSON_MP'] = use_thompson - # constants section - cfg_d.update(cfg_c) - - # grid params - cfg_d["grid_params"] = grid_params + if use_thompson: + expt_config['task_run_fcst']['CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING'].append(mapping) + expt_config['task_run_fcst']['FIXgsm_FILES_TO_COPY_TO_FIXam'].append(fix_files) + log_info( + f""" + Since the Thompson microphysics parameterization is being used by this + physics suite (CCPP_PHYS_SUITE), the names of the fixed files needed by + this scheme have been appended to the array FIXgsm_FILES_TO_COPY_TO_FIXam, + and the mappings between these files and the symlinks that need to be + created in the cycle directories have been appended to the array + CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING. After these modifications, the + values of these parameters are as follows: + + CCPP_PHYS_SUITE = \"{CCPP_PHYS_SUITE}\" + """ + ) + log_info( + f""" + FIXgsm_FILES_TO_COPY_TO_FIXam = {list_to_str(FIXgsm_FILES_TO_COPY_TO_FIXam)} + """ + ) + log_info( + f""" + CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = {list_to_str(CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING)} + """ + ) # # ----------------------------------------------------------------------- # - # Append additional variable definitions (and comments) to the variable - # definitions file. These variables have been set above using the vari- - # ables in the default and local configuration scripts. These variables - # are needed by various tasks/scripts in the workflow. + # Generate var_defns.sh file in the EXPTDIR. This file contains all + # the user-specified settings from expt_config. 
# # ----------------------------------------------------------------------- # - settings = { - # - # ----------------------------------------------------------------------- - # - # Full path to workflow (re)launch script, its log file, and the line - # that gets added to the cron table to launch this script if the flag - # USE_CRON_TO_RELAUNCH is set to \"TRUE\". - # - # ----------------------------------------------------------------------- - # - "WFLOW_LAUNCH_SCRIPT_FP": WFLOW_LAUNCH_SCRIPT_FP, - "WFLOW_LAUNCH_LOG_FP": WFLOW_LAUNCH_LOG_FP, - "CRONTAB_LINE": CRONTAB_LINE, - # - # ----------------------------------------------------------------------- - # - # Directories. - # - # ----------------------------------------------------------------------- - # - "HOMEdir": HOMEdir, - "USHdir": USHdir, - "SCRIPTSdir": SCRIPTSdir, - "JOBSdir": JOBSdir, - "SORCdir": SORCdir, - "PARMdir": PARMdir, - "MODULESdir": MODULESdir, - "EXECdir": EXECdir, - "FIXdir": FIXdir, - "FIXam": FIXam, - "FIXclim": FIXclim, - "FIXlam": FIXlam, - "FIXgsm": FIXgsm, - "FIXaer": FIXaer, - "FIXlut": FIXlut, - "VX_CONFIG_DIR": VX_CONFIG_DIR, - "METPLUS_CONF": METPLUS_CONF, - "MET_CONFIG": MET_CONFIG, - "UFS_WTHR_MDL_DIR": UFS_WTHR_MDL_DIR, - "SFC_CLIMO_INPUT_DIR": SFC_CLIMO_INPUT_DIR, - "TOPO_DIR": TOPO_DIR, - "EXPTDIR": EXPTDIR, - "GRID_DIR": GRID_DIR, - "OROG_DIR": OROG_DIR, - "SFC_CLIMO_DIR": SFC_CLIMO_DIR, - "NDIGITS_ENSMEM_NAMES": NDIGITS_ENSMEM_NAMES, - "ENSMEM_NAMES": ENSMEM_NAMES, - "FV3_NML_ENSMEM_FPS": FV3_NML_ENSMEM_FPS, - # - # ----------------------------------------------------------------------- - # - # Files. 
- # - # ----------------------------------------------------------------------- - # - "GLOBAL_VAR_DEFNS_FP": GLOBAL_VAR_DEFNS_FP, - "DATA_TABLE_FN": DATA_TABLE_FN, - "DIAG_TABLE_FN": DIAG_TABLE_FN, - "FIELD_TABLE_FN": FIELD_TABLE_FN, - "MODEL_CONFIG_FN": MODEL_CONFIG_FN, - "NEMS_CONFIG_FN": NEMS_CONFIG_FN, - "DATA_TABLE_TMPL_FN": DATA_TABLE_TMPL_FN, - "DIAG_TABLE_TMPL_FN": DIAG_TABLE_TMPL_FN, - "FIELD_TABLE_TMPL_FN": FIELD_TABLE_TMPL_FN, - "MODEL_CONFIG_TMPL_FN": MODEL_CONFIG_TMPL_FN, - "NEMS_CONFIG_TMPL_FN": NEMS_CONFIG_TMPL_FN, - "DATA_TABLE_TMPL_FP": DATA_TABLE_TMPL_FP, - "DIAG_TABLE_TMPL_FP": DIAG_TABLE_TMPL_FP, - "FIELD_TABLE_TMPL_FP": FIELD_TABLE_TMPL_FP, - "FV3_NML_BASE_SUITE_FP": FV3_NML_BASE_SUITE_FP, - "FV3_NML_YAML_CONFIG_FP": FV3_NML_YAML_CONFIG_FP, - "FV3_NML_BASE_ENS_FP": FV3_NML_BASE_ENS_FP, - "MODEL_CONFIG_TMPL_FP": MODEL_CONFIG_TMPL_FP, - "NEMS_CONFIG_TMPL_FP": NEMS_CONFIG_TMPL_FP, - "CCPP_PHYS_SUITE_FN": CCPP_PHYS_SUITE_FN, - "CCPP_PHYS_SUITE_IN_CCPP_FP": CCPP_PHYS_SUITE_IN_CCPP_FP, - "CCPP_PHYS_SUITE_FP": CCPP_PHYS_SUITE_FP, - "FIELD_DICT_FN": FIELD_DICT_FN, - "FIELD_DICT_IN_UWM_FP": FIELD_DICT_IN_UWM_FP, - "FIELD_DICT_FP": FIELD_DICT_FP, - "DATA_TABLE_FP": DATA_TABLE_FP, - "FIELD_TABLE_FP": FIELD_TABLE_FP, - "FV3_NML_FN": FV3_NML_FN, # This may not be necessary... - "FV3_NML_FP": FV3_NML_FP, - "NEMS_CONFIG_FP": NEMS_CONFIG_FP, - "FV3_EXEC_FP": FV3_EXEC_FP, - "LOAD_MODULES_RUN_TASK_FP": LOAD_MODULES_RUN_TASK_FP, - "THOMPSON_MP_CLIMO_FN": THOMPSON_MP_CLIMO_FN, - "THOMPSON_MP_CLIMO_FP": THOMPSON_MP_CLIMO_FP, - # - # ----------------------------------------------------------------------- - # - # Flag for creating relative symlinks (as opposed to absolute ones). 
- # - # ----------------------------------------------------------------------- - # - "RELATIVE_LINK_FLAG": RELATIVE_LINK_FLAG, - # - # ----------------------------------------------------------------------- - # - # Parameters that indicate whether or not various parameterizations are - # included in and called by the physics suite. - # - # ----------------------------------------------------------------------- - # - "SDF_USES_RUC_LSM": SDF_USES_RUC_LSM, - "SDF_USES_THOMPSON_MP": SDF_USES_THOMPSON_MP, - # - # ----------------------------------------------------------------------- - # - # Grid configuration parameters needed regardless of grid generation - # method used. - # - # ----------------------------------------------------------------------- - # - "GTYPE": GTYPE, - "TILE_RGNL": TILE_RGNL, - "RES_IN_FIXLAM_FILENAMES": RES_IN_FIXLAM_FILENAMES, - # - # If running the make_grid task, CRES will be set to a null string during - # the grid generation step. It will later be set to an actual value after - # the make_grid task is complete. - # - "CRES": CRES, - # - # ----------------------------------------------------------------------- - # - # Flag in the \"{MODEL_CONFIG_FN}\" file for coupling the ocean model to - # the weather model. - # - # ----------------------------------------------------------------------- - # - "CPL": CPL, - # - # ----------------------------------------------------------------------- - # - # Name of the ozone parameterization. The value this gets set to depends - # on the CCPP physics suite being used. - # - # ----------------------------------------------------------------------- - # - "OZONE_PARAM": OZONE_PARAM, - # - # ----------------------------------------------------------------------- - # - # Computational parameters. 
- # - # ----------------------------------------------------------------------- - # - "PE_MEMBER01": PE_MEMBER01, - # - # ----------------------------------------------------------------------- - # - # IF DO_SPP is set to "TRUE", N_VAR_SPP specifies the number of physics - # parameterizations that are perturbed with SPP. If DO_LSM_SPP is set to - # "TRUE", N_VAR_LNDP specifies the number of LSM parameters that are - # perturbed. LNDP_TYPE determines the way LSM perturbations are employed - # and FHCYC_LSM_SPP_OR_NOT sets FHCYC based on whether LSM perturbations - # are turned on or not. - # - # ----------------------------------------------------------------------- - # - "N_VAR_SPP": N_VAR_SPP, - "N_VAR_LNDP": N_VAR_LNDP, - "LNDP_TYPE": LNDP_TYPE, - "LNDP_MODEL_TYPE": LNDP_MODEL_TYPE, - "FHCYC_LSM_SPP_OR_NOT": FHCYC_LSM_SPP_OR_NOT, - } - - # write derived settings - cfg_d["derived"] = settings - # - # ----------------------------------------------------------------------- - # - # NCO specific settings - # - # ----------------------------------------------------------------------- - # - settings = { - "COMIN_BASEDIR": COMIN_BASEDIR, - "COMOUT_BASEDIR": COMOUT_BASEDIR, - "OPSROOT": OPSROOT, - "COMROOT": COMROOT, - "PACKAGEROOT": PACKAGEROOT, - "DATAROOT": DATAROOT, - "DCOMROOT": DCOMROOT, - "DBNROOT": DBNROOT, - "SENDECF": SENDECF, - "SENDDBN": SENDDBN, - "SENDDBN_NTC": SENDDBN_NTC, - "SENDCOM": SENDCOM, - "SENDWEB": SENDWEB, - "KEEPDATA": KEEPDATA, - "MAILTO": MAILTO, - "MAILCC": MAILCC, - } - - cfg_d["nco"].update(settings) # # ----------------------------------------------------------------------- # @@ -1844,23 +1160,21 @@ def get_location(xcs, fmt, expt_cfg): # # print content of var_defns if DEBUG=True - all_lines = cfg_to_yaml_str(cfg_d) - log_info(all_lines, verbose=DEBUG) + all_lines = cfg_to_yaml_str(expt_config) + log_info(all_lines, verbose=debug) + global_var_defns_fp = workflow_config['GLOBAL_VAR_DEFNS_FP'] # print info message log_info( f""" - 
Generating the global experiment variable definitions file specified by - GLOBAL_VAR_DEFNS_FN: - GLOBAL_VAR_DEFNS_FN = \"{GLOBAL_VAR_DEFNS_FN}\" - Full path to this file is: - GLOBAL_VAR_DEFNS_FP = \"{GLOBAL_VAR_DEFNS_FP}\" + Generating the global experiment variable definitions file here: + GLOBAL_VAR_DEFNS_FP = \"{global_var_defns_fp}\" For more detailed information, set DEBUG to \"TRUE\" in the experiment - configuration file (\"{EXPT_CONFIG_FN}\").""" + configuration file (\"{user_config}\").""" ) - with open(GLOBAL_VAR_DEFNS_FP, "a") as f: - f.write(cfg_to_shell_str(cfg_d)) + with open(global_var_defns_fp, "a") as f: + f.write(cfg_to_shell_str(expt_config)) # export all global variables back to the environment export_vars() @@ -1873,21 +1187,21 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # - # loop through cfg_d and check validity of params + # loop through the flattened expt_config and check validity of params cfg_v = load_config_file("valid_param_vals.yaml") - cfg_d = flatten_dict(cfg_d) - for k, v in cfg_d.items(): + for k, v in flatten_dict(expt_config).items(): if v == None: continue vkey = "valid_vals_" + k if (vkey in cfg_v) and not (v in cfg_v[vkey]): raise Exception( f""" - The variable {k}={v} in {EXPT_DEFAULT_CONFIG_FN} or {EXPT_CONFIG_FN} + The variable {k}={v} in the user's configuration does not have a valid value. Possible values are: {k} = {cfg_v[vkey]}""" ) + return expt_config # # ----------------------------------------------------------------------- @@ -1897,4 +1211,5 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # if __name__ == "__main__": - setup() + USHdir = os.path.dirname(os.path.abspath(__file__)) + setup(USHdir) From 3a017e1056db55f061dbdbaaed6df5b601e91483 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 7 Nov 2022 20:38:42 +0000 Subject: [PATCH 03/19] Run black. 
--- ush/generate_FV3LAM_wflow.py | 16 +- ush/link_fix.py | 87 ++--- ush/set_gridparams_ESGgrid.py | 11 +- ush/set_ozone_param.py | 5 +- ush/set_predef_grid_params.py | 6 +- ush/set_thompson_mp_fix_files.py | 7 +- ush/setup.py | 555 +++++++++++++++++-------------- 7 files changed, 371 insertions(+), 316 deletions(-) diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 4ced08b686..73ba8e22ba 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -79,11 +79,11 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> # # ----------------------------------------------------------------------- # - wflow_xml_fn = expt_config['workflow']['WFLOW_XML_FN'] + wflow_xml_fn = expt_config["workflow"]["WFLOW_XML_FN"] wflow_xml_fp = os.path.join( - expt_config['workflow']['EXPTDIR'], - wflow_xml_fn, - ) + expt_config["workflow"]["EXPTDIR"], + wflow_xml_fn, + ) # # ----------------------------------------------------------------------- # @@ -95,12 +95,12 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> # # ----------------------------------------------------------------------- # - if expt_config['platform']['WORKFLOW_MANAGER'] == "rocoto": + if expt_config["platform"]["WORKFLOW_MANAGER"] == "rocoto": template_xml_fp = os.path.join( - expt_config['user']['PARMdir'], + expt_config["user"]["PARMdir"], wflow_xml_fn, - ) + ) log_info( f""" @@ -120,7 +120,7 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> ensmem_indx_name = "" uscore_ensmem_name = "" slash_ensmem_subdir = "" - if expt_config['global']['DO_ENSEMBLE']: + if expt_config["global"]["DO_ENSEMBLE"]: ensmem_indx_name = "mem" uscore_ensmem_name = f"_mem#{ensmem_indx_name}#" slash_ensmem_subdir = f"/mem#{ensmem_indx_name}#" diff --git a/ush/link_fix.py b/ush/link_fix.py index 059508b693..827f467b65 100755 --- a/ush/link_fix.py +++ b/ush/link_fix.py @@ -23,17 +23,19 @@ ) -def 
link_fix(verbose, - file_group, - source_dir, - target_dir, - ccpp_phys_suite, - constants, - dot_or_underscore, - nhw, - run_task, - sfc_climo_fields, - **kwargs): +def link_fix( + verbose, + file_group, + source_dir, + target_dir, + ccpp_phys_suite, + constants, + dot_or_underscore, + nhw, + run_task, + sfc_climo_fields, + **kwargs, +): """This file defines a function that links fix files to the target directory for a given SRW experiment. Only links files for one group at a time. @@ -61,10 +63,10 @@ def link_fix(verbose, check_var_valid_value(file_group, valid_vals_file_group) # Decompress the constants needed below. - nh0 = constants['NH0'] - nh3 = constants['NH3'] - nh4 = constants['NH4'] - tile_rgnl = constants['TILE_RGNL'] + nh0 = constants["NH0"] + nh3 = constants["NH3"] + nh4 = constants["NH4"] + tile_rgnl = constants["TILE_RGNL"] # # ----------------------------------------------------------------------- @@ -81,7 +83,8 @@ def link_fix(verbose, # ----------------------------------------------------------------------- # print_info_msg( - f"Creating links in the {target_dir} directory to the grid files...", verbose=verbose + f"Creating links in the {target_dir} directory to the grid files...", + verbose=verbose, ) # # ----------------------------------------------------------------------- @@ -343,7 +346,7 @@ def link_fix(verbose, # These are needed by the make_ics task. # # The forecat model needs sfc climo files to be named without the - # tile7 and halo references, and with only "tile1" in the name. + # tile7 and halo references, and with only "tile1" in the name. 
# # ----------------------------------------------------------------------- # @@ -383,7 +386,7 @@ def parse_args(argv): parser.add_argument( "-p", "--path-to-defns", - dest="path_to_defns" + dest="path_to_defns", required=True, help="Path to var_defns file.", ) @@ -394,32 +397,34 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) cfg = load_shell_config(args.path_to_defns) - link_fix(verbose=cfg['workflow']['VERBOSE'], - file_group=args.file_group, - source_dir=cfg['task_make_{args.file_group.upper()}'][f"{args.file_group}_DIR"], - target_dir=cfg['workflow']['FIXlam'], - ccpp_phys_suite=cfg['workflow']['CCPP_PHYS_SUITE'], - constants=cfg['constants'] - dot_or_underscore=cfg['workflow']['DOT_OR_USCORE'], - nhw=cfg['grid_params']['NHW'], - run_task=True, - sfc_climo_fields=cfg['task_run_fcst']['SFC_CLIMO_FIELDS'], - ) + link_fix( + verbose=cfg["workflow"]["VERBOSE"], + file_group=args.file_group, + source_dir=cfg["task_make_{args.file_group.upper()}"][f"{args.file_group}_DIR"], + target_dir=cfg["workflow"]["FIXlam"], + ccpp_phys_suite=cfg["workflow"]["CCPP_PHYS_SUITE"], + constants=cfg["constants"], + dot_or_underscore=cfg["workflow"]["DOT_OR_USCORE"], + nhw=cfg["grid_params"]["NHW"], + run_task=True, + sfc_climo_fields=cfg["task_run_fcst"]["SFC_CLIMO_FIELDS"], + ) class Testing(unittest.TestCase): def test_link_fix(self): - res = link_fix(verbose=True, - file_group="grid", - source_dir=self.task_dir, - target_dir=self.FIXlam, - ccpp_phys_suite=self.cfg['CCPP_PHYS_SUITE'], - constants=self.cfg['constants'] - dot_or_underscore=self.cfg['DOT_OR_USCORE'], - nhw=self.cfg['NHW'], - run_task=False, - sfc_climo_fields=['foo', 'bar'] - ) + res = link_fix( + verbose=True, + file_group="grid", + source_dir=self.task_dir, + target_dir=self.FIXlam, + ccpp_phys_suite=self.cfg["CCPP_PHYS_SUITE"], + constants=self.cfg["constants"], + dot_or_underscore=self.cfg["DOT_OR_USCORE"], + nhw=self.cfg["NHW"], + run_task=False, + sfc_climo_fields=["foo", 
"bar"], + ) self.assertTrue(res == "3357") def setUp(self): @@ -439,4 +444,4 @@ def setUp(self): "NH3": 3, "TILE_RGNL": 7, }, - } + } diff --git a/ush/set_gridparams_ESGgrid.py b/ush/set_gridparams_ESGgrid.py index 62f9dd4585..3346fe0bfd 100644 --- a/ush/set_gridparams_ESGgrid.py +++ b/ush/set_gridparams_ESGgrid.py @@ -13,8 +13,9 @@ ) -def set_gridparams_ESGgrid(lon_ctr, lat_ctr, nx, ny, halo_width, delx, - dely, pazi, constants): +def set_gridparams_ESGgrid( + lon_ctr, lat_ctr, nx, ny, halo_width, delx, dely, pazi, constants +): """Sets the parameters for a grid that is to be generated using the "ESGgrid" grid generation method (i.e. GRID_GEN_METHOD set to "ESGgrid"). @@ -35,8 +36,8 @@ def set_gridparams_ESGgrid(lon_ctr, lat_ctr, nx, ny, halo_width, delx, print_input_args(locals()) # get constants - RADIUS_EARTH = constants['RADIUS_EARTH'] - DEGS_PER_RADIAN = constants['DEGS_PER_RADIAN'] + RADIUS_EARTH = constants["RADIUS_EARTH"] + DEGS_PER_RADIAN = constants["DEGS_PER_RADIAN"] # # ----------------------------------------------------------------------- @@ -90,7 +91,7 @@ def test_set_gridparams_ESGgrid(self): constants=dict( RADIUS_EARTH=6371200.0, DEGS_PER_RADIAN=57.2957795131, - ) + ), ) self.assertEqual( diff --git a/ush/set_ozone_param.py b/ush/set_ozone_param.py index df86aff1cf..7fc146105f 100644 --- a/ush/set_ozone_param.py +++ b/ush/set_ozone_param.py @@ -14,6 +14,7 @@ find_pattern_in_str, ) + def set_ozone_param(ccpp_phys_suite_fp, link_mappings): """Function that does the following: (1) Determines the ozone parameterization being used by checking in the @@ -110,7 +111,7 @@ def set_ozone_param(ccpp_phys_suite_fp, link_mappings): ozone_link_mappings = copy.deepcopy(link_mappings) for i, mapping in enumerate(ozone_link_mappings): - symlink = mapping.split('|')[0] + symlink = mapping.split("|")[0] if symlink.strip() == ozone_symlink: ozone_link_mappings[i] = f"{symlink}| {fixgsm_ozone_fn}" fixgsm_ozone_fn_is_set = True @@ -136,7 +137,7 @@ def 
test_set_ozone_param(self): self.assertEqual( "ozphys_2015", set_ozone_param( - ccpp_phys_suite_fp=f"{USHdir}{os.sep}test_data{os.sep}suite_FV3_GSD_SAR.xml" + f"{USHdir}{os.sep}test_data{os.sep}suite_FV3_GSD_SAR.xml", self.CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING, ), ) diff --git a/ush/set_predef_grid_params.py b/ush/set_predef_grid_params.py index 122377d41a..949018f5ab 100644 --- a/ush/set_predef_grid_params.py +++ b/ush/set_predef_grid_params.py @@ -19,8 +19,8 @@ def set_predef_grid_params(USHdir, fcst_config): Returns: Dictionary of grid parameters """ - predef_grid_name = fcst_config['PREDEF_GRID_NAME'] - quilting = fcst_config['QUILTING'] + predef_grid_name = fcst_config["PREDEF_GRID_NAME"] + quilting = fcst_config["QUILTING"] params_dict = load_config_file(os.path.join(USHdir, "predef_grid_params.yaml")) try: @@ -72,5 +72,3 @@ def test_set_predef_grid_params(self): fcst_config, ) self.assertEqual(params_dict["WRTCMP_nx"], 1799) - - def setUp(self): diff --git a/ush/set_thompson_mp_fix_files.py b/ush/set_thompson_mp_fix_files.py index d3e2d6eb15..59a702f327 100644 --- a/ush/set_thompson_mp_fix_files.py +++ b/ush/set_thompson_mp_fix_files.py @@ -14,8 +14,9 @@ ) -def set_thompson_mp_fix_files(ccpp_phys_suite_fp, thompson_mp_climo_fn, - link_thompson_climo): +def set_thompson_mp_fix_files( + ccpp_phys_suite_fp, thompson_mp_climo_fn, link_thompson_climo +): """Function that first checks whether the Thompson microphysics parameterization is being called by the selected physics suite. 
If not, it sets the output variable whose name is specified by @@ -87,7 +88,7 @@ def set_thompson_mp_fix_files(ccpp_phys_suite_fp, thompson_mp_climo_fn, class Testing(unittest.TestCase): def test_set_thompson_mp_fix_files(self): USHdir = os.path.dirname(os.path.abspath(__file__)) - uses_thompson, _, _ = set_thompson_mp_fix_files( + uses_thompson, _, _ = set_thompson_mp_fix_files( f"{USHdir}{os.sep}test_data{os.sep}suite_FV3_GSD_SAR.xml", "Thompson_MP_MONTHLY_CLIMO.nc", False, diff --git a/ush/setup.py b/ush/setup.py index 3515192ee0..1648d541bb 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -41,7 +41,7 @@ def load_config_for_setup(ushdir, default_config, user_config): - """ Load in the default, machine, and user configuration files into + """Load in the default, machine, and user configuration files into Python dictionaries. Return the combined experiment dictionary. Args: @@ -59,45 +59,60 @@ def load_config_for_setup(ushdir, default_config, user_config): # Load the user config file, then ensure all user-specified # variables correspond to a default value. if not os.path.exists(user_config): - raise FileNotFoundError(f'User config file not found: - user_config = {user_config}') + raise FileNotFoundError( + f""" + User config file not found: + user_config = {user_config} + """ + ) try: cfg_u = load_config_file(user_config) except: - errmsg = dedent(f'''\n - Could not load YAML config file: {user_config} - Reference the above traceback for more information. - ''') + errmsg = dedent( + f"""\n + Could not load YAML config file: {user_config} + Reference the above traceback for more information. + """ + ) raise Exception(errmsg) # Make sure the keys in user config match those in the default # config. 
if not check_structure_dict(cfg_u, cfg_d): - raise Exception(dedent(f''' - User-specified variable "{key}" in {user_config} is not valid - Check {EXPT_DEFAULT_CONFIG_FN} for allowed user-specified variables\n''')) + raise Exception( + dedent( + f""" + User-specified variable "{key}" in {user_config} is not valid + Check {EXPT_DEFAULT_CONFIG_FN} for allowed user-specified variables\n + """ + ) + ) # Mandatory variables *must* be set in the user's config; the default value is invalid - mandatory = ['user.MACHINE'] + mandatory = ["user.MACHINE"] for val in mandatory: - sect, key = val.split('.') + sect, key = val.split(".") user_setting = cfg_u.get(sect, {}).get(key) if user_setting is None: - raise Exception(f'Mandatory variable "{val}" not found in - user config file {user_config}') + raise Exception( + f"""Mandatory variable "{val}" not found in + user config file {user_config}""" + ) # Load the machine config file - machine = cfg_u.get('user').get('MACHINE') + machine = cfg_u.get("user").get("MACHINE") machine_file = os.path.join(ushdir, "machine", f"{lowercase(machine)}.yaml") if not os.path.exists(machine_file): - raise FileNotFoundError(dedent( - f""" + raise FileNotFoundError( + dedent( + f""" The machine file {machine_file} does not exist. 
Check that you have specified the correct machine ({machine}) in your config file {user_config}""" - )) + ) + ) machine_cfg = load_config_file(machine_file) # Load the constants file @@ -126,49 +141,57 @@ def load_config_for_setup(ushdir, default_config, user_config): # Mandatory variables *must* be set in the user's config or the machine file; the default value is invalid mandatory = [ - 'NCORES_PER_NODE', - 'FIXgsm', - 'FIXaer', - 'FIXlut', - 'TOPO_DIR', - 'SFC_CLIMO_INPUT_DIR', + "NCORES_PER_NODE", + "FIXgsm", + "FIXaer", + "FIXlut", + "TOPO_DIR", + "SFC_CLIMO_INPUT_DIR", ] for val in mandatory: - if not cfg_d.get('task_run_fcst', {}).get('val'): - raise Exception(dedent(f''' + if not cfg_d.get("task_run_fcst", {}).get("val"): + raise Exception( + dedent( + f""" Mandatory variable "{val}" not found in: user config file {user_config} OR machine file {machine_file} - ''')) + """ + ) + ) # Check that input dates are in a date format - dates = ['DATE_FIRST_CYCL', 'DATE_LAST_CYCL'] + dates = ["DATE_FIRST_CYCL", "DATE_LAST_CYCL"] for val in dates: - if not isinstance(cfg_d['user'][val], datetime.date): - raise Exception(dedent(f''' + if not isinstance(cfg_d["user"][val], datetime.date): + raise Exception( + dedent( + f""" Date variable {val}={cfg_d['user'][val]} is not in a valid date format. For examples of valid formats, see the Users' Guide. - ''')) + """ + ) + ) # Check to make sure mandatory workflow variables are set. - vlist = ['EXPT_SUBDIR'] + vlist = ["EXPT_SUBDIR"] for val in vlist: - if not cfg_d['task_run_fcst'].get('val') + if not cfg_d["task_run_fcst"].get("val"): raise Exception(f"\nMandatory variable '{val}' has not been set\n") # Check to make sure that mandatory forecast variables are set. 
- vlist = ['DT_ATMOS', - 'LAYOUT_X', - 'LAYOUT_Y', - 'BLOCKSIZE', - ] + vlist = [ + "DT_ATMOS", + "LAYOUT_X", + "LAYOUT_Y", + "BLOCKSIZE", + ] for val in vlist: - if not cfg_d['task_run_fcst'].get('val') + if not cfg_d["task_run_fcst"].get("val"): raise Exception(f"\nMandatory variable '{val}' has not been set\n") - return cfg_d @@ -185,16 +208,14 @@ def set_srw_paths(ushdir, expt_config): ushdir: (str) path to the system location of the ush/ directory under the SRW clone expt_config: (dict) contains the configuration settings for the - user-defined experiment + user-defined experiment Returns: dictionary of config settings and system paths as keys/values """ # HOMEdir is the location of the SRW clone, one directory above ush/ - home_dir = os.path.abspath( - os.path.dirname(__file__) + os.sep + os.pardir - ) + home_dir = os.path.abspath(os.path.dirname(__file__) + os.sep + os.pardir) # Read Externals.cfg mng_extrns_cfg_fn = os.path.join(home_dir, "Externals.cfg") @@ -205,7 +226,7 @@ def set_srw_paths(ushdir, expt_config): cfg = load_ini_config(mng_extrns_cfg_fn) # Get the base directory of the FV3 forecast model code. 
- external_name = expt_config.get('workflow', {}).get('FCST_MODEL') + external_name = expt_config.get("workflow", {}).get("FCST_MODEL") property_name = "local_path" try: @@ -233,10 +254,11 @@ def set_srw_paths(ushdir, expt_config): ) return dict( - HOMEdir = home_dir, - USHdir = ushdir, - UFS_WTHR_MDL_DIR = ufs_wthr_mdl_dir, - ) + HOMEdir=home_dir, + USHdir=ushdir, + UFS_WTHR_MDL_DIR=ufs_wthr_mdl_dir, + ) + def setup(USHdir, user_config_fn="config.yaml"): """Function that validates user-provided configuration, and derives @@ -276,8 +298,7 @@ def setup(USHdir, user_config_fn="config.yaml"): expt_config = load_config_for_setup(USHdir, default_config_fp, user_config_fp) # Set up some paths relative to the SRW clone - expt_config['user'].update(set_srw_paths(USHdir, expt_config)) - + expt_config["user"].update(set_srw_paths(USHdir, expt_config)) # # ----------------------------------------------------------------------- @@ -297,20 +318,20 @@ def setup(USHdir, user_config_fn="config.yaml"): workflow_config["WORKFLOW_ID"] = workflow_id log_info(f"""WORKFLOW ID = {workflow_id}""") - debug = workflow_config.get('DEBUG') + debug = workflow_config.get("DEBUG") if debug: log_info( """ Setting VERBOSE to \"TRUE\" because DEBUG has been set to \"TRUE\"...""" ) - workflow_config['VERBOSE'] = True + workflow_config["VERBOSE"] = True - verbose = workflow_config['VERBOSE'] + verbose = workflow_config["VERBOSE"] # The forecast length (in integer hours) cannot contain more than 3 characters. # Thus, its maximum value is 999. 
fcst_len_hrs_max = 999 - fcst_len_hrs = workflow_config.get('FCST_LEN_HRS') + fcst_len_hrs = workflow_config.get("FCST_LEN_HRS") if fcst_len_hrs > fcst_len_hrs_max: raise ValueError( f""" @@ -335,7 +356,7 @@ def setup(USHdir, user_config_fn="config.yaml"): # # ----------------------------------------------------------------------- # - expt_basedir = workflow_config.get('EXPT_BASEDIR') + expt_basedir = workflow_config.get("EXPT_BASEDIR") if (not expt_basedir) or (expt_basedir[0] != "/"): if not expt_basedir: expt_basedir = "" @@ -347,7 +368,7 @@ def setup(USHdir, user_config_fn="config.yaml"): expt_basedir = os.path.abspath(expt_basedir) mkdir_vrfy(f' -p "{expt_basedir}"') - workflow_config['EXPT_BASEDIR'] = expt_basedir + workflow_config["EXPT_BASEDIR"] = expt_basedir # # ----------------------------------------------------------------------- @@ -358,30 +379,33 @@ def setup(USHdir, user_config_fn="config.yaml"): # ----------------------------------------------------------------------- # - expt_subdir = workflow_config.get('EXPT_SUBDIR', '') - exptdir = workflow_config['EXPTDIR'] - preexisting_dir_method = workflow_config.get('PREEXISTING_DIR_METHOD', '') + expt_subdir = workflow_config.get("EXPT_SUBDIR", "") + exptdir = workflow_config["EXPTDIR"] + preexisting_dir_method = workflow_config.get("PREEXISTING_DIR_METHOD", "") try: check_for_preexist_dir_file(exptdir, preexisting_dir_method) except ValueError: - logger.exception(f''' + logger.exception( + f""" Check that the following values are valid: EXPTDIR {exptdir} PREEXISTING_DIR_METHOD {preexisting_dir_method} - ''') + """ + ) raise except FileExistsError: - errmsg = dedent(f''' + errmsg = dedent( + f""" EXPTDIR ({exptdir}) already exists, and PREEXISTING_DIR_METHOD = {preexisting_dir_method} To ignore this error, delete the directory, or set PREEXISTING_DIR_METHOD = delete, or PREEXISTING_DIR_METHOD = rename in your config file. 
- ''') + """ + ) raise FileExistsError(errmsg) from None - # # ----------------------------------------------------------------------- # @@ -390,11 +414,11 @@ def setup(USHdir, user_config_fn="config.yaml"): # # ----------------------------------------------------------------------- # - if workflow_config.get('USE_CRON_TO_RELAUNCH'): - intvl_mnts = workflow_config.get('CRON_RELAUNCH_INTVL_MNTS') - launch_script_fn = workflow_config.get('WFLOW_LAUNCH_SCRIPT_FN') - launch_log_fn = workflow_config.get('WFLOW_LAUNCH_LOG_FN') - workflow_config['CRONTAB_LINE'] = ( + if workflow_config.get("USE_CRON_TO_RELAUNCH"): + intvl_mnts = workflow_config.get("CRON_RELAUNCH_INTVL_MNTS") + launch_script_fn = workflow_config.get("WFLOW_LAUNCH_SCRIPT_FN") + launch_log_fn = workflow_config.get("WFLOW_LAUNCH_LOG_FN") + workflow_config["CRONTAB_LINE"] = ( f"""*/{intvl_mnts} * * * * cd {exptdir} && """ f"""./{launch_script_fn} called_from_cron="TRUE" >> ./{launch_log_fn} 2>&1""" ) @@ -407,9 +431,10 @@ def setup(USHdir, user_config_fn="config.yaml"): # # Necessary tasks are turned on - pregen_basedir = expt_config['platform'].get('DOMAIN_PREGEN_BASEDIR') - if pregen_basedir is None and not \ - (run_task_make_grid and run_task_make_orog and run_task_make_sfc_climo): + pregen_basedir = expt_config["platform"].get("DOMAIN_PREGEN_BASEDIR") + if pregen_basedir is None and not ( + run_task_make_grid and run_task_make_orog and run_task_make_sfc_climo + ): raise Exception( f""" DOMAIN_PREGEN_BASEDIR must be set when any of the following @@ -420,12 +445,15 @@ def setup(USHdir, user_config_fn="config.yaml"): ) # A batch system account is specified - if expt_config['platform'].get('WORKFLOW_MANAGER') is not None: - if not expt.get('user').get('ACCOUNT'): - raise Exception(dedent(f''' + if expt_config["platform"].get("WORKFLOW_MANAGER") is not None: + if not expt.get("user").get("ACCOUNT"): + raise Exception( + dedent( + f""" ACCOUNT must be specified in config or machine file if using a workflow 
manager. - WORKFLOW_MANAGER = {expt_config["platform"].get("WORKFLOW_MANAGER")}\n''' - )) + WORKFLOW_MANAGER = {expt_config["platform"].get("WORKFLOW_MANAGER")}\n""" + ) + ) # # ----------------------------------------------------------------------- @@ -436,29 +464,29 @@ def setup(USHdir, user_config_fn="config.yaml"): # def get_location(xcs, fmt, expt_cfg): if ("data" in expt_cfg) and (xcs in expt_cfg["data"]): - v = expt_cfg["data"][xcs] - if not isinstance(v,dict): - return v - else: - return v[fmt] + v = expt_cfg["data"][xcs] + if not isinstance(v, dict): + return v + else: + return v[fmt] else: - return "" + return "" # Get the paths to any platform-supported data streams - get_extrn_ics = expt_config.get('task_get_extrn_ics', {}) + get_extrn_ics = expt_config.get("task_get_extrn_ics", {}) extrn_mdl_sysbasedir_ics = get_location( - get_extrn_ics.get('EXTRN_MDL_NAME_ICS'), - get_extrn_ics.get('FV3GFS_FILE_FMT_ICS'), - expt_config, - ) + get_extrn_ics.get("EXTRN_MDL_NAME_ICS"), + get_extrn_ics.get("FV3GFS_FILE_FMT_ICS"), + expt_config, + ) get_extrn_ics["EXTRN_MDL_SYSBASEDIR_ICS"] = extrn_mdl_sysbasedir_ics - get_extrn_lbcs = expt_config.get('task_get_extrn_lbcs', {}) + get_extrn_lbcs = expt_config.get("task_get_extrn_lbcs", {}) extrn_mdl_sysbasedir_lbcs = get_location( - get_extrn_lbcs.get('EXTRN_MDL_NAME_LBCS'), - get_extrn_lbcs.get('FV3GFS_FILE_FMT_LBCS'), - expt_config, - ) + get_extrn_lbcs.get("EXTRN_MDL_NAME_LBCS"), + get_extrn_lbcs.get("FV3GFS_FILE_FMT_LBCS"), + expt_config, + ) get_extrn_lbcs["EXTRN_MDL_SYSBASEDIR_LBCS"] = extrn_mdl_sysbasedir_lbcs # remove the data key -- it's not needed beyond this point @@ -469,11 +497,11 @@ def get_location(xcs, fmt, expt_cfg): # USE_USER_STAGED_EXTRN_FILES is set to TRUE task_keys = zip( [get_extrn_ics, get_extrn_lbcs], - ['EXTRN_MDL_SOURCE_BASEDIR_ICS', 'EXTRN_MDL_SOURCE_BASEDIR_LBCS'], - ) + ["EXTRN_MDL_SOURCE_BASEDIR_ICS", "EXTRN_MDL_SOURCE_BASEDIR_LBCS"], + ) for task, data_key in task_keys: - 
use_staged_extrn_files = task.get('USE_USER_STAGED_EXTRN_FILES') + use_staged_extrn_files = task.get("USE_USER_STAGED_EXTRN_FILES") if use_staged_extrn_files: basedir = task[data_key] # Check for the base directory up to the first templated field. @@ -499,18 +527,17 @@ def get_location(xcs, fmt, expt_cfg): # # Gather the pre-defined grid parameters, if needed - fcst_config = expt_config['task_run_fcst'] - grid_config = expt_confg['task_make_grid'] - if fcst_config.get('PREDEF_GRID_NAME'): + fcst_config = expt_config["task_run_fcst"] + grid_config = expt_confg["task_make_grid"] + if fcst_config.get("PREDEF_GRID_NAME"): grid_params = set_predef_grid_params(USHdir, fcst_config) # Users like to change these variables, so don't overwrite them special_vars = ["DT_ATMOS", "LAYOUT_X", "LAYOUT_Y", "BLOCKSIZE"] for param, value in grid_params.items(): - if param in special_vars and - fcst_config.get(param) is not None: + if param in special_vars and fcst_config.get(param) is not None: continue - elif param.startswith('WRTCMP'): + elif param.startswith("WRTCMP"): fcst_config[param] = value else: grid_config[param] = value @@ -523,33 +550,33 @@ def get_location(xcs, fmt, expt_cfg): # # ----------------------------------------------------------------------- # - grid_gen_method = workflow_config['GRID_GEN_METHOD'] + grid_gen_method = workflow_config["GRID_GEN_METHOD"] if grid_gen_method == "GFDLgrid": grid_params = set_gridparams_GFDLgrid( - lon_of_t6_ctr=grid_config['GFDLgrid_LON_T6_CTR'], - lat_of_t6_ctr=grid_config['GFDLgrid_LAT_T6_CTR'], - res_of_t6g=grid_config['GFDLgrid_NUM_CELLS'], - stretch_factor=grid_config['GFDLgrid_STRETCH_FAC'], - refine_ratio_t6g_to_t7g=grid_config['GFDLgrid_REFINE_RATIO'], - istart_of_t7_on_t6g=grid_config['GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G'], - iend_of_t7_on_t6g=grid_config['GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G'], - jstart_of_t7_on_t6g=grid_config['GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G'], - 
jend_of_t7_on_t6g=grid_config['GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G'], + lon_of_t6_ctr=grid_config["GFDLgrid_LON_T6_CTR"], + lat_of_t6_ctr=grid_config["GFDLgrid_LAT_T6_CTR"], + res_of_t6g=grid_config["GFDLgrid_NUM_CELLS"], + stretch_factor=grid_config["GFDLgrid_STRETCH_FAC"], + refine_ratio_t6g_to_t7g=grid_config["GFDLgrid_REFINE_RATIO"], + istart_of_t7_on_t6g=grid_config["GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G"], + iend_of_t7_on_t6g=grid_config["GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G"], + jstart_of_t7_on_t6g=grid_config["GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G"], + jend_of_t7_on_t6g=grid_config["GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G"], verbose=verbose, - nh4=expt_config['constants']['NH4'], + nh4=expt_config["constants"]["NH4"], run_envir=run_envir, ) elif grid_gen_method == "ESGgrid": grid_params = set_gridparams_ESGgrid( - lon_ctr=grid_config['ESGgrid_LON_CTR'], - lat_ctr=grid_config['ESGgrid_LAT_CTR'], - nx=grid_config['ESGgrid_NX'], - ny=grid_config['ESGgrid_NY'], - pazi=grid_config['ESGgrid_PAZI'], - halo_width=grid_config['ESGgrid_WIDE_HALO_WIDTH'], - delx=grid_config['ESGgrid_DELX'], - dely=grid_config['ESGgrid_DELY'], - constants=expt_config['constants'], + lon_ctr=grid_config["ESGgrid_LON_CTR"], + lat_ctr=grid_config["ESGgrid_LAT_CTR"], + nx=grid_config["ESGgrid_NX"], + ny=grid_config["ESGgrid_NY"], + pazi=grid_config["ESGgrid_PAZI"], + halo_width=grid_config["ESGgrid_WIDE_HALO_WIDTH"], + delx=grid_config["ESGgrid_DELX"], + dely=grid_config["ESGgrid_DELY"], + constants=expt_config["constants"], ) else: grid_params = { @@ -578,13 +605,13 @@ def get_location(xcs, fmt, expt_cfg): # # ----------------------------------------------------------------------- # - global_sect = expt_config['global'] - if not global_sect.get('DO_SHUM'): - global_sect['SHUM_MAG'] = -999.0 - if not global_sect.get('DO_SKEB'): - global_sect['SKEB_MAG'] = -999.0 - if not global_sect.get('DO_SPPT'): - global_sect['SPPT_MAG'] = -999.0 + global_sect = expt_config["global"] + if not global_sect.get("DO_SHUM"): + 
global_sect["SHUM_MAG"] = -999.0 + if not global_sect.get("DO_SKEB"): + global_sect["SKEB_MAG"] = -999.0 + if not global_sect.get("DO_SPPT"): + global_sect["SPPT_MAG"] = -999.0 # # ----------------------------------------------------------------------- # @@ -594,10 +621,10 @@ def get_location(xcs, fmt, expt_cfg): # # ----------------------------------------------------------------------- # - if global_sect.get('DO_SPP'): - global_sect['N_VAR_SPP'] = len(global_sect['SPP_VAR_LIST']) + if global_sect.get("DO_SPP"): + global_sect["N_VAR_SPP"] = len(global_sect["SPP_VAR_LIST"]) else: - global_sect['N_VAR_SPP'] = 0 + global_sect["N_VAR_SPP"] = 0 # # ----------------------------------------------------------------------- # @@ -607,25 +634,26 @@ def get_location(xcs, fmt, expt_cfg): # # ----------------------------------------------------------------------- # - spp_vars = ['SPP_MAG_LIST', - 'SPP_LSCALE', - 'SPP_TSCALE', - 'SPP_SIGTOP1', - 'SPP_SIGTOP2', - 'SPP_STDDEV_CUTOFF', - 'ISEED_SPP', - ] + spp_vars = [ + "SPP_MAG_LIST", + "SPP_LSCALE", + "SPP_TSCALE", + "SPP_SIGTOP1", + "SPP_SIGTOP2", + "SPP_STDDEV_CUTOFF", + "ISEED_SPP", + ] - if global_sect.get('DO_SPP'): + if global_sect.get("DO_SPP"): for spp_var in spp_vars: - if len(global_sect[spp_var]) != global_sect['N_VAR_SPP']: + if len(global_sect[spp_var]) != global_sect["N_VAR_SPP"]: raise Exception( - f''' + f""" All MYNN PBL, MYNN SFC, GSL GWD, Thompson MP, or RRTMG SPP-related namelist variables must be of equal length to SPP_VAR_LIST: SPP_VAR_LIST (length {global_sect['N_VAR_SPP']}) {spp_var} (length {len(global_sect[spp_var])}) - ''' + """ ) # # ----------------------------------------------------------------------- @@ -642,16 +670,16 @@ def get_location(xcs, fmt, expt_cfg): # # ----------------------------------------------------------------------- # - if global_sect.get('DO_LSM_SPP'): - global_sect['N_VAR_LNDP'] = len(global_sect['LSM_SPP_VAR_LIST']) - global_sect['LNDP_TYPE'] = 2 - 
global_sect['LNDP_MODEL_TYPE'] = 2 - global_sect['FHCYC_LSM_SPP_OR_NOT'] = 999 - else - global_sect['N_VAR_LNDP'] = 0 - global_sect['LNDP_TYPE'] = 0 - global_sect['LNDP_MODEL_TYPE'] = 0 - global_sect['FHCYC_LSM_SPP_OR_NOT'] = 0 + if global_sect.get("DO_LSM_SPP"): + global_sect["N_VAR_LNDP"] = len(global_sect["LSM_SPP_VAR_LIST"]) + global_sect["LNDP_TYPE"] = 2 + global_sect["LNDP_MODEL_TYPE"] = 2 + global_sect["FHCYC_LSM_SPP_OR_NOT"] = 999 + else: + global_sect["N_VAR_LNDP"] = 0 + global_sect["LNDP_TYPE"] = 0 + global_sect["LNDP_MODEL_TYPE"] = 0 + global_sect["FHCYC_LSM_SPP_OR_NOT"] = 0 # # ----------------------------------------------------------------------- # @@ -661,38 +689,40 @@ def get_location(xcs, fmt, expt_cfg): # # ----------------------------------------------------------------------- # - lsm_spp_vars = ['LSM_SPP_MAG_LIST', - 'LSM_SPP_LSCALE', - 'LSM_SPP_TSCALE', - ] - if global_sect.get('DO_LSM_SPP'): + lsm_spp_vars = [ + "LSM_SPP_MAG_LIST", + "LSM_SPP_LSCALE", + "LSM_SPP_TSCALE", + ] + if global_sect.get("DO_LSM_SPP"): for lsm_spp_var in lsm_spp_vars: - if len(global_sect[lsm_spp_var]) != global_sect['N_VAR_LNDP']: + if len(global_sect[lsm_spp_var]) != global_sect["N_VAR_LNDP"]: raise Exception( - f''' + f""" All MYNN PBL, MYNN SFC, GSL GWD, Thompson MP, or RRTMG SPP-related namelist variables must be of equal length to SPP_VAR_LIST: All Noah or RUC-LSM SPP-related namelist variables (except ISEED_LSM_SPP) must be equal of equal length to LSM_SPP_VAR_LIST: LSM_SPP_VAR_LIST (length {global_sect['N_VAR_LNDP']}) {lsm_spp_var} (length {len(global_sect[lsm_spp_var])} - ''' + """ ) - # Make sure RESTART_INTERVAL is set to an integer value - restart_interval = fcst_config.get('RESTART_INTERVAL') + restart_interval = fcst_config.get("RESTART_INTERVAL") if not isinstance(restart_interval, int): try: - fcst_config['RESTART_INTERVAL'] = int(restart_interval) + fcst_config["RESTART_INTERVAL"] = int(restart_interval) except ValueError: - raise 
ValueError(f"\nRESTART_INTERVAL = {restart_interval}, must be an integer value\n") + raise ValueError( + f"\nRESTART_INTERVAL = {restart_interval}, must be an integer value\n" + ) # Check whether the forecast length (FCST_LEN_HRS) is evenly divisible - # by the BC update interval (LBC_SPEC_INTVL_HRS). If so, generate an + # by the BC update interval (LBC_SPEC_INTVL_HRS). If so, generate an # array of forecast hours at which the boundary values will be updated. - lbc_spec_intvl_hrs = get_extrn_lbcs.get('LBC_SPEC_INTVL_HRS') + lbc_spec_intvl_hrs = get_extrn_lbcs.get("LBC_SPEC_INTVL_HRS") rem = fcst_len_hrs % lbc_spec_intvl_hrs if rem != 0: raise Exception( @@ -713,63 +743,72 @@ def get_location(xcs, fmt, expt_cfg): # # If using a custom post configuration file, make sure that it exists. - post_config = expt_config('task_run_post') - if post_config.get('USE_CUSTOM_POST_CONFIG_FILE'): - custom_post_config_fp = post_config.get('CUSTOM_POST_CONFIG_FP') + post_config = expt_config("task_run_post") + if post_config.get("USE_CUSTOM_POST_CONFIG_FILE"): + custom_post_config_fp = post_config.get("CUSTOM_POST_CONFIG_FP") try: # os.path.exists returns exception if passed None, so use # "try/except" to catch it and the non-existence of a # provided path if not os.path.exists(custom_post_config_fp): - raise FileNotFoundError(dedent( - f''' + raise FileNotFoundError( + dedent( + f""" USE_CUSTOM_POST_CONFIG_FILE has been set, but the custom post configuration file CUSTOM_POST_CONFIG_FP = {custom_post_config_fp} - could not be found.''' - )) from None + could not be found.""" + ) + ) from None except TypeError: - raise TypeError(dedent( - f""" + raise TypeError( + dedent( + f""" USE_CUSTOM_POST_CONFIG_FILE has been set, but the custom post configuration file path (CUSTOM_POST_CONFIG_FP) is None. 
- """)) from None + """ + ) + ) from None except FileNotFoundError: raise - # If using external CRTM fix files to allow post-processing of synthetic # satellite products from the UPP, make sure the CRTM fix file directory exists. - if global_sect.get('USE_CRTM'): - crtm_dir = global_sect.get('CRTM_DIR') + if global_sect.get("USE_CRTM"): + crtm_dir = global_sect.get("CRTM_DIR") try: # os.path.exists returns exception if passed None, so use # "try/except" to catch it and the non-existence of a # provided path if not os.path.exists(crtm_dir): - raise FileNotFoundError(dedent( - f''' + raise FileNotFoundError( + dedent( + f""" USE_CRTM has been set, but the external CRTM fix file directory: CRTM_DIR = {crtm_dir} - could not be found.''' - )) from None + could not be found.""" + ) + ) from None except TypeError: - raise TypeError(dedent( - f""" + raise TypeError( + dedent( + f""" USE_CRTM has been set, but the external CRTM fix file directory (CRTM_DIR) is None. - """)) from None + """ + ) + ) from None except FileNotFoundError: raise # If performing sub-hourly model output and post-processing, check that # the output interval DT_SUBHOURLY_POST_MNTS (in minutes) is specified # correctly. - if post_config.get('SUB_HOURLY_POST'): + if post_config.get("SUB_HOURLY_POST"): # Subhourly post should be set with minutes between 1 and 59 for # real subhourly post to be performed. - dt_subhourly_post_mnts = post_config.get('DT_SUBHOURLY_POST_MNTS') + dt_subhourly_post_mnts = post_config.get("DT_SUBHOURLY_POST_MNTS") if dt_subhourly_post_mnts == 0: logger.warning( f""" @@ -780,7 +819,7 @@ def get_location(xcs, fmt, expt_cfg): Resetting SUB_HOURLY_POST to \"FALSE\". 
If you do not want this, you must set DT_SUBHOURLY_POST_MNTS to something other than zero.""" ) - post_config['SUB_HOURLY_POST'] = False + post_config["SUB_HOURLY_POST"] = False if dt_subhourly_post_mnts < 1 or dt_subhourly_post_mnts > 59: raise ValueError( @@ -793,7 +832,7 @@ def get_location(xcs, fmt, expt_cfg): # Check that DT_SUBHOURLY_POST_MNTS (after converting to seconds) is # evenly divisible by the forecast model's main time step DT_ATMOS. - dt_atmos = fcst_config['DT_ATMOS'] + dt_atmos = fcst_config["DT_ATMOS"] rem = dt_subhourly_post_mnts * 60 % dt_atmos if rem != 0: raise ValueError( @@ -813,8 +852,8 @@ def get_location(xcs, fmt, expt_cfg): ) # Make sure the post output domain is set - predef_grid_name = fcst_config.get('PREDEF_GRID_NAME') - post_output_domain_name = post_config.get('POST_OUTPUT_DOMAIN_NAME') + predef_grid_name = fcst_config.get("PREDEF_GRID_NAME") + post_output_domain_name = post_config.get("POST_OUTPUT_DOMAIN_NAME") if not post_output_domain_name: if not predef_grid_name: @@ -839,24 +878,24 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # - run_envir = expt_config['user'].get('RUN_ENVIR', "") + run_envir = expt_config["user"].get("RUN_ENVIR", "") # These NCO variables need to be set based on the user's specificed # run environment. The default is set in config_defaults for nco. If # running in community mode, we set these paths to the experiment # directory. nco_vars = [ - 'opsroot', - 'comroot', - 'packageroot', - 'dataroot', - 'dcomroot', - 'comin_basedir', - 'comout_basedir', - ] - - nco_config = expt_config['nco'] - if run_envir =! "nco": + "opsroot", + "comroot", + "packageroot", + "dataroot", + "dcomroot", + "comin_basedir", + "comout_basedir", + ] + + nco_config = expt_config["nco"] + if run_envir != "nco": # Put the variables in config dict. 
for nco_var in nco_vars: nco_config[nco_var.upper()] = exptdir @@ -869,13 +908,12 @@ def get_location(xcs, fmt, expt_cfg): mkdir_vrfy(f' -p "{nco_config.get("DATAROOT")}"') mkdir_vrfy(f' -p "{nco_config.get("DCOMROOT")}"') mkdir_vrfy(f' -p "{nco_config.get("LOGDIR")}"') - if nco_config['DBNROOT']: + if nco_config["DBNROOT"]: mkdir_vrfy(f' -p "{nco_config["DBNROOT"]}"') # create experiment dir mkdir_vrfy(f' -p "{exptdir}"') - # ----------------------------------------------------------------------- # # The FV3 forecast model needs the following input files in the run di- @@ -915,7 +953,7 @@ def get_location(xcs, fmt, expt_cfg): # # Check for the CCPP_PHYSICS suite xml file - ccpp_phys_suite_in_ccpp_fp = workflow_config['CCPP_PHYS_SUITE_IN_CCPP_FP'] + ccpp_phys_suite_in_ccpp_fp = workflow_config["CCPP_PHYS_SUITE_IN_CCPP_FP"] if not os.path.exists(ccpp_phys_suite_in_ccpp_fp): raise FileNotFoundError( f''' @@ -925,7 +963,7 @@ def get_location(xcs, fmt, expt_cfg): ) # Check for the field dict file - field_dict_in_uwm_fp = workflow_config['FIELD_DICT_IN_UWM_FP'] + field_dict_in_uwm_fp = workflow_config["FIELD_DICT_IN_UWM_FP"] if not os.path.exists(field_dict_in_uwm_fp): raise FileNotFoundError( f''' @@ -936,16 +974,16 @@ def get_location(xcs, fmt, expt_cfg): # Set the appropriate ozone production/loss file paths and symlinks ozone_param, fixgsm_ozone_fn, ozone_link_mappings = set_ozone_param( - ccpp_phys_suite_in_ccpp_fp, - fcst_config['CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING'], - ) + ccpp_phys_suite_in_ccpp_fp, + fcst_config["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"], + ) # Reset the dummy value saved in the last list item to the ozone # file name - fcst_config['FIXgsm_FILES_TO_COPY_TO_FIXam'][-1] = fixgsm_ozone_fn + fcst_config["FIXgsm_FILES_TO_COPY_TO_FIXam"][-1] = fixgsm_ozone_fn # Reset the experiment config list with the update list - fcst_config['CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING'] = ozone_link_mappings + fcst_config["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"] = 
ozone_link_mappings log_info( f""" @@ -978,12 +1016,12 @@ def get_location(xcs, fmt, expt_cfg): # # ----------------------------------------------------------------------- # - workflow_switches = expt_config['workflow_switches'] + workflow_switches = expt_config["workflow_switches"] # Ensemble verification can only be run in ensemble mode - do_ensemble = global_sect['DO_ENSEMBLE'] - run_task_vx_ensgrid = workflow_switches['RUN_TASK_VX_ENSGRID'] - run_task_vx_enspoint = workflow_switches['RUN_TASK_VX_ENSPOINT'] + do_ensemble = global_sect["DO_ENSEMBLE"] + run_task_vx_ensgrid = workflow_switches["RUN_TASK_VX_ENSGRID"] + run_task_vx_enspoint = workflow_switches["RUN_TASK_VX_ENSPOINT"] if (not do_ensemble) and (run_task_vx_ensgrid or run_task_vx_enspoint): raise Exception( f''' @@ -998,11 +1036,11 @@ def get_location(xcs, fmt, expt_cfg): # turned off. Link the files, and check that they all contain the # same resolution input. # - prep_tasks = ['GRID', 'OROG', 'SFC_CLIMO'] + prep_tasks = ["GRID", "OROG", "SFC_CLIMO"] res_in_fixlam_filenames = None for prep_task in prep_tasks: res_in_fns = "" - switch = f'RUN_TASK_MAKE_{prep_task}' + switch = f"RUN_TASK_MAKE_{prep_task}" # If the user doesn't want to run the given task, link the fix # file if not workflow_switches[switch]: @@ -1011,10 +1049,12 @@ def get_location(xcs, fmt, expt_cfg): dir_key = f"{prep_task}_DIR" expt_config[sect_key][dir_key] = task_dir - msg = dedent(f""" + msg = dedent( + f""" {dir_key} not specified! 
Setting {dir_key} = {task_dir} - """) + """ + ) logger.warning(msg) # Link the fix files and check that their resolution is @@ -1023,20 +1063,21 @@ def get_location(xcs, fmt, expt_cfg): verbose=verbose, file_group=prep_task.lower(), source_dir=task_dir, - target_dir=workflow_config['FIXlam'], - ccpp_phys_suite=workflow_config['CCPP_PHYS_SUITE'], - constants=expt_config['constants'] - dot_or_underscore=workflow_config['DOT_OR_USCORE'], - nhw=grid_params['NHW'], + target_dir=workflow_config["FIXlam"], + ccpp_phys_suite=workflow_config["CCPP_PHYS_SUITE"], + constants=expt_config["constants"], + dot_or_underscore=workflow_config["DOT_OR_USCORE"], + nhw=grid_params["NHW"], run_task=False, - sfc_climo_fields=expt_config['task_run_fcst']['SFC_CLIMO_FIELDS'], - ) + sfc_climo_fields=expt_config["task_run_fcst"]["SFC_CLIMO_FIELDS"], + ) if res_in_fixlam_filenames is None: res_in_fixlam_filenames = res_in_fns else: if res_in_fixlam_filesnames != res_in_fns: - raise Exception(dedent( - f""" + raise Exception( + dedent( + f""" The resolution of the pregenerated files for {prep_task} do not match those that were alread set: @@ -1044,10 +1085,10 @@ def get_location(xcs, fmt, expt_cfg): Resolution in {prep_task}: {res_in_fns} Resolution expected: {res_in_fixlam_filesnames} """ - )) + ) + ) - - if not os.path.exists(task_dir): + if not os.path.exists(task_dir): raise FileNotFoundError( f''' The directory ({dir_key}) that should contain the pregenerated @@ -1055,8 +1096,8 @@ def get_location(xcs, fmt, expt_cfg): {dir_key} = \"{task_dir}\"''' ) - workflow_config['RES_IN_FIXLAM_FILENAMES'] = res_in_fixlam_filesnames - workflow_config['CRES'] = f"C{res_in_fixlam_filenames}" + workflow_config["RES_IN_FIXLAM_FILENAMES"] = res_in_fixlam_filesnames + workflow_config["CRES"] = f"C{res_in_fixlam_filenames}" # # ----------------------------------------------------------------------- @@ -1077,17 +1118,21 @@ def get_location(xcs, fmt, expt_cfg): # # 
----------------------------------------------------------------------- # - if fcst_config['WRITE_DOPOST']: + if fcst_config["WRITE_DOPOST"]: # Turn off run_post - if workflow_switches['RUN_TASK_RUN_POST']: - logger.warning(dedent(f""" + if workflow_switches["RUN_TASK_RUN_POST"]: + logger.warning( + dedent( + f""" Inline post is turned on, deactivating post-processing tasks: RUN_TASK_RUN_POST = False - """)) - workflow_switches['RUN_TASK_RUN_POST'] = False + """ + ) + ) + workflow_switches["RUN_TASK_RUN_POST"] = False # Check if SUB_HOURLY_POST is on - if expt_config['task_run_post']['SUB_HOURLY_POST']: + if expt_config["task_run_post"]["SUB_HOURLY_POST"]: raise Exception( f""" SUB_HOURLY_POST is NOT available with Inline Post yet.""" @@ -1101,9 +1146,9 @@ def get_location(xcs, fmt, expt_cfg): # # ----------------------------------------------------------------------- # - workflow_config['SDF_USES_RUC_LSM'] = check_ruc_lsm( - ccpp_phys_suite_fp=CCPP_PHYS_SUITE_IN_CCPP_FP - ) + workflow_config["SDF_USES_RUC_LSM"] = check_ruc_lsm( + ccpp_phys_suite_fp=CCPP_PHYS_SUITE_IN_CCPP_FP + ) # # ----------------------------------------------------------------------- # @@ -1118,19 +1163,22 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # - link_thompson_climo = (get_extrn_ics['EXTRN_MDL_NAME_ICS'] not in ["HRRR", "RAP"]) \ - or (get_extrn_lbcs['EXTRN_MDL_NAME_LBCS'] not in ["HRRR", "RAP"]) + link_thompson_climo = ( + get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RAP"] + ) or (get_extrn_lbcs["EXTRN_MDL_NAME_LBCS"] not in ["HRRR", "RAP"]) use_thompson, mapping, fix_files = set_thompson_mp_fix_files( ccpp_phys_suite_fp=CCPP_PHYS_SUITE_IN_CCPP_FP, thompson_mp_climo_fn=THOMPSON_MP_CLIMO_FN, link_thompson_climo=link_thompson_climo, ) - workflow_config['SDF_USES_THOMPSON_MP'] = use_thompson + workflow_config["SDF_USES_THOMPSON_MP"] = use_thompson if use_thompson: - 
expt_config['task_run_fcst']['CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING'].append(mapping) - expt_config['task_run_fcst']['FIXgsm_FILES_TO_COPY_TO_FIXam'].append(fix_files) + expt_config["task_run_fcst"]["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"].append( + mapping + ) + expt_config["task_run_fcst"]["FIXgsm_FILES_TO_COPY_TO_FIXam"].append(fix_files) log_info( f""" @@ -1168,7 +1216,7 @@ def get_location(xcs, fmt, expt_cfg): all_lines = cfg_to_yaml_str(expt_config) log_info(all_lines, verbose=debug) - global_var_defns_fp = workflow_config['GLOBAL_VAR_DEFNS_FP'] + global_var_defns_fp = workflow_config["GLOBAL_VAR_DEFNS_FP"] # print info message log_info( f""" @@ -1205,6 +1253,7 @@ def get_location(xcs, fmt, expt_cfg): return expt_config + # # ----------------------------------------------------------------------- # From 17790514404d50d4b4725b633954e61a13354dff Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Tue, 8 Nov 2022 01:36:28 +0000 Subject: [PATCH 04/19] A tiny attempt at linting. --- ush/generate_FV3LAM_wflow.py | 105 +++++++++++++++++++++-------------- 1 file changed, 62 insertions(+), 43 deletions(-) diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 73ba8e22ba..4914830edf 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -31,6 +31,7 @@ set_env_var, get_env_var, lowercase, + flatten_dict, ) from setup import setup @@ -41,12 +42,12 @@ from check_python_version import check_python_version -def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> None: +def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> None: """Function to setup a forecast experiment and create a workflow (according to the parameters specified in the config file) Args: - USHdir (str): The full path of the ush/ directory where this script is located + ushdir (str): The full path of the ush/ directory where this script is located logfile (str): The name of the file where logging is 
written Returns: None @@ -68,7 +69,7 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> # The setup function reads the user configuration file and fills in # non-user-specified values from config_defaults.yaml - expt_config = setup(USHdir) + expt_config = setup(ushdir) # # ----------------------------------------------------------------------- @@ -114,7 +115,7 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> # Dictionary of settings to pass to fill_jinja # settings = {} - for k, v in var_defs_dict.items(): + for k, v in flatten_dict(expt_config).items(): settings[lowercase(k)] = v ensmem_indx_name = "" @@ -125,23 +126,27 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> uscore_ensmem_name = f"_mem#{ensmem_indx_name}#" slash_ensmem_subdir = f"/mem#{ensmem_indx_name}#" - d = DATE_FIRST_CYCL + timedelta(seconds=DT_ATMOS) - time_str = d.strftime("%M:%S") + + dt_atmos = expt_config['task_run_fcst']['DT_ATMOS'] + date_first_cycl = expt_config['workflow']['DATE_FIRST_CYCL'] + date_last_cycl = expt_config['workflow']['DATE_LAST_CYCL'] + first_file_time = date_first_cycl + timedelta(seconds=dt_atmos) + fcst_threads = expt_config['task_run_fcst']['OMP_NUM_THREADS_RUN_FCST'] settings.update( { # # Number of cores used for a task # - "ncores_run_fcst": PE_MEMBER01, - "native_run_fcst": f"--cpus-per-task {OMP_NUM_THREADS_RUN_FCST} --exclusive", + "ncores_run_fcst": expt_config['task_run_fcst']['PE_MEMBER01'], + "native_run_fcst": f"--cpus-per-task {fcst_threads} --exclusive", # # Parameters that determine the set of cycles to run. 
# - "date_first_cycl": date_to_str(DATE_FIRST_CYCL, format="%Y%m%d%H00"), - "date_last_cycl": date_to_str(DATE_LAST_CYCL, format="%Y%m%d%H00"), - "cdate_first_cycl": DATE_FIRST_CYCL, - "cycl_freq": f"{INCR_CYCL_FREQ:02d}:00:00", + "date_first_cycl": date_to_str(date_first_cycl, format="%Y%m%d%H00"), + "date_last_cycl": date_to_str(date_last_cycl, format="%Y%m%d%H00"), + "cdate_first_cycl": date_first_cycl, + "cycl_freq": f"{expt_config['workflow']['INCR_CYCL_FREQ']:02d}:00:00", # # Ensemble-related parameters. # @@ -151,23 +156,25 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> # # Parameters associated with subhourly post-processed output # - "delta_min": DT_SUBHOURLY_POST_MNTS, - "first_fv3_file_tstr": f"000:{time_str}", + "delta_min": expt_config['task_run_post']['DT_SUBHOURLY_POST_MNTS'], + "first_fv3_file_tstr": first_file_time.strftime("000:%M:%S"), } ) # Log "settings" variable. settings_str = cfg_to_yaml_str(settings) + verbose = expt_config['workflow']['VERBOSE'] + log_info( f""" The variable 'settings' specifying values of the rococo XML variables has been set as follows: #----------------------------------------------------------------------- settings =\n\n""", - verbose=VERBOSE, + verbose=verbose, ) - log_info(settings_str, verbose=VERBOSE) + log_info(settings_str, verbose=verbose) # # Call the python script to generate the experiment's actual XML file @@ -175,7 +182,7 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> # try: fill_jinja_template( - ["-q", "-u", settings_str, "-t", template_xml_fp, "-o", WFLOW_XML_FP] + ["-q", "-u", settings_str, "-t", template_xml_fp, "-o", wflow_xml_fp] ) except: logging.exception( @@ -187,7 +194,7 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> Full path to template rocoto XML file: template_xml_fp = '{template_xml_fp}' Full path to output rocoto XML file: - WFLOW_XML_FP = '{WFLOW_XML_FP}' + WFLOW_XML_FP = 
'{wflow_xml_fp}' Namelist settings specified on command line:\n settings =\n\n""" ) @@ -201,17 +208,21 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> # # ----------------------------------------------------------------------- # + exptdir = expt_config['workflow']['EXPTDIR'] + wflow_launch_script_fp = expt_config['workflow']['WFLOW_LAUNCH_SCRIPT_FP'] log_info( f""" Creating symlink in the experiment directory (EXPTDIR) that points to the workflow launch script (WFLOW_LAUNCH_SCRIPT_FP): - EXPTDIR = '{EXPTDIR}' - WFLOW_LAUNCH_SCRIPT_FP = '{WFLOW_LAUNCH_SCRIPT_FP}'""", - verbose=VERBOSE, + EXPTDIR = '{exptdir}' + WFLOW_LAUNCH_SCRIPT_FP = '{wflow_launch_script_fp}'""", + verbose=verbose, ) create_symlink_to_file( - WFLOW_LAUNCH_SCRIPT_FP, os.path.join(EXPTDIR, WFLOW_LAUNCH_SCRIPT_FN), False + wflow_launch_script_fp, + os.path.join(exptdir, wflow_launch_script_fn), + False ) # # ----------------------------------------------------------------------- @@ -222,6 +233,13 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> # # ----------------------------------------------------------------------- # + # From here on out, going back to setting variables for everything + # in the flattened expt_config dictionary + # TODO: Reference all these variables in their respective + # dictionaries, instead. 
+ import_vars(dictionary=flatten_dict(expt_config), + target_dict=locals()) + if USE_CRON_TO_RELAUNCH: add_crontab_line() @@ -235,7 +253,7 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> Symlinking fixed files from system directory (FIXgsm) to a subdirectory (FIXam): FIXgsm = '{FIXgsm}' FIXam = '{FIXam}'""", - verbose=VERBOSE, + verbose=verbose, ) ln_vrfy(f"""-fsn '{FIXgsm}' '{FIXam}'""") @@ -246,7 +264,7 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> Copying fixed files from system directory (FIXgsm) to a subdirectory (FIXam): FIXgsm = '{FIXgsm}' FIXam = '{FIXam}'""", - verbose=VERBOSE, + verbose=verbose, ) check_for_preexist_dir_file(FIXam, "delete") @@ -272,7 +290,7 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> FIXaer = '{FIXaer}' FIXlut = '{FIXlut}' FIXclim = '{FIXclim}'""", - verbose=VERBOSE, + verbose=verbose, ) check_for_preexist_dir_file(FIXclim, "delete") @@ -294,27 +312,27 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> log_info( f""" Copying templates of various input files to the experiment directory...""", - verbose=VERBOSE, + verbose=verbose, ) log_info( f""" Copying the template data table file to the experiment directory...""", - verbose=VERBOSE, + verbose=verbose, ) cp_vrfy(DATA_TABLE_TMPL_FP, DATA_TABLE_FP) log_info( f""" Copying the template field table file to the experiment directory...""", - verbose=VERBOSE, + verbose=verbose, ) cp_vrfy(FIELD_TABLE_TMPL_FP, FIELD_TABLE_FP) log_info( f""" Copying the template NEMS configuration file to the experiment directory...""", - verbose=VERBOSE, + verbose=verbose, ) cp_vrfy(NEMS_CONFIG_TMPL_FP, NEMS_CONFIG_FP) # @@ -326,7 +344,7 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> f""" Copying the CCPP physics suite definition XML file from its location in the forecast model directory sturcture to the experiment directory...""", 
- verbose=VERBOSE, + verbose=verbose, ) cp_vrfy(CCPP_PHYS_SUITE_IN_CCPP_FP, CCPP_PHYS_SUITE_FP) # @@ -338,7 +356,7 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> f""" Copying the field dictionary file from its location in the forecast model directory sturcture to the experiment directory...""", - verbose=VERBOSE, + verbose=verbose, ) cp_vrfy(FIELD_DICT_IN_UWM_FP, FIELD_DICT_FP) # @@ -584,9 +602,9 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> f""" The variable 'settings' specifying values of the weather model's namelist variables has been set as follows:\n""", - verbose=VERBOSE, + verbose=verbose, ) - log_info("\nsettings =\n\n" + settings_str, verbose=VERBOSE) + log_info("\nsettings =\n\n" + settings_str, verbose=verbose) # # ----------------------------------------------------------------------- # @@ -657,7 +675,7 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> # get_nomads_data( # NOMADS_file_type, # EXPTDIR, - # USHdir, + # ushdir, # DATE_FIRST_CYCL, # CYCL_HRS, # FCST_LEN_HRS, @@ -673,7 +691,7 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> # # ----------------------------------------------------------------------- # - cp_vrfy(os.path.join(USHdir, EXPT_CONFIG_FN), EXPTDIR) + cp_vrfy(os.path.join(ushdir, EXPT_CONFIG_FN), EXPTDIR) # Note workflow generation completion log_info( @@ -700,9 +718,10 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> # ----------------------------------------------------------------------- # if WORKFLOW_MANAGER == "rocoto": - wflow_db_fn = f"{os.path.splitext(WFLOW_XML_FN)[0]}.db" - rocotorun_cmd = f"rocotorun -w {WFLOW_XML_FN} -d {wflow_db_fn} -v 10" - rocotostat_cmd = f"rocotostat -w {WFLOW_XML_FN} -d {wflow_db_fn} -v 10" + wflow_xml_fn = settings['WFLOW_XML_FN'] + wflow_db_fn = f"{os.path.splitext(wflow_xml_fn)[0]}.db" + rocotorun_cmd = f"rocotorun -w 
{wflow_xml_fn} -d {wflow_db_fn} -v 10" + rocotostat_cmd = f"rocotostat -w {wflow_xml_fn} -d {wflow_db_fn} -v 10" log_info( f""" @@ -742,7 +761,7 @@ def generate_FV3LAM_wflow(USHdir, logfile: str = "log.generate_FV3LAM_wflow") -> def get_nomads_data( NOMADS_file_type, EXPTDIR, - USHdir, + ushdir, DATE_FIRST_CYCL, CYCL_HRS, FCST_LEN_HRS, @@ -751,7 +770,7 @@ def get_nomads_data( print("Getting NOMADS online data") print(f"NOMADS_file_type= {NOMADS_file_type}") cd_vrfy(EXPTDIR) - NOMADS_script = os.path.join(USHdir, "NOMADS_get_extrn_mdl_files.sh") + NOMADS_script = os.path.join(ushdir, "NOMADS_get_extrn_mdl_files.sh") run_command( f"""{NOMADS_script} \ {date_to_str(DATE_FIRST_CYCL,format='%Y%m%d')} \ @@ -783,12 +802,12 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow") -> None: if __name__ == "__main__": USHdir = os.path.dirname(os.path.abspath(__file__)) - logfile = f"{USHdir}/log.generate_FV3LAM_wflow" + wflow_logfile = f"{USHdir}/log.generate_FV3LAM_wflow" # Call the generate_FV3LAM_wflow function defined above to generate the # experiment/workflow. try: - generate_FV3LAM_wflow(USHdir, logfile) + generate_FV3LAM_wflow(USHdir, wflow_logfile) except: logging.exception( dedent( @@ -797,7 +816,7 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow") -> None: FATAL ERROR: Experiment generation failed. See the error message(s) printed below. 
For more detailed information, check the log file from the workflow - generation script: {logfile} + generation script: {wflow_logfile} *********************************************************************\n """ ) From 75f1a27d698bf9a5d5af43cf0b231c7ecf0f7dae Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Tue, 22 Nov 2022 15:45:40 +0000 Subject: [PATCH 05/19] WIP --- parm/FV3LAM_wflow.xml | 7 +- tests/WE2E/machine_suites/fundamental | 9 -- ush/config_defaults.yaml | 56 ++++----- ush/generate_FV3LAM_wflow.py | 25 ++-- ush/link_fix.py | 15 +-- ush/python_utils/__init__.py | 1 + ush/python_utils/config_parser.py | 41 +++++-- ush/set_thompson_mp_fix_files.py | 3 +- ush/setup.py | 168 +++++++++++++++----------- 9 files changed, 189 insertions(+), 136 deletions(-) delete mode 100644 tests/WE2E/machine_suites/fundamental diff --git a/parm/FV3LAM_wflow.xml b/parm/FV3LAM_wflow.xml index 622e0b42d6..37b6b86840 100644 --- a/parm/FV3LAM_wflow.xml +++ b/parm/FV3LAM_wflow.xml @@ -355,11 +355,8 @@ MODULES_RUN_TASK_FP script. 
{%- if do_ensemble %} - -{%- for m in range(1, num_ens_members+1) -%} - {%- set fmtstr=" %0"~ndigits_ensmem_names~"d" -%} - {{- fmtstr%m -}} -{%- endfor %} + {% for m in range(1, num_ens_members+1) %}{{ "%03d" }}{% endfor %} + {%- endif %} {%- if run_task_make_ics %} diff --git a/tests/WE2E/machine_suites/fundamental b/tests/WE2E/machine_suites/fundamental deleted file mode 100644 index 0887e6c58e..0000000000 --- a/tests/WE2E/machine_suites/fundamental +++ /dev/null @@ -1,9 +0,0 @@ -grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2 -grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 -grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_HRRR -grid_RRFS_CONUS_25km_ics_GSMGFS_lbcs_GSMGFS_suite_GFS_v15p2 -grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR -grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_RRFS_v1beta -grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR -grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta -nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 510f1f743c..7844dfc06c 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -55,8 +55,8 @@ user: MACHINE: "BIG_COMPUTER" ACCOUNT: "" - HOMEdir: '{{ HOMEdir }}' - USHdir: '{{ USHdir }}' + HOMEdir: '{{ user.HOMEdir }}' + USHdir: '{{ user.USHdir }}' SCRIPTSdir: '{{ [HOMEdir, "scripts"]|path_join }}' JOBSdir: '{{ [HOMEdir, "jobs"]|path_join }}' SORCdir: '{{ [HOMEdir, "sorc"]|path_join }}' @@ -66,7 +66,7 @@ user: VX_CONFIG_DIR: '{{ [HOMEdir, "parm"]|path_join }}' METPLUS_CONF: '{{ [PARMdir, "metplus"]|path_join }}' MET_CONFIG: '{{ [PARMdir, "met"]|path_join }}' - UFS_WTHR_MDL_DIR: '{{ UFS_WTHR_MDL_DIR }}' + UFS_WTHR_MDL_DIR: '{{ user.UFS_WTHR_MDL_DIR }}' #---------------------------- # PLATFORM config parameters @@ -159,10 +159,10 @@ platform: WORKFLOW_MANAGER: "" NCORES_PER_NODE: "" LMOD_PATH: "" - BUILD_MOD_FN: "build_{{ user.MAACHINE|lower() }}_{{ workflow.COMPILER 
}}" + BUILD_MOD_FN: "build_{{ user.MACHINE|lower() }}_{{ workflow.COMPILER }}" WFLOW_MOD_FN: "wflow_{{ user.MACHINE|lower() }}" - BUILD_VER_FN: "build.ver.{{ user.MAACHINE|lower() }}" - RUN_VER_FN: "run.ver.{{ user.MAACHINE|lower() }}" + BUILD_VER_FN: "build.ver.{{ user.MACHINE|lower() }}" + RUN_VER_FN: "run.ver.{{ user.MACHINE|lower() }}" SCHED: "" PARTITION_DEFAULT: "" QUEUE_DEFAULT: "" @@ -327,7 +327,7 @@ platform: # that will point to a subdirectory (having the name of the grid being # used) under this directory. This variable should be set to a null # string in this file, but it can be specified in the user-specified - : workflow configuration file (EXPT_CONFIG_FN). + # workflow configuration file (EXPT_CONFIG_FN). # #----------------------------------------------------------------------- # @@ -378,7 +378,7 @@ workflow: # #----------------------------------------------------------------------- # - CPL: "{{ eq(workflow.FCST_MODEL, 'fv3gfs_aqm', /) }}" + CPL: "{{ workflow.FCST_MODEL == 'fv3gfs_aqm' }}" # #----------------------------------------------------------------------- # @@ -426,8 +426,8 @@ workflow: # installed. 
#----------------------------------------------------------------------- # - EXPT_BASEDIR: "" - EXPT_SUBDIR: "" + EXPT_BASEDIR: "{{ workflow.EXPT_BASEDIR }}" + EXPT_SUBDIR: "{{ EXPT_SUBDIR }}" EXEC_SUBDIR: "exec" EXPTDIR: "{{ [EXPT_BASEDIR, EXPT_SUBDIR]|path_join}}" # @@ -548,8 +548,10 @@ workflow: FV3_EXEC_FN: "ufs_model" DATA_TABLE_FN: "data_table" - DIAG_TABLE_FN: "diag_table.{{ CCPP_PHYS_SUITE }}" - FIELD_TABLE_FN: "field_table.{{ CCPP_PHYS_SUITE }}" + DIAG_TABLE_FN: "diag_table" + FIELD_TABLE_FN: "field_table" + DIAG_TABLE_TMPL_FN: "diag_table.{{ CCPP_PHYS_SUITE }}" + FIELD_TABLE_TMPL_FN: "field_table.{{ CCPP_PHYS_SUITE }}" MODEL_CONFIG_FN: "model_configure" NEMS_CONFIG_FN: "nems.configure" @@ -557,8 +559,8 @@ workflow: FV3_NML_YAML_CONFIG_FP: '{{ [user.PARMdir, FV3_NML_YAML_CONFIG_FN]|path_join}}' FV3_NML_BASE_ENS_FP: '{{ [EXPTDIR, FV3_NML_BASE_ENS_FN]|path_join}}' DATA_TABLE_TMPL_FP: '{{ [user.PARMdir, DATA_TABLE_FN]|path_join}}' - DIAG_TABLE_TMPL_FP: '{{ [user.PARMdir, DIAG_TABLE_FN]|path_join}}' - FIELD_TABLE_TMPL_FP: '{{ [user.PARMdir, FIELD_TABLE_FN]|path_join}}' + DIAG_TABLE_TMPL_FP: '{{ [user.PARMdir, DIAG_TABLE_TMPL_FN]|path_join}}' + FIELD_TABLE_TMPL_FP: '{{ [user.PARMdir, FIELD_TABLE_TMPL_FN]|path_join}}' MODEL_CONFIG_TMPL_FP: '{{ [user.PARMdir, MODEL_CONFIG_FN]|path_join}}' NEMS_CONFIG_TMPL_FP: '{{ [user.PARMdir, NEMS_CONFIG_FN]|path_join}}' @@ -608,7 +610,7 @@ workflow: # #----------------------------------------------------------------------- # - FIXdir: "{% EXPTDIR if workflow_switches.RUN_TASK_MAKE_GRID else path_join([user.HOMEdir, 'fix']) %}" + FIXdir: "{{ EXPTDIR if workflow_switches.RUN_TASK_MAKE_GRID else path_join([user.HOMEdir, 'fix']) }}" FIXam: "{{ [FIXdir, 'fix_am']|path_join}}" FIXclim: "{{ [FIXdir, 'fix_clim']|path_join}}" FIXlam: "{{ [FIXdir, 'fix_lam']|path_join}}" @@ -634,7 +636,7 @@ workflow: CCPP_PHYS_SUITE: "FV3_GFS_v16" CCPP_PHYS_SUITE_FN: "suite_{{ CCPP_PHYS_SUITE }}.xml" CCPP_PHYS_SUITE_IN_CCPP_FP: "{{ 
[user.UFS_WTHR_MDL_DIR, 'FV3', 'ccpp', 'suites', CCPP_PHYS_SUITE_FN] |path_join}}" - CCPP_PHYS_SUITE_FP: "{{ [EXPTDIR, CCPP_PHYS_SUITE_FN]|path_join}}" + CCPP_PHYS_SUITE_FP: "{{ [workflow.EXPTDIR, CCPP_PHYS_SUITE_FN]|path_join}}" # #----------------------------------------------------------------------- # @@ -643,8 +645,8 @@ workflow: #----------------------------------------------------------------------- # FIELD_DICT_FN: "fd_nems.yaml" - FIELD_DICT_IN_UWM_FP: "{{ [user.FIELD_DICT_IN_UWM_FP, 'tests', 'parm', FIELD_DICT_FN]|path_join}}" - FIELD_DICT_FP: "{{ [EXPTDIR, FIELD_DICT_FN]|path_join}}" + FIELD_DICT_IN_UWM_FP: "{{ [user.UFS_WTHR_MDL_DIR, 'tests', 'parm', FIELD_DICT_FN]|path_join}}" + FIELD_DICT_FP: "{{ [workflow.EXPTDIR, FIELD_DICT_FN]|path_join}}" # #----------------------------------------------------------------------- # @@ -1257,7 +1259,7 @@ task_make_sfc_climo: KMP_AFFINITY_MAKE_SFC_CLIMO: "scatter" OMP_NUM_THREADS_MAKE_SFC_CLIMO: 1 OMP_STACKSIZE_MAKE_SFC_CLIMO: "1024m" - SFC_CLIMO_DIR: "{{ [wokflow.EXPTDIR, 'sfc_climo']|path_join}}" + SFC_CLIMO_DIR: "{{ [workflow.EXPTDIR, 'sfc_climo']|path_join}}" SFC_CLIMO_INPUT_DIR: "" #---------------------------- @@ -1532,7 +1534,7 @@ task_make_lbcs: KMP_AFFINITY_MAKE_LBCS: "scatter" OMP_NUM_THREADS_MAKE_LBCS: 1 OMP_STACKSIZE_MAKE_LBCS: "1024m" - LBC_SPEC_FCST_HRS: "{% for h in range(task_get_extrn_lbcs.LBC_SPEC_INTVL_HRS, task_get_extrn_lbcs.LBC_SPEC_INTVL_HRS + workflow.FCST_LEN_HRS, task_get_extrn_lbcs.LBC_SPEC_INTVL_HRS %}{{ h }}{% endfor %}" + LBC_SPEC_FCST_HRS: "{% for h in range(task_get_extrn_lbcs.LBC_SPEC_INTVL_HRS, task_get_extrn_lbcs.LBC_SPEC_INTVL_HRS + workflow.FCST_LEN_HRS, task_get_extrn_lbcs.LBC_SPEC_INTVL_HRS) %}{{ h }}{% endfor %}" #---------------------------- # FORECAST config parameters @@ -1643,9 +1645,9 @@ task_run_fcst: # #----------------------------------------------------------------------- # - LAYOUT_X: "" - LAYOUT_Y: "" - BLOCKSIZE: "" + LAYOUT_X: "{{ LAYOUT_X }}" + LAYOUT_Y: "{{ 
LAYOUT_Y }}" + BLOCKSIZE: "{{ BLOCKSIZE }}" # #----------------------------------------------------------------------- # @@ -1807,7 +1809,7 @@ task_run_fcst: # #----------------------------------------------------------------------- # - PREDEF_GRID_NAME: "" + PREDEF_GRID_NAME: "" # #----------------------------------------------------------------------- # @@ -1816,7 +1818,7 @@ task_run_fcst: # #----------------------------------------------------------------------- # - USE_MERRA_CLIMO: "{{ eq(workflow.CCPP_PHYS_SUITE, 'FV3_GFS_v15_thompson_mynn_lam3km', /) }}" + USE_MERRA_CLIMO: "{{ workflow.CCPP_PHYS_SUITE == 'FV3_GFS_v15_thompson_mynn_lam3km' }}" # #----------------------------------------------------------------------- # @@ -2236,8 +2238,8 @@ global: # DO_ENSEMBLE: false NUM_ENS_MEMBERS: 0 - ENSMEM_NAMES: "{% for m in range(NUM_ENS_MEMBERS) %}{{ 'mem%03d, ' % m }}{% endfor %}" - FV3_NML_ENSMEM_FPS: "{% for mem in ENSMEM_NAMES %}{{ [EXPTDIR, '%s_%s' % FV3_NML_FN, mem}}{% endfor %}" + ENSMEM_NAMES: "{% for m in range(NUM_ENS_MEMBERS) %} 'mem%03d, ' % m {% endfor %}" + FV3_NML_ENSMEM_FPS: "{% for mem in ENSMEM_NAMES %}{{ [EXPTDIR, '%s_%s' % FV3_NML_FN, mem]|path_join }}{% endfor %}" # #----------------------------------------------------------------------- diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 4914830edf..6b999e88ec 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -185,9 +185,17 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> ["-q", "-u", settings_str, "-t", template_xml_fp, "-o", wflow_xml_fp] ) except: - logging.exception( + logging.info( dedent( f""" + Variable settings specified on command line for + fill_jinja_template.py:\n + settings =\n\n""" + ) + + settings_str + ) + raise Exception( + dedent(f""" Call to python script fill_jinja_template.py to create a rocoto workflow XML file from a template file failed. 
Parameters passed to this script are: @@ -195,10 +203,8 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> template_xml_fp = '{template_xml_fp}' Full path to output rocoto XML file: WFLOW_XML_FP = '{wflow_xml_fp}' - Namelist settings specified on command line:\n - settings =\n\n""" + """ ) - + settings_str ) # # ----------------------------------------------------------------------- @@ -210,6 +216,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> # exptdir = expt_config['workflow']['EXPTDIR'] wflow_launch_script_fp = expt_config['workflow']['WFLOW_LAUNCH_SCRIPT_FP'] + wflow_launch_script_fn = expt_config['workflow']['WFLOW_LAUNCH_SCRIPT_FN'] log_info( f""" Creating symlink in the experiment directory (EXPTDIR) that points to the @@ -237,8 +244,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> # in the flattened expt_config dictionary # TODO: Reference all these variables in their respective # dictionaries, instead. 
- import_vars(dictionary=flatten_dict(expt_config), - target_dict=locals()) + import_vars(dictionary=flatten_dict(expt_config)) if USE_CRON_TO_RELAUNCH: add_crontab_line() @@ -718,10 +724,9 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> # ----------------------------------------------------------------------- # if WORKFLOW_MANAGER == "rocoto": - wflow_xml_fn = settings['WFLOW_XML_FN'] - wflow_db_fn = f"{os.path.splitext(wflow_xml_fn)[0]}.db" - rocotorun_cmd = f"rocotorun -w {wflow_xml_fn} -d {wflow_db_fn} -v 10" - rocotostat_cmd = f"rocotostat -w {wflow_xml_fn} -d {wflow_db_fn} -v 10" + wflow_db_fn = f"{os.path.splitext(WFLOW_XML_FN)[0]}.db" + rocotorun_cmd = f"rocotorun -w {WFLOW_XML_FN} -d {wflow_db_fn} -v 10" + rocotostat_cmd = f"rocotostat -w {WFLOW_XML_FN} -d {wflow_db_fn} -v 10" log_info( f""" diff --git a/ush/link_fix.py b/ush/link_fix.py index 827f467b65..44f73d2f73 100755 --- a/ush/link_fix.py +++ b/ush/link_fix.py @@ -30,7 +30,7 @@ def link_fix( target_dir, ccpp_phys_suite, constants, - dot_or_underscore, + dot_or_uscore, nhw, run_task, sfc_climo_fields, @@ -46,7 +46,7 @@ def link_fix( source_dir: the path to directory where the file_group fix files are linked from target_dir: the directory where the fix files should be linked to - dot_or_underscore: str containing either a dot or an underscore + dot_or_uscore: str containing either a dot or an underscore nhw: grid parameter setting constants: dict containing the constants used by SRW run_task: boolean value indicating whether the task is to be run @@ -361,7 +361,8 @@ def link_fix( # Create links without halo and tile7, and with "tile1" halo_tile = f"{cres}.{field}.tile{tile_rgnl}.halo{nh0}.nc" - no_halo_tile = re.sub(f"tile{tile_rgnl}.halo{nh0}", "tile1", True) + no_halo_tile = re.sub(f"tile{tile_rgnl}.halo{nh0}", "tile1", halo_tile) + create_symlink_to_file(halo_tile, no_halo_tile, True) # Change directory back to original one. 
cd_vrfy(save_dir) @@ -400,14 +401,14 @@ def parse_args(argv): link_fix( verbose=cfg["workflow"]["VERBOSE"], file_group=args.file_group, - source_dir=cfg["task_make_{args.file_group.upper()}"][f"{args.file_group}_DIR"], + source_dir=cfg[f"task_make_{args.file_group.lower()}"][f"{args.file_group.upper()}_DIR"], target_dir=cfg["workflow"]["FIXlam"], ccpp_phys_suite=cfg["workflow"]["CCPP_PHYS_SUITE"], constants=cfg["constants"], - dot_or_underscore=cfg["workflow"]["DOT_OR_USCORE"], + dot_or_uscore=cfg["workflow"]["DOT_OR_USCORE"], nhw=cfg["grid_params"]["NHW"], run_task=True, - sfc_climo_fields=cfg["task_run_fcst"]["SFC_CLIMO_FIELDS"], + sfc_climo_fields=cfg["fixed_files"]["SFC_CLIMO_FIELDS"], ) @@ -420,7 +421,7 @@ def test_link_fix(self): target_dir=self.FIXlam, ccpp_phys_suite=self.cfg["CCPP_PHYS_SUITE"], constants=self.cfg["constants"], - dot_or_underscore=self.cfg["DOT_OR_USCORE"], + dot_or_uscore=self.cfg["DOT_OR_USCORE"], nhw=self.cfg["NHW"], run_task=False, sfc_climo_fields=["foo", "bar"], diff --git a/ush/python_utils/__init__.py b/ush/python_utils/__init__.py index 02e2b83b77..dabd8b3f08 100644 --- a/ush/python_utils/__init__.py +++ b/ush/python_utils/__init__.py @@ -47,4 +47,5 @@ load_config_file, load_yaml_config, cfg_to_yaml_str, + extend_yaml, ) diff --git a/ush/python_utils/config_parser.py b/ush/python_utils/config_parser.py index 9588fdf21e..58b2d1c72c 100644 --- a/ush/python_utils/config_parser.py +++ b/ush/python_utils/config_parser.py @@ -35,6 +35,8 @@ import xml.etree.ElementTree as ET from xml.dom import minidom +import jinja2 + from .environment import list_to_str, str_to_list from .run_command import run_command @@ -88,6 +90,11 @@ def join_str(loader, node): except NameError: pass +def path_join(arg): + """ A filter for jinja2 that joins paths """ + + return os.path.join(*arg) + def extend_yaml(yaml_dict, full_dict=None): ''' @@ -123,9 +130,12 @@ def extend_yaml(yaml_dict, full_dict=None): if '{%' in v: templates = [v_str] else: + # Separates out 
all the double curly bracket pairs templates = re.findall(r'{{[^}]*}}|\S', v_str) data = [] for template in templates: + if len(template) > 1: + print(template) j2env = jinja2.Environment(loader=jinja2.BaseLoader, undefined=jinja2.StrictUndefined) j2env.filters['path_join'] = path_join @@ -133,13 +143,21 @@ def extend_yaml(yaml_dict, full_dict=None): try: # Fill in a template that has the appropriate variables # set. - template = j2tmpl.render(env=os.environ, **full_dict) + template = j2tmpl.render(**yaml_dict, **full_dict) except jinja2.exceptions.UndefinedError as e: # Leave a templated field as-is in the resulting dict - print(f'Error: {e}') + #print(f'Error: {e}') + print(f'Preserved template: {k}: {template}') + #for a, b in full_dict.items(): + # print(f' {a}: {b}') + pass + except TypeError: + print(f'Preserved template: {k}: {template}') + except ZeroDivisionError: print(f'Preserved template: {k}: {template}') - for a, b in full_dict.items(): - print(f' {a}: {b}') + except: + print(f'{k}: {template}') + raise data.append(template) @@ -149,6 +167,7 @@ def extend_yaml(yaml_dict, full_dict=None): # Put the full template line back together as it was, # filled or not yaml_dict[k] = ''.join(data) + print(f" {k}: {yaml_dict[k]}") ########## @@ -434,12 +453,16 @@ def update_dict(dict_o, dict_t, provide_default=False): Returns: None """ - for k, v in dict_t.items(): + for k, v in dict_o.items(): if isinstance(v, dict): - update_dict(dict_o, v, provide_default) - elif k in dict_o.keys(): - if (not provide_default) or (dict_t[k] is None) or (len(dict_t[k]) == 0): - dict_t[k] = dict_o[k] + if isinstance(dict_t.get(k), dict): + update_dict(v, dict_t[k], provide_default) + else: + dict_t[k] = v + elif k in dict_t.keys(): + if (not provide_default) or (dict_t[k] is None) or \ + (len(dict_t[k]) == 0) or ("{{" in dict_t[k]): + dict_t[k] = v def check_structure_dict(dict_o, dict_t): diff --git a/ush/set_thompson_mp_fix_files.py b/ush/set_thompson_mp_fix_files.py index 
59a702f327..4b41c44310 100644 --- a/ush/set_thompson_mp_fix_files.py +++ b/ush/set_thompson_mp_fix_files.py @@ -54,6 +54,8 @@ def set_thompson_mp_fix_files( # # ----------------------------------------------------------------------- # + mapping = [] + thompson_mp_fix_files = [] if sdf_uses_thompson_mp: # # ----------------------------------------------------------------------- @@ -78,7 +80,6 @@ def set_thompson_mp_fix_files( if link_thompson_climo: thompson_mp_fix_files.append(thompson_mp_climo_fn) - mapping = [] for fix_file in thompson_mp_fix_files: mapping.append(f"{fix_file} | {fix_file}") diff --git a/ush/setup.py b/ush/setup.py index 1648d541bb..cf7c4e22ab 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -15,9 +15,10 @@ check_var_valid_value, lowercase, uppercase, + list_to_str, check_for_preexist_dir_file, flatten_dict, - check_strcuture_dict, + check_structure_dict, update_dict, import_vars, get_env_var, @@ -28,6 +29,7 @@ load_ini_config, get_ini_value, str_to_list, + extend_yaml, ) from set_cycle_dates import set_cycle_dates @@ -115,6 +117,12 @@ def load_config_for_setup(ushdir, default_config, user_config): ) machine_cfg = load_config_file(machine_file) + # Load the fixed files configuration + cfg_f = load_config_file( + os.path.join(ushdir, os.pardir, "parm", + "fixed_files_mapping.yaml") + ) + # Load the constants file cfg_c = load_config_file(os.path.join(ushdir, "constants.yaml")) @@ -130,17 +138,26 @@ def load_config_for_setup(ushdir, default_config, user_config): # Machine settings update_dict(machine_cfg, cfg_d) + # Fixed files + update_dict(cfg_f, cfg_d) + # User settings (take precedence over all others) update_dict(cfg_u, cfg_d) + extend_yaml(cfg_d) + # Do any conversions of data types for sect, settings in cfg_d.items(): for k, v in settings.items(): if not (v is None or v == ""): cfg_d[sect][k] = str_to_list(v) + for k, v in cfg_d['task_run_fcst'].items(): + print(f"*** {k}: {v}") + # Mandatory variables *must* be set in the user's config or 
the machine file; the default value is invalid mandatory = [ + "EXPT_SUBDIR", "NCORES_PER_NODE", "FIXgsm", "FIXaer", @@ -148,23 +165,24 @@ def load_config_for_setup(ushdir, default_config, user_config): "TOPO_DIR", "SFC_CLIMO_INPUT_DIR", ] + flat_cfg = flatten_dict(cfg_d) for val in mandatory: - if not cfg_d.get("task_run_fcst", {}).get("val"): + if not flat_cfg.get(val): raise Exception( dedent( f""" - Mandatory variable "{val}" not found in: - user config file {user_config} - OR - machine file {machine_file} - """ + Mandatory variable "{val}" not found in: + user config file {user_config} + OR + machine file {machine_file} + """ ) ) # Check that input dates are in a date format dates = ["DATE_FIRST_CYCL", "DATE_LAST_CYCL"] for val in dates: - if not isinstance(cfg_d["user"][val], datetime.date): + if not isinstance(cfg_d["workflow"][val], datetime.date): raise Exception( dedent( f""" @@ -175,22 +193,6 @@ def load_config_for_setup(ushdir, default_config, user_config): ) ) - # Check to make sure mandatory workflow variables are set. - vlist = ["EXPT_SUBDIR"] - for val in vlist: - if not cfg_d["task_run_fcst"].get("val"): - raise Exception(f"\nMandatory variable '{val}' has not been set\n") - - # Check to make sure that mandatory forecast variables are set. 
- vlist = [ - "DT_ATMOS", - "LAYOUT_X", - "LAYOUT_Y", - "BLOCKSIZE", - ] - for val in vlist: - if not cfg_d["task_run_fcst"].get("val"): - raise Exception(f"\nMandatory variable '{val}' has not been set\n") return cfg_d @@ -281,7 +283,7 @@ def setup(USHdir, user_config_fn="config.yaml"): """ logger = getLogger(__name__) - cd_verify(USHdir) + cd_vrfy(USHdir) # print message log_info( @@ -357,8 +359,9 @@ def setup(USHdir, user_config_fn="config.yaml"): # ----------------------------------------------------------------------- # expt_basedir = workflow_config.get("EXPT_BASEDIR") + home_dir = expt_config['user'].get("HOMEdir") if (not expt_basedir) or (expt_basedir[0] != "/"): - if not expt_basedir: + if not expt_basedir or "{{" in expt_basedir: expt_basedir = "" expt_basedir = os.path.join(home_dir, "..", "expt_dirs", expt_basedir) try: @@ -367,9 +370,11 @@ def setup(USHdir, user_config_fn="config.yaml"): pass expt_basedir = os.path.abspath(expt_basedir) - mkdir_vrfy(f' -p "{expt_basedir}"') + #mkdir_vrfy(f' -p "{expt_basedir}"') workflow_config["EXPT_BASEDIR"] = expt_basedir + # Update some paths that include EXPT_BASEDIR + extend_yaml(expt_config) # # ----------------------------------------------------------------------- # @@ -406,6 +411,7 @@ def setup(USHdir, user_config_fn="config.yaml"): ) raise FileExistsError(errmsg) from None + # # ----------------------------------------------------------------------- # @@ -446,7 +452,7 @@ def setup(USHdir, user_config_fn="config.yaml"): # A batch system account is specified if expt_config["platform"].get("WORKFLOW_MANAGER") is not None: - if not expt.get("user").get("ACCOUNT"): + if not expt_config.get("user").get("ACCOUNT"): raise Exception( dedent( f""" @@ -528,17 +534,22 @@ def get_location(xcs, fmt, expt_cfg): # Gather the pre-defined grid parameters, if needed fcst_config = expt_config["task_run_fcst"] - grid_config = expt_confg["task_make_grid"] + grid_config = expt_config["task_make_grid"] if 
fcst_config.get("PREDEF_GRID_NAME"): grid_params = set_predef_grid_params(USHdir, fcst_config) # Users like to change these variables, so don't overwrite them special_vars = ["DT_ATMOS", "LAYOUT_X", "LAYOUT_Y", "BLOCKSIZE"] for param, value in grid_params.items(): - if param in special_vars and fcst_config.get(param) is not None: - continue + if param in special_vars: + if fcst_config.get(param) and "{{" not in fcst_config.get(param): + continue + else: + fcst_config[param] = value elif param.startswith("WRTCMP"): fcst_config[param] = value + elif param == "GRID_GEN_METHOD": + workflow_config[param] = value else: grid_config[param] = value @@ -579,20 +590,29 @@ def get_location(xcs, fmt, expt_cfg): constants=expt_config["constants"], ) else: - grid_params = { - "LON_CTR": LON_CTR, - "LAT_CTR": LAT_CTR, - "NX": NX, - "NY": NY, - "NHW": NHW, - "STRETCH_FAC": STRETCH_FAC, - } + + errmsg = dedent( + f""" + Valid values of GRID_GEN_METHOD are GFDLgrid and ESGgrid. + The value provided is: + GRID_GEN_METHOD = {grid_gen_method} + """ + ) + raise KeyError(errmsg) from None # Add a grid parameter section to the experiment config expt_config["grid_params"] = grid_params - # grid params - cfg_d["grid_params"] = grid_params + # Check to make sure that mandatory forecast variables are set. + vlist = [ + "DT_ATMOS", + "LAYOUT_X", + "LAYOUT_Y", + "BLOCKSIZE", + ] + for val in vlist: + if not fcst_config.get(val): + raise Exception(f"\nMandatory variable '{val}' has not been set\n") # # ----------------------------------------------------------------------- @@ -743,7 +763,7 @@ def get_location(xcs, fmt, expt_cfg): # # If using a custom post configuration file, make sure that it exists. 
- post_config = expt_config("task_run_post") + post_config = expt_config["task_run_post"] if post_config.get("USE_CUSTOM_POST_CONFIG_FILE"): custom_post_config_fp = post_config.get("CUSTOM_POST_CONFIG_FP") try: @@ -868,7 +888,7 @@ def get_location(xcs, fmt, expt_cfg): ) post_output_domain_name = predef_grid_name - if not isintstance(post_output_domain_name, int): + if not isinstance(post_output_domain_name, int): post_output_domain_name = lowercase(post_output_domain_name) # # ----------------------------------------------------------------------- @@ -972,18 +992,20 @@ def get_location(xcs, fmt, expt_cfg): FIELD_DICT_IN_UWM_FP = \"{field_dict_in_uwm_fp}\"''' ) + + fixed_files = expt_config["fixed_files"] # Set the appropriate ozone production/loss file paths and symlinks ozone_param, fixgsm_ozone_fn, ozone_link_mappings = set_ozone_param( ccpp_phys_suite_in_ccpp_fp, - fcst_config["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"], + fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"], ) # Reset the dummy value saved in the last list item to the ozone # file name - fcst_config["FIXgsm_FILES_TO_COPY_TO_FIXam"][-1] = fixgsm_ozone_fn + fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"][-1] = fixgsm_ozone_fn # Reset the experiment config list with the update list - fcst_config["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"] = ozone_link_mappings + fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"] = ozone_link_mappings log_info( f""" @@ -1005,7 +1027,7 @@ def get_location(xcs, fmt, expt_cfg): CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = {list_to_str(ozone_link_mappings)} """, verbose=verbose, - _dedent=False, + dedent_=False, ) # @@ -1069,12 +1091,12 @@ def get_location(xcs, fmt, expt_cfg): dot_or_underscore=workflow_config["DOT_OR_USCORE"], nhw=grid_params["NHW"], run_task=False, - sfc_climo_fields=expt_config["task_run_fcst"]["SFC_CLIMO_FIELDS"], + sfc_climo_fields=fixed_files["SFC_CLIMO_FIELDS"], ) if res_in_fixlam_filenames is None: res_in_fixlam_filenames = res_in_fns else: - if 
res_in_fixlam_filesnames != res_in_fns: + if res_in_fixlam_filenames != res_in_fns: raise Exception( dedent( f""" @@ -1083,20 +1105,20 @@ def get_location(xcs, fmt, expt_cfg): set: Resolution in {prep_task}: {res_in_fns} - Resolution expected: {res_in_fixlam_filesnames} + Resolution expected: {res_in_fixlam_filenames} """ ) ) - if not os.path.exists(task_dir): - raise FileNotFoundError( - f''' - The directory ({dir_key}) that should contain the pregenerated - {prep_task.lower()} files does not exist: - {dir_key} = \"{task_dir}\"''' - ) + if not os.path.exists(task_dir): + raise FileNotFoundError( + f''' + The directory ({dir_key}) that should contain the pregenerated + {prep_task.lower()} files does not exist: + {dir_key} = \"{task_dir}\"''' + ) - workflow_config["RES_IN_FIXLAM_FILENAMES"] = res_in_fixlam_filesnames + workflow_config["RES_IN_FIXLAM_FILENAMES"] = res_in_fixlam_filenames workflow_config["CRES"] = f"C{res_in_fixlam_filenames}" # @@ -1108,7 +1130,8 @@ def get_location(xcs, fmt, expt_cfg): # # ----------------------------------------------------------------------- # - mkdir_vrfy(f' -p "{FIXlam}"') + fixlam = workflow_config["FIXlam"] + mkdir_vrfy(f' -p "{fixlam}"') # # ----------------------------------------------------------------------- @@ -1147,7 +1170,7 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # workflow_config["SDF_USES_RUC_LSM"] = check_ruc_lsm( - ccpp_phys_suite_fp=CCPP_PHYS_SUITE_IN_CCPP_FP + ccpp_phys_suite_fp=ccpp_phys_suite_in_ccpp_fp ) # # ----------------------------------------------------------------------- @@ -1167,18 +1190,18 @@ def get_location(xcs, fmt, expt_cfg): get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RAP"] ) or (get_extrn_lbcs["EXTRN_MDL_NAME_LBCS"] not in ["HRRR", "RAP"]) use_thompson, mapping, fix_files = set_thompson_mp_fix_files( - ccpp_phys_suite_fp=CCPP_PHYS_SUITE_IN_CCPP_FP, - thompson_mp_climo_fn=THOMPSON_MP_CLIMO_FN, + 
ccpp_phys_suite_fp=ccpp_phys_suite_in_ccpp_fp, + thompson_mp_climo_fn=workflow_config["THOMPSON_MP_CLIMO_FN"], link_thompson_climo=link_thompson_climo, ) workflow_config["SDF_USES_THOMPSON_MP"] = use_thompson if use_thompson: - expt_config["task_run_fcst"]["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"].append( + fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"].append( mapping ) - expt_config["task_run_fcst"]["FIXgsm_FILES_TO_COPY_TO_FIXam"].append(fix_files) + fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"].append(fix_files) log_info( f""" @@ -1195,12 +1218,14 @@ def get_location(xcs, fmt, expt_cfg): ) log_info( f""" - FIXgsm_FILES_TO_COPY_TO_FIXam = {list_to_str(FIXgsm_FILES_TO_COPY_TO_FIXam)} + FIXgsm_FILES_TO_COPY_TO_FIXam = + {list_to_str(fixed_files['FIXgsm_FILES_TO_COPY_TO_FIXam'])} """ ) log_info( f""" - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = {list_to_str(CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING)} + CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = + {list_to_str(fixed_files['CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING'])} """ ) # @@ -1212,6 +1237,13 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # + + extend_yaml(expt_config) + for sect, sect_keys in expt_config.items(): + for k, v in sect_keys.items(): + expt_config[sect][k] = str_to_list(v) + extend_yaml(expt_config) + # print content of var_defns if DEBUG=True all_lines = cfg_to_yaml_str(expt_config) log_info(all_lines, verbose=debug) @@ -1223,7 +1255,7 @@ def get_location(xcs, fmt, expt_cfg): Generating the global experiment variable definitions file here: GLOBAL_VAR_DEFNS_FP = '{global_var_defns_fp}' For more detailed information, set DEBUG to 'TRUE' in the experiment - configuration file ('{user_config}').""" + configuration file ('{user_config_fn}').""" ) with open(global_var_defns_fp, "a") as f: @@ -1240,7 +1272,7 @@ def get_location(xcs, fmt, expt_cfg): # loop through the flattened expt_config and check validity of params cfg_v = 
load_config_file("valid_param_vals.yaml") for k, v in flatten_dict(expt_config).items(): - if v == None: + if v is None or v == '': continue vkey = "valid_vals_" + k if (vkey in cfg_v) and not (v in cfg_v[vkey]): From e045a6bc7ec88b4cc65a5d31717db134aedc9f03 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Fri, 2 Dec 2022 16:28:06 +0000 Subject: [PATCH 06/19] Finishing up tests. --- parm/FV3LAM_wflow.xml | 2 +- scripts/exregional_make_lbcs.sh | 3 + scripts/exregional_run_vx_ensgrid.sh | 2 +- tests/WE2E/run_WE2E_tests.sh | 6 +- .../config.specify_template_filenames.yaml | 4 +- ush/bash_utils/source_config.sh | 1 + ush/config_defaults.yaml | 121 +++++++++--------- ush/generate_FV3LAM_wflow.py | 1 + ush/python_utils/config_parser.py | 1 + ush/set_predef_grid_params.py | 9 +- ush/setup.py | 28 ++-- 11 files changed, 99 insertions(+), 79 deletions(-) diff --git a/parm/FV3LAM_wflow.xml b/parm/FV3LAM_wflow.xml index 974fcf1041..6d85161157 100644 --- a/parm/FV3LAM_wflow.xml +++ b/parm/FV3LAM_wflow.xml @@ -361,7 +361,7 @@ MODULES_RUN_TASK_FP script. {%- if do_ensemble %} - {% for m in range(1, num_ens_members+1) %}{{ "%03d" }}{% endfor %} + {% for m in range(1, num_ens_members+1) %}{{ "%03d " % m }}{% endfor %} {%- endif %} diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index 5f185256dc..ec031ac99c 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -8,6 +8,7 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh +set -x source_config_for_task "task_make_lbcs|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -65,6 +66,8 @@ export OMP_STACKSIZE=${OMP_STACKSIZE_MAKE_LBCS} # eval ${PRE_TASK_CMDS} +set -x + nprocs=$(( NNODES_MAKE_LBCS*PPN_MAKE_LBCS )) if [ -z "${RUN_CMD_UTILS:-}" ] ; then diff --git a/scripts/exregional_run_vx_ensgrid.sh b/scripts/exregional_run_vx_ensgrid.sh index a5080cc7cb..5177efee4a 100755 --- a/scripts/exregional_run_vx_ensgrid.sh +++ b/scripts/exregional_run_vx_ensgrid.sh @@ -76,7 +76,7 @@ export fhr_last fhr_list=`echo ${FHR} | $SED "s/ /,/g"` export fhr_list -NUM_PAD=${NDIGITS_ENSMEM_NAMES} +NUM_PAD=3 # #----------------------------------------------------------------------- diff --git a/tests/WE2E/run_WE2E_tests.sh b/tests/WE2E/run_WE2E_tests.sh index 461041c08d..ce4e029f32 100755 --- a/tests/WE2E/run_WE2E_tests.sh +++ b/tests/WE2E/run_WE2E_tests.sh @@ -787,10 +787,14 @@ Please correct and rerun." 
# #----------------------------------------------------------------------- # + set -x + save_USHdir=${USHdir} source_config ${USHdir}/config_defaults.yaml + USHdir=${save_USHdir} MACHINE_FILE=${machine_file:-"${USHdir}/machine/${machine,,}.yaml"} source_config ${MACHINE_FILE} source_config ${test_config_fp} + # #----------------------------------------------------------------------- # @@ -1016,7 +1020,7 @@ model_ver="we2e"" # # Set NCO mode OPSROOT # -OPSROOT=\"${opsroot}\"" +OPSROOT=\"${opsroot:-$OPSROOT}\"" fi # diff --git a/tests/WE2E/test_configs/wflow_features/config.specify_template_filenames.yaml b/tests/WE2E/test_configs/wflow_features/config.specify_template_filenames.yaml index 8fdff3997b..462de85819 100644 --- a/tests/WE2E/test_configs/wflow_features/config.specify_template_filenames.yaml +++ b/tests/WE2E/test_configs/wflow_features/config.specify_template_filenames.yaml @@ -6,8 +6,8 @@ user: RUN_ENVIR: community workflow: DATA_TABLE_TMPL_FN: data_table - DIAG_TABLE_TMPL_FN: diag_table - FIELD_TABLE_TMPL_FN: field_table + DIAG_TABLE_TMPL_FN: diag_table.FV3_GFS_v15p2 + FIELD_TABLE_TMPL_FN: field_table.FV3_GFS_v15p2 MODEL_CONFIG_TMPL_FN: model_configure NEMS_CONFIG_TMPL_FN: nems.configure CCPP_PHYS_SUITE: FV3_GFS_v15p2 diff --git a/ush/bash_utils/source_config.sh b/ush/bash_utils/source_config.sh index df5a79a0df..9d0d6cc5c0 100644 --- a/ush/bash_utils/source_config.sh +++ b/ush/bash_utils/source_config.sh @@ -6,6 +6,7 @@ # function config_to_str() { + set -x $USHdir/config_utils.py -o $1 -c $2 "${@:3}" } diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 34d529c806..efef734689 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -159,10 +159,10 @@ platform: WORKFLOW_MANAGER: "" NCORES_PER_NODE: "" LMOD_PATH: "" - BUILD_MOD_FN: "build_{{ user.MACHINE|lower() }}_{{ workflow.COMPILER }}" - WFLOW_MOD_FN: "wflow_{{ user.MACHINE|lower() }}" - BUILD_VER_FN: "build.ver.{{ user.MACHINE|lower() }}" - RUN_VER_FN: "run.ver.{{ 
user.MACHINE|lower() }}" + BUILD_MOD_FN: 'build_{{ user.MACHINE|lower() }}_{{ workflow.COMPILER }}' + WFLOW_MOD_FN: 'wflow_{{ user.MACHINE|lower() }}' + BUILD_VER_FN: 'build.ver.{{ user.MACHINE|lower() }}' + RUN_VER_FN: 'run.ver.{{ user.MACHINE|lower() }}' SCHED: "" PARTITION_DEFAULT: "" QUEUE_DEFAULT: "" @@ -419,7 +419,7 @@ workflow: # #----------------------------------------------------------------------- # - CPL: "{{ workflow.FCST_MODEL == 'fv3gfs_aqm' }}" + CPL: '{{ workflow.FCST_MODEL == "fv3gfs_aqm" }}' # #----------------------------------------------------------------------- # @@ -440,7 +440,7 @@ workflow: USE_CRON_TO_RELAUNCH: false CRON_RELAUNCH_INTVL_MNTS: 3 CRONTAB_LINE: "" - LOAD_MODULES_RUN_TASK_FP: "{{ [user.USHdir, 'load_modules_run_task.sh']|path_join}}" + LOAD_MODULES_RUN_TASK_FP: '{{ [user.USHdir, "load_modules_run_task.sh"]|path_join }}' # #----------------------------------------------------------------------- @@ -467,10 +467,10 @@ workflow: # installed. #----------------------------------------------------------------------- # - EXPT_BASEDIR: "{{ workflow.EXPT_BASEDIR }}" - EXPT_SUBDIR: "{{ EXPT_SUBDIR }}" + EXPT_BASEDIR: '{{ workflow.EXPT_BASEDIR }}' + EXPT_SUBDIR: '{{ EXPT_SUBDIR }}' EXEC_SUBDIR: "exec" - EXPTDIR: "{{ [EXPT_BASEDIR, EXPT_SUBDIR]|path_join}}" + EXPTDIR: '{{ [EXPT_BASEDIR, EXPT_SUBDIR]|path_join }}' # #----------------------------------------------------------------------- # @@ -591,24 +591,24 @@ workflow: DATA_TABLE_FN: "data_table" DIAG_TABLE_FN: "diag_table" FIELD_TABLE_FN: "field_table" - DIAG_TABLE_TMPL_FN: "diag_table.{{ CCPP_PHYS_SUITE }}" - FIELD_TABLE_TMPL_FN: "field_table.{{ CCPP_PHYS_SUITE }}" + DIAG_TABLE_TMPL_FN: 'diag_table.{{ CCPP_PHYS_SUITE }}' + FIELD_TABLE_TMPL_FN: 'field_table.{{ CCPP_PHYS_SUITE }}' MODEL_CONFIG_FN: "model_configure" NEMS_CONFIG_FN: "nems.configure" - FV3_NML_BASE_SUITE_FP: '{{ [user.PARMdir, FV3_NML_BASE_SUITE_FN]|path_join}}' - FV3_NML_YAML_CONFIG_FP: '{{ [user.PARMdir, 
FV3_NML_YAML_CONFIG_FN]|path_join}}' - FV3_NML_BASE_ENS_FP: '{{ [EXPTDIR, FV3_NML_BASE_ENS_FN]|path_join}}' - DATA_TABLE_TMPL_FP: '{{ [user.PARMdir, DATA_TABLE_FN]|path_join}}' - DIAG_TABLE_TMPL_FP: '{{ [user.PARMdir, DIAG_TABLE_TMPL_FN]|path_join}}' - FIELD_TABLE_TMPL_FP: '{{ [user.PARMdir, FIELD_TABLE_TMPL_FN]|path_join}}' - MODEL_CONFIG_TMPL_FP: '{{ [user.PARMdir, MODEL_CONFIG_FN]|path_join}}' - NEMS_CONFIG_TMPL_FP: '{{ [user.PARMdir, NEMS_CONFIG_FN]|path_join}}' + FV3_NML_BASE_SUITE_FP: '{{ [user.PARMdir, FV3_NML_BASE_SUITE_FN]|path_join }}' + FV3_NML_YAML_CONFIG_FP: '{{ [user.PARMdir, FV3_NML_YAML_CONFIG_FN]|path_join }}' + FV3_NML_BASE_ENS_FP: '{{ [EXPTDIR, FV3_NML_BASE_ENS_FN]|path_join }}' + DATA_TABLE_TMPL_FP: '{{ [user.PARMdir, DATA_TABLE_FN]|path_join }}' + DIAG_TABLE_TMPL_FP: '{{ [user.PARMdir, DIAG_TABLE_TMPL_FN]|path_join }}' + FIELD_TABLE_TMPL_FP: '{{ [user.PARMdir, FIELD_TABLE_TMPL_FN]|path_join }}' + MODEL_CONFIG_TMPL_FP: '{{ [user.PARMdir, MODEL_CONFIG_FN]|path_join }}' + NEMS_CONFIG_TMPL_FP: '{{ [user.PARMdir, NEMS_CONFIG_FN]|path_join }}' # These are staged in the exptdir at configuration time - DATA_TABLE_FP: '{{ [EXPTDIR, DATA_TABLE_FN]|path_join}}' - FIELD_TABLE_FP: '{{ [EXPTDIR, FIELD_TABLE_FN]|path_join}}' - NEMS_CONFIG_FP: '{{ [EXPTDIR, NEMS_CONFIG_FN]|path_join}}' + DATA_TABLE_FP: '{{ [EXPTDIR, DATA_TABLE_FN]|path_join }}' + FIELD_TABLE_FP: '{{ [EXPTDIR, FIELD_TABLE_FN]|path_join }}' + NEMS_CONFIG_FP: '{{ [EXPTDIR, NEMS_CONFIG_FN]|path_join }}' FV3_NML_FP: '{{ [EXPTDIR, FV3_NML_FN]|path_join }}' FCST_MODEL: "ufs-weather-model" @@ -618,9 +618,9 @@ workflow: WFLOW_LAUNCH_SCRIPT_FN: "launch_FV3LAM_wflow.sh" WFLOW_LAUNCH_LOG_FN: "log.launch_FV3LAM_wflow" - GLOBAL_VAR_DEFNS_FP: '{{ [EXPTDIR, GLOBAL_VAR_DEFNS_FN] |path_join}}' - WFLOW_LAUNCH_SCRIPT_FP: '{{ [user.USHdir, WFLOW_LAUNCH_SCRIPT_FN] |path_join}}' - WFLOW_LAUNCH_LOG_FP: '{{ [EXPTDIR, WFLOW_LAUNCH_LOG_FN] |path_join}}' + GLOBAL_VAR_DEFNS_FP: '{{ [EXPTDIR, GLOBAL_VAR_DEFNS_FN] 
|path_join }}' + WFLOW_LAUNCH_SCRIPT_FP: '{{ [user.USHdir, WFLOW_LAUNCH_SCRIPT_FN] |path_join }}' + WFLOW_LAUNCH_LOG_FP: '{{ [EXPTDIR, WFLOW_LAUNCH_LOG_FN] |path_join }}' # #----------------------------------------------------------------------- # @@ -651,13 +651,13 @@ workflow: # #----------------------------------------------------------------------- # - FIXdir: "{{ EXPTDIR if workflow_switches.RUN_TASK_MAKE_GRID else path_join([user.HOMEdir, 'fix']) }}" - FIXam: "{{ [FIXdir, 'fix_am']|path_join}}" - FIXclim: "{{ [FIXdir, 'fix_clim']|path_join}}" - FIXlam: "{{ [FIXdir, 'fix_lam']|path_join}}" + FIXdir: '{{ EXPTDIR if workflow_switches.RUN_TASK_MAKE_GRID else [user.HOMEdir, "fix"]|path_join }}' + FIXam: '{{ [FIXdir, "fix_am"]|path_join }}' + FIXclim: '{{ [FIXdir, "fix_clim"]|path_join }}' + FIXlam: '{{ [FIXdir, "fix_lam"]|path_join }}' THOMPSON_MP_CLIMO_FN: "Thompson_MP_MONTHLY_CLIMO.nc" - THOMPSON_MP_CLIMO_FP: '{{ [FIXam, THOMPSON_MP_CLIMO_FN]|path_join}}' + THOMPSON_MP_CLIMO_FP: '{{ [FIXam, THOMPSON_MP_CLIMO_FN]|path_join }}' # #----------------------------------------------------------------------- # @@ -675,9 +675,9 @@ workflow: #----------------------------------------------------------------------- # CCPP_PHYS_SUITE: "FV3_GFS_v16" - CCPP_PHYS_SUITE_FN: "suite_{{ CCPP_PHYS_SUITE }}.xml" - CCPP_PHYS_SUITE_IN_CCPP_FP: "{{ [user.UFS_WTHR_MDL_DIR, 'FV3', 'ccpp', 'suites', CCPP_PHYS_SUITE_FN] |path_join}}" - CCPP_PHYS_SUITE_FP: "{{ [workflow.EXPTDIR, CCPP_PHYS_SUITE_FN]|path_join}}" + CCPP_PHYS_SUITE_FN: 'suite_{{ CCPP_PHYS_SUITE }}.xml' + CCPP_PHYS_SUITE_IN_CCPP_FP: '{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "suites", CCPP_PHYS_SUITE_FN] |path_join }}' + CCPP_PHYS_SUITE_FP: '{{ [workflow.EXPTDIR, CCPP_PHYS_SUITE_FN]|path_join }}' # #----------------------------------------------------------------------- # @@ -686,8 +686,8 @@ workflow: #----------------------------------------------------------------------- # FIELD_DICT_FN: "fd_nems.yaml" - FIELD_DICT_IN_UWM_FP: 
"{{ [user.UFS_WTHR_MDL_DIR, 'tests', 'parm', FIELD_DICT_FN]|path_join}}" - FIELD_DICT_FP: "{{ [workflow.EXPTDIR, FIELD_DICT_FN]|path_join}}" + FIELD_DICT_IN_UWM_FP: '{{ [user.UFS_WTHR_MDL_DIR, "tests", "parm", FIELD_DICT_FN]|path_join }}' + FIELD_DICT_FP: '{{ [workflow.EXPTDIR, FIELD_DICT_FN]|path_join }}' # #----------------------------------------------------------------------- # @@ -905,14 +905,15 @@ nco: NET: "rrfs" RUN: "rrfs" model_ver: "v1.0.0" - OPSROOT: "{{ workflow.EXPT_BASEDIR }}/../nco_dirs" - COMROOT: "{{ OPSROOT }}/com" - PACKAGEROOT: "{{ OPSROOT }}/packages" - DATAROOT: "{{ OPSROOT }}/tmp" - DCOMROOT: "{{ OPSROOT }}/dcom" - LOGDIR: "{{ OPSROOT }}/output" - COMIN_BASEDIR: "{{ COMROOT }}/{{ NET }}/{{ model_ver }}" - COMOUT_BASEDIR: "{{ COMROOT }}/{{ NET }}/{{ model_ver }}" + OPSROOT: '{{ workflow.EXPT_BASEDIR }}/../nco_dirs' + COMROOT: '{{ OPSROOT }}/com' + PACKAGEROOT: '{{ OPSROOT }}/packages' + DATAROOT: '{{ OPSROOT }}/tmp' + DCOMROOT: '{{ OPSROOT }}/dcom' + LOGDIR: '{{ OPSROOT }}/output' + EXTROOT: '{{ OPSROOT }}/ext' + COMIN_BASEDIR: '{{ COMROOT }}/{{ NET }}/{{ model_ver }}' + COMOUT_BASEDIR: '{{ COMROOT }}/{{ NET }}/{{ model_ver }}' # #----------------------------------------------------------------------- @@ -1044,7 +1045,7 @@ task_make_grid: # #----------------------------------------------------------------------- # - GRID_DIR: "{{ [workflow.EXPTDIR, 'grid']|path_join}}" + GRID_DIR: '{{ [workflow.EXPTDIR, "grid"]|path_join }}' # #----------------------------------------------------------------------- # @@ -1325,7 +1326,7 @@ task_make_orog: KMP_AFFINITY_MAKE_OROG: "disabled" OMP_NUM_THREADS_MAKE_OROG: 6 OMP_STACKSIZE_MAKE_OROG: "2048m" - OROG_DIR: "{{ [workflow.EXPTDIR, 'orog']|path_join }}" + OROG_DIR: '{{ [workflow.EXPTDIR, "orog"]|path_join }}' TOPO_DIR: "" #---------------------------- @@ -1340,7 +1341,7 @@ task_make_sfc_climo: KMP_AFFINITY_MAKE_SFC_CLIMO: "scatter" OMP_NUM_THREADS_MAKE_SFC_CLIMO: 1 OMP_STACKSIZE_MAKE_SFC_CLIMO: "1024m" - 
SFC_CLIMO_DIR: "{{ [workflow.EXPTDIR, 'sfc_climo']|path_join }}" + SFC_CLIMO_DIR: '{{ [workflow.EXPTDIR, "sfc_climo"]|path_join }}' SFC_CLIMO_INPUT_DIR: "" #---------------------------- @@ -1488,7 +1489,7 @@ task_get_extrn_lbcs: # EXTRN_MDL_NAME_LBCS: "FV3GFS" LBC_SPEC_INTVL_HRS: 6 - EXTRN_MDL_LBCS_OFFSET_HRS: "{{ 3 if EXTRN_MDL_NAME_LBCS == 'RAP' else 0 }}" + EXTRN_MDL_LBCS_OFFSET_HRS: '{{ 3 if EXTRN_MDL_NAME_LBCS == "RAP" else 0 }}' FV3GFS_FILE_FMT_LBCS: "nemsio" #----------------------------------------------------------------------- # @@ -1580,18 +1581,18 @@ task_make_lbcs: KMP_AFFINITY_MAKE_LBCS: "scatter" OMP_NUM_THREADS_MAKE_LBCS: 1 OMP_STACKSIZE_MAKE_LBCS: "1024m" - LBC_SPEC_FCST_HRS: "{% for h in range(task_get_extrn_lbcs.LBC_SPEC_INTVL_HRS, task_get_extrn_lbcs.LBC_SPEC_INTVL_HRS + workflow.FCST_LEN_HRS, task_get_extrn_lbcs.LBC_SPEC_INTVL_HRS) %}{{ h }}{% endfor %}" + LBC_SPEC_FCST_HRS: '( {% for h in range(task_get_extrn_lbcs.LBC_SPEC_INTVL_HRS, task_get_extrn_lbcs.LBC_SPEC_INTVL_HRS + workflow.FCST_LEN_HRS, task_get_extrn_lbcs.LBC_SPEC_INTVL_HRS) %}{{ "%d " % h }}{% endfor %} )' #---------------------------- # FORECAST config parameters #----------------------------- task_run_fcst: RUN_FCST_TN: "run_fcst" - NNODES_RUN_FCST: "{{ (PE_MEMBER01 + PPN_RUN_FCST - 1) // PPN_RUN_FCST }}" - PPN_RUN_FCST: "{{ platform.NCORES_PER_NODE // OMP_NUM_THREADS_RUN_FCST }}" + NNODES_RUN_FCST: '{{ (PE_MEMBER01 + PPN_RUN_FCST - 1) // PPN_RUN_FCST }}' + PPN_RUN_FCST: '{{ platform.NCORES_PER_NODE // OMP_NUM_THREADS_RUN_FCST }}' WTIME_RUN_FCST: 04:30:00 MAXTRIES_RUN_FCST: 1 - FV3_EXEC_FP: '{{ [user.EXECdir, workflow.FV3_EXEC_FN]|path_join}}' + FV3_EXEC_FP: '{{ [user.EXECdir, workflow.FV3_EXEC_FN]|path_join }}' # #----------------------------------------------------------------------- # @@ -1691,9 +1692,9 @@ task_run_fcst: # #----------------------------------------------------------------------- # - LAYOUT_X: "{{ LAYOUT_X }}" - LAYOUT_Y: "{{ LAYOUT_Y }}" - BLOCKSIZE: "{{ 
BLOCKSIZE }}" + LAYOUT_X: '{{ LAYOUT_X }}' + LAYOUT_Y: '{{ LAYOUT_Y }}' + BLOCKSIZE: '{{ BLOCKSIZE }}' # #----------------------------------------------------------------------- # @@ -1796,8 +1797,8 @@ task_run_fcst: PE_MEMBER01: '{{ LAYOUT_Y * LAYOUT_X + WRTCMP_write_groups * WRTCMP_write_tasks_per_group if QUILTING else LAYOUT_Y * LAYOUT_X}}' - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 20 + WRTCMP_write_groups: "" + WRTCMP_write_tasks_per_group: "" WRTCMP_output_grid: "''" WRTCMP_cen_lon: "" @@ -1830,7 +1831,7 @@ task_run_fcst: # #----------------------------------------------------------------------- # - USE_MERRA_CLIMO: "{{ workflow.CCPP_PHYS_SUITE == 'FV3_GFS_v15_thompson_mynn_lam3km' }}" + USE_MERRA_CLIMO: '{{ workflow.CCPP_PHYS_SUITE == "FV3_GFS_v15_thompson_mynn_lam3km" }}' #---------------------------- # POST config parameters @@ -1905,7 +1906,7 @@ task_run_post: # USE_CUSTOM_POST_CONFIG_FILE: false CUSTOM_POST_CONFIG_FP: "" - POST_OUTPUT_DOMAIN_NAME: "{{ task_run_fcst.PREDEF_GRID_NAME }}" + POST_OUTPUT_DOMAIN_NAME: '{{ workflow.PREDEF_GRID_NAME }}' #---------------------------- # GET OBS CCPA config parameters @@ -2229,8 +2230,8 @@ global: # DO_ENSEMBLE: false NUM_ENS_MEMBERS: 0 - ENSMEM_NAMES: "{% for m in range(NUM_ENS_MEMBERS) %} 'mem%03d, ' % m {% endfor %}" - FV3_NML_ENSMEM_FPS: "{% for mem in ENSMEM_NAMES %}{{ [EXPTDIR, '%s_%s' % FV3_NML_FN, mem]|path_join }}{% endfor %}" + ENSMEM_NAMES: '{% for m in range(NUM_ENS_MEMBERS) %} "mem%03d, " % m {% endfor %}' + FV3_NML_ENSMEM_FPS: '{% for mem in ENSMEM_NAMES %}{{ [EXPTDIR, "%s_%s" % FV3_NML_FN, mem]|path_join }}{% endfor %}' # #----------------------------------------------------------------------- diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 25d8a99150..49c4aabe5e 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -241,6 +241,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> # TODO: Reference 
all these variables in their respective # dictionaries, instead. import_vars(dictionary=flatten_dict(expt_config)) + export_vars(source_dict=flatten_dict(expt_config)) if USE_CRON_TO_RELAUNCH: add_crontab_line() diff --git a/ush/python_utils/config_parser.py b/ush/python_utils/config_parser.py index c3e68bcdc8..54342c5786 100644 --- a/ush/python_utils/config_parser.py +++ b/ush/python_utils/config_parser.py @@ -577,6 +577,7 @@ def cfg_main(): args = parser.parse_args() cfg = load_config_file(args.cfg, 2) + if args.validate: cfg_t = load_config_file(args.validate, 1) r = check_structure_dict(cfg, cfg_t) diff --git a/ush/set_predef_grid_params.py b/ush/set_predef_grid_params.py index 949018f5ab..3b12f8a527 100644 --- a/ush/set_predef_grid_params.py +++ b/ush/set_predef_grid_params.py @@ -10,21 +10,20 @@ ) -def set_predef_grid_params(USHdir, fcst_config): +def set_predef_grid_params(USHdir, grid_name, quilting): """Sets grid parameters for the specified predfined grid Args: USHdir: path to the SRW ush directory - fcst_config: dict containing grid settings + grid_name str specifying the predefined grid name. 
+ quilting: bool whether quiliting should be used for output Returns: Dictionary of grid parameters """ - predef_grid_name = fcst_config["PREDEF_GRID_NAME"] - quilting = fcst_config["QUILTING"] params_dict = load_config_file(os.path.join(USHdir, "predef_grid_params.yaml")) try: - params_dict = params_dict[predef_grid_name] + params_dict = params_dict[grid_name] except KeyError: errmsg = dedent( f""" diff --git a/ush/setup.py b/ush/setup.py index c6815118dc..cc931169ab 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -533,14 +533,22 @@ def get_location(xcs, fmt, expt_cfg): # Gather the pre-defined grid parameters, if needed fcst_config = expt_config["task_run_fcst"] grid_config = expt_config["task_make_grid"] - if fcst_config.get("PREDEF_GRID_NAME"): - grid_params = set_predef_grid_params(USHdir, fcst_config) + if workflow_config.get("PREDEF_GRID_NAME"): + grid_params = set_predef_grid_params( + USHdir, + workflow_config['PREDEF_GRID_NAME'], + fcst_config['QUILTING'], + ) # Users like to change these variables, so don't overwrite them special_vars = ["DT_ATMOS", "LAYOUT_X", "LAYOUT_Y", "BLOCKSIZE"] for param, value in grid_params.items(): if param in special_vars: - if fcst_config.get(param) and "{{" not in fcst_config.get(param): + param_val = fcst_config.get(param) + if param_val and isinstance(param_val, str) and \ + "{{" not in param_val: + continue + elif isinstance(param_val, (int, float)): continue else: fcst_config[param] = value @@ -551,6 +559,7 @@ def get_location(xcs, fmt, expt_cfg): else: grid_config[param] = value + run_envir = expt_config["user"].get("RUN_ENVIR", "") # # ----------------------------------------------------------------------- # @@ -870,7 +879,7 @@ def get_location(xcs, fmt, expt_cfg): ) # Make sure the post output domain is set - predef_grid_name = fcst_config.get("PREDEF_GRID_NAME") + predef_grid_name = workflow_config.get("PREDEF_GRID_NAME") post_output_domain_name = post_config.get("POST_OUTPUT_DOMAIN_NAME") if not 
post_output_domain_name: @@ -896,7 +905,6 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # - run_envir = expt_config["user"].get("RUN_ENVIR", "") # These NCO variables need to be set based on the user's specificed # run environment. The default is set in config_defaults for nco. If @@ -919,6 +927,8 @@ def get_location(xcs, fmt, expt_cfg): for nco_var in nco_vars: nco_config[nco_var.upper()] = exptdir + nco_config["LOGDIR"] = os.path.join(exptdir, "log") + # create NCO directories if run_envir == "nco": mkdir_vrfy(f' -p "{nco_config.get("OPSROOT")}"') @@ -1087,7 +1097,7 @@ def get_location(xcs, fmt, expt_cfg): target_dir=workflow_config["FIXlam"], ccpp_phys_suite=workflow_config["CCPP_PHYS_SUITE"], constants=expt_config["constants"], - dot_or_underscore=workflow_config["DOT_OR_USCORE"], + dot_or_uscore=workflow_config["DOT_OR_USCORE"], nhw=grid_params["NHW"], run_task=False, sfc_climo_fields=fixed_files["SFC_CLIMO_FIELDS"], @@ -1197,10 +1207,10 @@ def get_location(xcs, fmt, expt_cfg): workflow_config["SDF_USES_THOMPSON_MP"] = use_thompson if use_thompson: - fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"].append( + fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"].extend( mapping ) - fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"].append(fix_files) + fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"].extend(fix_files) log_info( f""" @@ -1212,7 +1222,7 @@ def get_location(xcs, fmt, expt_cfg): CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING. After these modifications, the values of these parameters are as follows: - CCPP_PHYS_SUITE = \"{CCPP_PHYS_SUITE}\" + CCPP_PHYS_SUITE = \"{workflow_config["CCPP_PHYS_SUITE"]}\" """ ) log_info( From 4cec6f0581e23eac902282ba69c31e495eb168fa Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Fri, 2 Dec 2022 19:31:29 +0000 Subject: [PATCH 07/19] Cleanup mods found during self review. 
--- scripts/exregional_make_lbcs.sh | 3 --- tests/WE2E/machine_suites/fundamental | 9 +++++++++ tests/WE2E/run_WE2E_tests.sh | 2 -- ush/bash_utils/source_config.sh | 1 - ush/python_utils/config_parser.py | 1 - ush/setup.py | 29 ++++++++++++++------------- 6 files changed, 24 insertions(+), 21 deletions(-) create mode 100644 tests/WE2E/machine_suites/fundamental diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index adcf1bd606..53fefc112d 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -8,7 +8,6 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -set -x source_config_for_task "task_make_lbcs|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -66,8 +65,6 @@ export OMP_STACKSIZE=${OMP_STACKSIZE_MAKE_LBCS} # eval ${PRE_TASK_CMDS} -set -x - nprocs=$(( NNODES_MAKE_LBCS*PPN_MAKE_LBCS )) if [ -z "${RUN_CMD_UTILS:-}" ] ; then diff --git a/tests/WE2E/machine_suites/fundamental b/tests/WE2E/machine_suites/fundamental new file mode 100644 index 0000000000..0887e6c58e --- /dev/null +++ b/tests/WE2E/machine_suites/fundamental @@ -0,0 +1,9 @@ +grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2 +grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 +grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_HRRR +grid_RRFS_CONUS_25km_ics_GSMGFS_lbcs_GSMGFS_suite_GFS_v15p2 +grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR +grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_RRFS_v1beta +grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR +grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta +nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR diff --git a/tests/WE2E/run_WE2E_tests.sh b/tests/WE2E/run_WE2E_tests.sh index ae4ed89dca..cfcf6e8c39 100755 --- a/tests/WE2E/run_WE2E_tests.sh +++ b/tests/WE2E/run_WE2E_tests.sh @@ -797,14 +797,12 @@ Please 
correct and rerun." # #----------------------------------------------------------------------- # - set -x save_USHdir=${USHdir} source_config ${USHdir}/config_defaults.yaml USHdir=${save_USHdir} MACHINE_FILE=${machine_file:-"${USHdir}/machine/${machine,,}.yaml"} source_config ${MACHINE_FILE} source_config ${test_config_fp} - # #----------------------------------------------------------------------- # diff --git a/ush/bash_utils/source_config.sh b/ush/bash_utils/source_config.sh index 9d0d6cc5c0..df5a79a0df 100644 --- a/ush/bash_utils/source_config.sh +++ b/ush/bash_utils/source_config.sh @@ -6,7 +6,6 @@ # function config_to_str() { - set -x $USHdir/config_utils.py -o $1 -c $2 "${@:3}" } diff --git a/ush/python_utils/config_parser.py b/ush/python_utils/config_parser.py index 54342c5786..c3e68bcdc8 100644 --- a/ush/python_utils/config_parser.py +++ b/ush/python_utils/config_parser.py @@ -577,7 +577,6 @@ def cfg_main(): args = parser.parse_args() cfg = load_config_file(args.cfg, 2) - if args.validate: cfg_t = load_config_file(args.validate, 1) r = check_structure_dict(cfg, cfg_t) diff --git a/ush/setup.py b/ush/setup.py index 30dd86d258..79668a513a 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -391,22 +391,22 @@ def setup(USHdir, user_config_fn="config.yaml"): except ValueError: logger.exception( f""" - Check that the following values are valid: - EXPTDIR {exptdir} - PREEXISTING_DIR_METHOD {preexisting_dir_method} - """ + Check that the following values are valid: + EXPTDIR {exptdir} + PREEXISTING_DIR_METHOD {preexisting_dir_method} + """ ) raise except FileExistsError: errmsg = dedent( f""" - EXPTDIR ({exptdir}) already exists, and PREEXISTING_DIR_METHOD = {preexisting_dir_method} + EXPTDIR ({exptdir}) already exists, and PREEXISTING_DIR_METHOD = {preexisting_dir_method} - To ignore this error, delete the directory, or set - PREEXISTING_DIR_METHOD = delete, or - PREEXISTING_DIR_METHOD = rename - in your config file. 
- """ + To ignore this error, delete the directory, or set + PREEXISTING_DIR_METHOD = delete, or + PREEXISTING_DIR_METHOD = rename + in your config file. + """ ) raise FileExistsError(errmsg) from None @@ -938,6 +938,7 @@ def get_location(xcs, fmt, expt_cfg): mkdir_vrfy(f' -p "{nco_config.get("DATAROOT")}"') mkdir_vrfy(f' -p "{nco_config.get("DCOMROOT")}"') mkdir_vrfy(f' -p "{nco_config.get("LOGDIR")}"') + mkdir_vrfy(f' -p "{nco_config.get("EXTROOT")}"') if nco_config["DBNROOT"]: mkdir_vrfy(f' -p "{nco_config["DBNROOT"]}"') @@ -986,20 +987,20 @@ def get_location(xcs, fmt, expt_cfg): ccpp_phys_suite_in_ccpp_fp = workflow_config["CCPP_PHYS_SUITE_IN_CCPP_FP"] if not os.path.exists(ccpp_phys_suite_in_ccpp_fp): raise FileNotFoundError( - f''' + f""" The CCPP suite definition file (CCPP_PHYS_SUITE_IN_CCPP_FP) does not exist in the local clone of the ufs-weather-model: - CCPP_PHYS_SUITE_IN_CCPP_FP = \"{ccpp_phys_suite_in_ccpp_fp}\"''' + CCPP_PHYS_SUITE_IN_CCPP_FP = '{ccpp_phys_suite_in_ccpp_fp}'""" ) # Check for the field dict file field_dict_in_uwm_fp = workflow_config["FIELD_DICT_IN_UWM_FP"] if not os.path.exists(field_dict_in_uwm_fp): raise FileNotFoundError( - f''' + f""" The field dictionary file (FIELD_DICT_IN_UWM_FP) does not exist in the local clone of the ufs-weather-model: - FIELD_DICT_IN_UWM_FP = \"{field_dict_in_uwm_fp}\"''' + FIELD_DICT_IN_UWM_FP = '{field_dict_in_uwm_fp}'""" ) From 29272d54ec4880ebb34485b235e683477f11f865 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Fri, 2 Dec 2022 19:36:19 +0000 Subject: [PATCH 08/19] Fix ozone test failure. 
--- ush/set_ozone_param.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/ush/set_ozone_param.py b/ush/set_ozone_param.py index 7fc146105f..f1b30a239f 100644 --- a/ush/set_ozone_param.py +++ b/ush/set_ozone_param.py @@ -134,13 +134,11 @@ def set_ozone_param(ccpp_phys_suite_fp, link_mappings): class Testing(unittest.TestCase): def test_set_ozone_param(self): USHdir = os.path.dirname(os.path.abspath(__file__)) - self.assertEqual( - "ozphys_2015", - set_ozone_param( - f"{USHdir}{os.sep}test_data{os.sep}suite_FV3_GSD_SAR.xml", - self.CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING, - ), + ozone_param, _, _ = set_ozone_param( + f"{USHdir}{os.sep}test_data{os.sep}suite_FV3_GSD_SAR.xml", + self.CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING, ) + self.assertEqual("ozphys_2015", ozone_param) def setUp(self): self.CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = [ From db4e7eb00f317b6d1a360fb9f1cb600da93b1a42 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Fri, 2 Dec 2022 20:27:03 +0000 Subject: [PATCH 09/19] Pass unit tests --- ush/calculate_cost.py | 33 +++++++++++++++++++-------------- ush/set_gridparams_GFDLgrid.py | 2 +- ush/set_predef_grid_params.py | 6 ++++-- 3 files changed, 24 insertions(+), 17 deletions(-) diff --git a/ush/calculate_cost.py b/ush/calculate_cost.py index 1e0b52a204..1abe729545 100755 --- a/ush/calculate_cost.py +++ b/ush/calculate_cost.py @@ -31,17 +31,19 @@ def calculate_cost(config_fn): ] import_vars(env_vars=IMPORTS) + ushdir = os.path.dirname(os.path.abspath(__file__)) + # get grid config parameters (predefined or custom) if PREDEF_GRID_NAME: QUILTING = False params_dict = set_predef_grid_params( - PREDEF_GRID_NAME, - QUILTING, - DT_ATMOS, - LAYOUT_X, - LAYOUT_Y, - BLOCKSIZE, + USHdir=ushdir, + grid_name=PREDEF_GRID_NAME, + quilting=QUILTING, ) + for param, value in params_dict.items(): + if param in IMPORTS and globals()[param] is not None: + params_dict[param] = globals()[param] import_vars(dictionary=params_dict) else: cfg_u = 
load_config_file(config_fn) @@ -60,11 +62,13 @@ def calculate_cost(config_fn): iend_of_t7_on_t6g=GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G, jstart_of_t7_on_t6g=GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G, jend_of_t7_on_t6g=GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G, - RUN_ENVIR="community", - VERBOSE=False, + run_envir="community", + verbose=False, + nh4=4, ) elif GRID_GEN_METHOD == "ESGgrid": + constants = load_config_file(os.path.join(ushdir, "constants.yaml")) grid_params = set_gridparams_ESGgrid( lon_ctr=ESGgrid_LON_CTR, lat_ctr=ESGgrid_LAT_CTR, @@ -74,6 +78,7 @@ def calculate_cost(config_fn): halo_width=ESGgrid_WIDE_HALO_WIDTH, delx=ESGgrid_DELX, dely=ESGgrid_DELY, + constants=constants["constants"], ) NX = grid_params["NX"] @@ -84,13 +89,13 @@ def calculate_cost(config_fn): PREDEF_GRID_NAME = "RRFS_CONUS_25km" params_dict = set_predef_grid_params( - PREDEF_GRID_NAME, - QUILTING, - DT_ATMOS, - LAYOUT_X, - LAYOUT_Y, - BLOCKSIZE, + USHdir=os.path.dirname(os.path.abspath(__file__)), + grid_name=PREDEF_GRID_NAME, + quilting=QUILTING, ) + for param, value in params_dict.items(): + if param in IMPORTS and globals()[param] is not None: + params_dict[param] = globals()[param] import_vars(dictionary=params_dict) cost.extend([DT_ATMOS, ESGgrid_NX * ESGgrid_NY]) diff --git a/ush/set_gridparams_GFDLgrid.py b/ush/set_gridparams_GFDLgrid.py index 838c2f550e..c8a0165ec4 100644 --- a/ush/set_gridparams_GFDLgrid.py +++ b/ush/set_gridparams_GFDLgrid.py @@ -491,7 +491,7 @@ def test_set_gridparams_GFDLgrid(self): iend_of_t7_on_t6g=84, jstart_of_t7_on_t6g=17, jend_of_t7_on_t6g=80, - run_env="community", + run_envir="community", verbose=True, nh4=4, ) diff --git a/ush/set_predef_grid_params.py b/ush/set_predef_grid_params.py index 3b12f8a527..535f17ed24 100644 --- a/ush/set_predef_grid_params.py +++ b/ush/set_predef_grid_params.py @@ -54,7 +54,8 @@ def test_set_predef_grid_params(self): ) params_dict = set_predef_grid_params( ushdir, - fcst_config, + fcst_config["PREDEF_GRID_NAME"], + fcst_config["QUILTING"], 
) self.assertEqual(params_dict["GRID_GEN_METHOD"], "ESGgrid") self.assertEqual(params_dict["ESGgrid_LON_CTR"], -97.5) @@ -68,6 +69,7 @@ def test_set_predef_grid_params(self): ) params_dict = set_predef_grid_params( ushdir, - fcst_config, + fcst_config["PREDEF_GRID_NAME"], + fcst_config["QUILTING"], ) self.assertEqual(params_dict["WRTCMP_nx"], 1799) From 24bf06e46d2fa170bad61f77c2b3de6d56b09a8e Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 5 Dec 2022 16:33:49 +0000 Subject: [PATCH 10/19] Fix NCO test --- ush/config.nco.yaml | 1 - ush/config_defaults.yaml | 2 +- ush/generate_FV3LAM_wflow.py | 28 +++++++++++++++++++++------- ush/setup.py | 4 +++- 4 files changed, 25 insertions(+), 10 deletions(-) diff --git a/ush/config.nco.yaml b/ush/config.nco.yaml index 26171f2f3d..9640b1a4a4 100644 --- a/ush/config.nco.yaml +++ b/ush/config.nco.yaml @@ -25,7 +25,6 @@ nco: NET: rrfs model_ver: v1.0 RUN: rrfs_test - OPSROOT: "" task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: grib2 diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 7d0388d59e..aaa63ada50 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1050,7 +1050,7 @@ task_make_grid: # #----------------------------------------------------------------------- # - GRID_DIR: '{{ [workflow.EXPTDIR, "grid"]|path_join }}' + GRID_DIR: '{{ [workflow.EXPTDIR, "grid"]|path_join if workflow_switches.RUN_TASK_MAKE_GRID else [platform.DOMAIN_PREGEN_BASEDIR, workflow.PREDEF_GRID_NAME]|path_join}}' # #----------------------------------------------------------------------- # diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 49c4aabe5e..dde451ed55 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -13,6 +13,8 @@ log_info, import_vars, export_vars, + load_config_file, + update_dict, cp_vrfy, ln_vrfy, mkdir_vrfy, @@ -67,6 +69,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> # 
non-user-specified values from config_defaults.yaml expt_config = setup(ushdir) + verbose = expt_config['workflow']['VERBOSE'] # # ----------------------------------------------------------------------- # @@ -160,8 +163,6 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> # Log "settings" variable. settings_str = cfg_to_yaml_str(settings) - verbose = expt_config['workflow']['VERBOSE'] - log_info( f""" The variable 'settings' specifying values of the rococo XML variables @@ -812,11 +813,24 @@ def run_workflow(USHdir, logfile): run_workflow(USHdir, logfile) # nco test case - set_env_var("OPSROOT", f"{USHdir}/../../nco_dirs") - cp_vrfy(f"{USHdir}/config.nco.yaml", f"{USHdir}/config.yaml") - run_command( - f"""{SED} -i 's/MACHINE: hera/MACHINE: linux/g' {USHdir}/config.yaml""" - ) + nco_test_config = load_config_file(f"{USHdir}/config.nco.yaml") + # Since we don't have a pre-gen grid dir on a generic linux + # platform, turn the make_* tasks on for this test. 
+ cfg_updates = { + 'user': { + 'MACHINE': 'linux', + }, + 'workflow_switches': { + 'RUN_TASK_MAKE_GRID': True, + 'RUN_TASK_MAKE_OROG': True, + 'RUN_TASK_MAKE_SFC_CLIMO': True, + }, + } + update_dict(cfg_updates, nco_test_config) + + with open(f"{USHdir}/config.yaml", 'w') as cfg_file: + cfg_file.write(cfg_to_yaml_str(nco_test_config)) + run_workflow(USHdir, logfile) def setUp(self): diff --git a/ush/setup.py b/ush/setup.py index 79668a513a..5ce0c8041d 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -102,7 +102,9 @@ def load_config_for_setup(ushdir, default_config, user_config): ) # Load the machine config file - machine = cfg_u.get("user").get("MACHINE") + machine = uppercase(cfg_u.get("user").get("MACHINE")) + cfg_u["user"]["MACHINE"] = uppercase(machine) + machine_file = os.path.join(ushdir, "machine", f"{lowercase(machine)}.yaml") if not os.path.exists(machine_file): From 884e0b10e7ebd1e001b597796bde9b7fcc02f96f Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Thu, 8 Dec 2022 17:04:47 +0000 Subject: [PATCH 11/19] Minor changes from review comments. --- tests/WE2E/run_WE2E_tests.sh | 3 +++ ush/config_defaults.yaml | 1 - ush/constants.yaml | 2 +- ush/set_predef_grid_params.py | 2 +- ush/setup.py | 23 ++++++----------------- 5 files changed, 11 insertions(+), 20 deletions(-) diff --git a/tests/WE2E/run_WE2E_tests.sh b/tests/WE2E/run_WE2E_tests.sh index cfcf6e8c39..1e03b908e8 100755 --- a/tests/WE2E/run_WE2E_tests.sh +++ b/tests/WE2E/run_WE2E_tests.sh @@ -797,6 +797,9 @@ Please correct and rerun." # #----------------------------------------------------------------------- # + + # Save the environment variable since a default will override when + # sourced. 
save_USHdir=${USHdir} source_config ${USHdir}/config_defaults.yaml USHdir=${save_USHdir} diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 090d32730f..f8f6a93279 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -158,7 +158,6 @@ platform: # WORKFLOW_MANAGER: "" NCORES_PER_NODE: "" - LMOD_PATH: "" BUILD_MOD_FN: 'build_{{ user.MACHINE|lower() }}_{{ workflow.COMPILER }}' WFLOW_MOD_FN: 'wflow_{{ user.MACHINE|lower() }}' BUILD_VER_FN: 'build.ver.{{ user.MACHINE|lower() }}' diff --git a/ush/constants.yaml b/ush/constants.yaml index c7ee96005e..c4a3fa9d67 100644 --- a/ush/constants.yaml +++ b/ush/constants.yaml @@ -37,7 +37,7 @@ constants: # Grid type string, set to regional for SRW # # TILE_RGNL: 7 - # Tile number set ot 7 for a regional grid in SRW + # Tile number set to 7 for a regional grid in SRW # # We will let: # diff --git a/ush/set_predef_grid_params.py b/ush/set_predef_grid_params.py index 535f17ed24..35096cefd3 100644 --- a/ush/set_predef_grid_params.py +++ b/ush/set_predef_grid_params.py @@ -16,7 +16,7 @@ def set_predef_grid_params(USHdir, grid_name, quilting): Args: USHdir: path to the SRW ush directory grid_name str specifying the predefined grid name. 
- quilting: bool whether quiliting should be used for output + quilting: bool whether quilting should be used for output Returns: Dictionary of grid parameters """ diff --git a/ush/setup.py b/ush/setup.py index 5ce0c8041d..310df56ee3 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -218,10 +218,10 @@ def set_srw_paths(ushdir, expt_config): """ # HOMEdir is the location of the SRW clone, one directory above ush/ - home_dir = os.path.abspath(os.path.dirname(__file__) + os.sep + os.pardir) + homedir = os.path.abspath(os.path.dirname(__file__) + os.sep + os.pardir) # Read Externals.cfg - mng_extrns_cfg_fn = os.path.join(home_dir, "Externals.cfg") + mng_extrns_cfg_fn = os.path.join(homedir, "Externals.cfg") try: mng_extrns_cfg_fn = os.readlink(mng_extrns_cfg_fn) except: @@ -243,7 +243,7 @@ def set_srw_paths(ushdir, expt_config): raise Exception(errmsg) from None # Check that the model code has been downloaded - ufs_wthr_mdl_dir = os.path.join(home_dir, ufs_wthr_mdl_dir) + ufs_wthr_mdl_dir = os.path.join(homedir, ufs_wthr_mdl_dir) if not os.path.exists(ufs_wthr_mdl_dir): raise FileNotFoundError( dedent( @@ -257,7 +257,7 @@ def set_srw_paths(ushdir, expt_config): ) return dict( - HOMEdir=home_dir, + HOMEdir=homedir, USHdir=ushdir, UFS_WTHR_MDL_DIR=ufs_wthr_mdl_dir, ) @@ -360,18 +360,17 @@ def setup(USHdir, user_config_fn="config.yaml"): # ----------------------------------------------------------------------- # expt_basedir = workflow_config.get("EXPT_BASEDIR") - home_dir = expt_config['user'].get("HOMEdir") + homedir = expt_config['user'].get("HOMEdir") if (not expt_basedir) or (expt_basedir[0] != "/"): if not expt_basedir or "{{" in expt_basedir: expt_basedir = "" - expt_basedir = os.path.join(home_dir, "..", "expt_dirs", expt_basedir) + expt_basedir = os.path.join(homedir, "..", "expt_dirs", expt_basedir) try: expt_basedir = os.path.realpath(expt_basedir) except: pass expt_basedir = os.path.abspath(expt_basedir) - #mkdir_vrfy(f' -p "{expt_basedir}"') 
workflow_config["EXPT_BASEDIR"] = expt_basedir # Update some paths that include EXPT_BASEDIR @@ -738,16 +737,6 @@ def get_location(xcs, fmt, expt_cfg): """ ) - # Make sure RESTART_INTERVAL is set to an integer value - restart_interval = fcst_config.get("RESTART_INTERVAL") - if not isinstance(restart_interval, int): - try: - fcst_config["RESTART_INTERVAL"] = int(restart_interval) - except ValueError: - raise ValueError( - f"\nRESTART_INTERVAL = {restart_interval}, must be an integer value\n" - ) - # Check whether the forecast length (FCST_LEN_HRS) is evenly divisible # by the BC update interval (LBC_SPEC_INTVL_HRS). If so, generate an # array of forecast hours at which the boundary values will be updated. From a3ae539326f7f11fe4f8683674756ec428b31d10 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Thu, 8 Dec 2022 18:31:55 +0000 Subject: [PATCH 12/19] Run black on files I modified. --- ush/generate_FV3LAM_wflow.py | 42 +++++----- ush/link_fix.py | 4 +- ush/python_utils/config_parser.py | 125 ++++++++++++++++-------------- ush/setup.py | 28 +++---- 4 files changed, 99 insertions(+), 100 deletions(-) diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index dde451ed55..6a18b5026b 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -69,7 +69,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> # non-user-specified values from config_defaults.yaml expt_config = setup(ushdir) - verbose = expt_config['workflow']['VERBOSE'] + verbose = expt_config["workflow"]["VERBOSE"] # # ----------------------------------------------------------------------- # @@ -125,19 +125,18 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> uscore_ensmem_name = f"_mem#{ensmem_indx_name}#" slash_ensmem_subdir = f"/mem#{ensmem_indx_name}#" - - dt_atmos = expt_config['task_run_fcst']['DT_ATMOS'] - date_first_cycl = expt_config['workflow']['DATE_FIRST_CYCL'] - date_last_cycl = 
expt_config['workflow']['DATE_LAST_CYCL'] + dt_atmos = expt_config["task_run_fcst"]["DT_ATMOS"] + date_first_cycl = expt_config["workflow"]["DATE_FIRST_CYCL"] + date_last_cycl = expt_config["workflow"]["DATE_LAST_CYCL"] first_file_time = date_first_cycl + timedelta(seconds=dt_atmos) - fcst_threads = expt_config['task_run_fcst']['OMP_NUM_THREADS_RUN_FCST'] + fcst_threads = expt_config["task_run_fcst"]["OMP_NUM_THREADS_RUN_FCST"] settings.update( { # # Number of cores used for a task # - "ncores_run_fcst": expt_config['task_run_fcst']['PE_MEMBER01'], + "ncores_run_fcst": expt_config["task_run_fcst"]["PE_MEMBER01"], "native_run_fcst": f"--cpus-per-task {fcst_threads} --exclusive", # # Parameters that determine the set of cycles to run. @@ -155,7 +154,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> # # Parameters associated with subhourly post-processed output # - "delta_min": expt_config['task_run_post']['DT_SUBHOURLY_POST_MNTS'], + "delta_min": expt_config["task_run_post"]["DT_SUBHOURLY_POST_MNTS"], "first_fv3_file_tstr": first_file_time.strftime("000:%M:%S"), } ) @@ -192,7 +191,8 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> + settings_str ) raise Exception( - dedent(f""" + dedent( + f""" Call to python script fill_jinja_template.py to create a rocoto workflow XML file from a template file failed. 
Parameters passed to this script are: @@ -211,9 +211,9 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> # # ----------------------------------------------------------------------- # - exptdir = expt_config['workflow']['EXPTDIR'] - wflow_launch_script_fp = expt_config['workflow']['WFLOW_LAUNCH_SCRIPT_FP'] - wflow_launch_script_fn = expt_config['workflow']['WFLOW_LAUNCH_SCRIPT_FN'] + exptdir = expt_config["workflow"]["EXPTDIR"] + wflow_launch_script_fp = expt_config["workflow"]["WFLOW_LAUNCH_SCRIPT_FP"] + wflow_launch_script_fn = expt_config["workflow"]["WFLOW_LAUNCH_SCRIPT_FN"] log_info( f""" Creating symlink in the experiment directory (EXPTDIR) that points to the @@ -224,9 +224,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> ) create_symlink_to_file( - wflow_launch_script_fp, - os.path.join(exptdir, wflow_launch_script_fn), - False + wflow_launch_script_fp, os.path.join(exptdir, wflow_launch_script_fn), False ) # # ----------------------------------------------------------------------- @@ -817,18 +815,18 @@ def run_workflow(USHdir, logfile): # Since we don't have a pre-gen grid dir on a generic linux # platform, turn the make_* tasks on for this test. 
cfg_updates = { - 'user': { - 'MACHINE': 'linux', + "user": { + "MACHINE": "linux", }, - 'workflow_switches': { - 'RUN_TASK_MAKE_GRID': True, - 'RUN_TASK_MAKE_OROG': True, - 'RUN_TASK_MAKE_SFC_CLIMO': True, + "workflow_switches": { + "RUN_TASK_MAKE_GRID": True, + "RUN_TASK_MAKE_OROG": True, + "RUN_TASK_MAKE_SFC_CLIMO": True, }, } update_dict(cfg_updates, nco_test_config) - with open(f"{USHdir}/config.yaml", 'w') as cfg_file: + with open(f"{USHdir}/config.yaml", "w") as cfg_file: cfg_file.write(cfg_to_yaml_str(nco_test_config)) run_workflow(USHdir, logfile) diff --git a/ush/link_fix.py b/ush/link_fix.py index 44f73d2f73..eaea31e61c 100755 --- a/ush/link_fix.py +++ b/ush/link_fix.py @@ -401,7 +401,9 @@ def parse_args(argv): link_fix( verbose=cfg["workflow"]["VERBOSE"], file_group=args.file_group, - source_dir=cfg[f"task_make_{args.file_group.lower()}"][f"{args.file_group.upper()}_DIR"], + source_dir=cfg[f"task_make_{args.file_group.lower()}"][ + f"{args.file_group.upper()}_DIR" + ], target_dir=cfg["workflow"]["FIXlam"], ccpp_phys_suite=cfg["workflow"]["CCPP_PHYS_SUITE"], constants=cfg["constants"], diff --git a/ush/python_utils/config_parser.py b/ush/python_utils/config_parser.py index c3e68bcdc8..cb73879d24 100644 --- a/ush/python_utils/config_parser.py +++ b/ush/python_utils/config_parser.py @@ -90,17 +90,19 @@ def join_str(loader, node): except NameError: pass + def path_join(arg): - """ A filter for jinja2 that joins paths """ + """A filter for jinja2 that joins paths""" return os.path.join(*arg) + def extend_yaml(yaml_dict, full_dict=None): - ''' + """ Updates yaml_dict inplace by rendering any existing Jinja2 templates that exist in a value. - ''' + """ if full_dict is None: full_dict = yaml_dict @@ -114,60 +116,61 @@ def extend_yaml(yaml_dict, full_dict=None): extend_yaml(v, full_dict) else: - # Save a bit of compute and only do this part for strings that - # contain the jinja double brackets. 
- v_str = str(v.text) if isinstance(v, ET.Element) else str(v) - is_a_template = any((ele for ele in ['{{', '{%'] if ele in v_str)) - if is_a_template: - - # Find expressions first, and process them as a single template - # if they exist - # Find individual double curly brace template in the string - # otherwise. We need one substitution template at a time so that - # we can opt to leave some un-filled when they are not yet set. - # For example, we can save cycle-dependent templates to fill in - # at run time. - if '{%' in v: - templates = [v_str] - else: - # Separates out all the double curly bracket pairs - templates = re.findall(r'{{[^}]*}}|\S', v_str) - data = [] - for template in templates: - if len(template) > 1: - print(template) - j2env = jinja2.Environment(loader=jinja2.BaseLoader, - undefined=jinja2.StrictUndefined) - j2env.filters['path_join'] = path_join - j2tmpl = j2env.from_string(template) - try: - # Fill in a template that has the appropriate variables - # set. - template = j2tmpl.render(**yaml_dict, **full_dict) - except jinja2.exceptions.UndefinedError as e: - # Leave a templated field as-is in the resulting dict - #print(f'Error: {e}') - print(f'Preserved template: {k}: {template}') - #for a, b in full_dict.items(): - # print(f' {a}: {b}') - pass - except TypeError: - print(f'Preserved template: {k}: {template}') - except ZeroDivisionError: - print(f'Preserved template: {k}: {template}') - except: - print(f'{k}: {template}') - raise - - data.append(template) - - if isinstance(v, ET.Element): - v.text = ''.join(data) - else: - # Put the full template line back together as it was, - # filled or not - yaml_dict[k] = ''.join(data) - print(f" {k}: {yaml_dict[k]}") + # Save a bit of compute and only do this part for strings that + # contain the jinja double brackets. 
+ v_str = str(v.text) if isinstance(v, ET.Element) else str(v) + is_a_template = any((ele for ele in ["{{", "{%"] if ele in v_str)) + if is_a_template: + + # Find expressions first, and process them as a single template + # if they exist + # Find individual double curly brace template in the string + # otherwise. We need one substitution template at a time so that + # we can opt to leave some un-filled when they are not yet set. + # For example, we can save cycle-dependent templates to fill in + # at run time. + if "{%" in v: + templates = [v_str] + else: + # Separates out all the double curly bracket pairs + templates = re.findall(r"{{[^}]*}}|\S", v_str) + data = [] + for template in templates: + if len(template) > 1: + print(template) + j2env = jinja2.Environment( + loader=jinja2.BaseLoader, undefined=jinja2.StrictUndefined + ) + j2env.filters["path_join"] = path_join + j2tmpl = j2env.from_string(template) + try: + # Fill in a template that has the appropriate variables + # set. + template = j2tmpl.render(**yaml_dict, **full_dict) + except jinja2.exceptions.UndefinedError as e: + # Leave a templated field as-is in the resulting dict + # print(f'Error: {e}') + print(f"Preserved template: {k}: {template}") + # for a, b in full_dict.items(): + # print(f' {a}: {b}') + pass + except TypeError: + print(f"Preserved template: {k}: {template}") + except ZeroDivisionError: + print(f"Preserved template: {k}: {template}") + except: + print(f"{k}: {template}") + raise + + data.append(template) + + if isinstance(v, ET.Element): + v.text = "".join(data) + else: + # Put the full template line back together as it was, + # filled or not + yaml_dict[k] = "".join(data) + print(f" {k}: {yaml_dict[k]}") ########## @@ -460,8 +463,12 @@ def update_dict(dict_o, dict_t, provide_default=False): else: dict_t[k] = v elif k in dict_t.keys(): - if (not provide_default) or (dict_t[k] is None) or \ - (len(dict_t[k]) == 0) or ("{{" in dict_t[k]): + if ( + (not provide_default) + or (dict_t[k] is 
None) + or (len(dict_t[k]) == 0) + or ("{{" in dict_t[k]) + ): dict_t[k] = v diff --git a/ush/setup.py b/ush/setup.py index 310df56ee3..af0d97a45a 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -120,8 +120,7 @@ def load_config_for_setup(ushdir, default_config, user_config): # Load the fixed files configuration cfg_f = load_config_file( - os.path.join(ushdir, os.pardir, "parm", - "fixed_files_mapping.yaml") + os.path.join(ushdir, os.pardir, "parm", "fixed_files_mapping.yaml") ) # Load the constants file @@ -153,7 +152,7 @@ def load_config_for_setup(ushdir, default_config, user_config): if not (v is None or v == ""): cfg_d[sect][k] = str_to_list(v) - for k, v in cfg_d['task_run_fcst'].items(): + for k, v in cfg_d["task_run_fcst"].items(): print(f"*** {k}: {v}") # Mandatory variables *must* be set in the user's config or the machine file; the default value is invalid @@ -194,7 +193,6 @@ def load_config_for_setup(ushdir, default_config, user_config): ) ) - return cfg_d @@ -360,7 +358,7 @@ def setup(USHdir, user_config_fn="config.yaml"): # ----------------------------------------------------------------------- # expt_basedir = workflow_config.get("EXPT_BASEDIR") - homedir = expt_config['user'].get("HOMEdir") + homedir = expt_config["user"].get("HOMEdir") if (not expt_basedir) or (expt_basedir[0] != "/"): if not expt_basedir or "{{" in expt_basedir: expt_basedir = "" @@ -537,18 +535,17 @@ def get_location(xcs, fmt, expt_cfg): grid_config = expt_config["task_make_grid"] if workflow_config.get("PREDEF_GRID_NAME"): grid_params = set_predef_grid_params( - USHdir, - workflow_config['PREDEF_GRID_NAME'], - fcst_config['QUILTING'], - ) + USHdir, + workflow_config["PREDEF_GRID_NAME"], + fcst_config["QUILTING"], + ) # Users like to change these variables, so don't overwrite them special_vars = ["DT_ATMOS", "LAYOUT_X", "LAYOUT_Y", "BLOCKSIZE"] for param, value in grid_params.items(): if param in special_vars: param_val = fcst_config.get(param) - if param_val and 
isinstance(param_val, str) and \ - "{{" not in param_val: + if param_val and isinstance(param_val, str) and "{{" not in param_val: continue elif isinstance(param_val, (int, float)): continue @@ -897,7 +894,6 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # - # These NCO variables need to be set based on the user's specificed # run environment. The default is set in config_defaults for nco. If # running in community mode, we set these paths to the experiment @@ -994,7 +990,6 @@ def get_location(xcs, fmt, expt_cfg): FIELD_DICT_IN_UWM_FP = '{field_dict_in_uwm_fp}'""" ) - fixed_files = expt_config["fixed_files"] # Set the appropriate ozone production/loss file paths and symlinks ozone_param, fixgsm_ozone_fn, ozone_link_mappings = set_ozone_param( @@ -1200,9 +1195,7 @@ def get_location(xcs, fmt, expt_cfg): workflow_config["SDF_USES_THOMPSON_MP"] = use_thompson if use_thompson: - fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"].extend( - mapping - ) + fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"].extend(mapping) fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"].extend(fix_files) log_info( @@ -1239,7 +1232,6 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # - extend_yaml(expt_config) for sect, sect_keys in expt_config.items(): for k, v in sect_keys.items(): @@ -1274,7 +1266,7 @@ def get_location(xcs, fmt, expt_cfg): # loop through the flattened expt_config and check validity of params cfg_v = load_config_file("valid_param_vals.yaml") for k, v in flatten_dict(expt_config).items(): - if v is None or v == '': + if v is None or v == "": continue vkey = "valid_vals_" + k if (vkey in cfg_v) and not (v in cfg_v[vkey]): From 946df717d69db19ef9f8d792b9c23f853047c701 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Thu, 8 Dec 2022 19:46:20 +0000 Subject: [PATCH 13/19] LOGDIR can't be in var_defns.sh to avoid namespace collision. 
--- parm/FV3LAM_wflow.xml | 4 ++-- ush/config_defaults.yaml | 4 ++-- ush/setup.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/parm/FV3LAM_wflow.xml b/parm/FV3LAM_wflow.xml index aad38b487d..6aeca45e75 100644 --- a/parm/FV3LAM_wflow.xml +++ b/parm/FV3LAM_wflow.xml @@ -111,10 +111,10 @@ Directories and files. {%- endif %} {%- if run_envir == "nco" %} -@Y@m@d"> +@Y@m@d"> {%- else %} - + {%- endif %} diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index f8f6a93279..7a5ec360b0 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -892,7 +892,7 @@ nco: # OPSROOT: # The operations root directory in NCO mode. # - # LOGDIR: + # LOGBASEDIR: # Directory in which the log files from the workflow tasks will be placed. # # For more information on NCO standards @@ -910,7 +910,7 @@ nco: PACKAGEROOT: '{{ OPSROOT }}/packages' DATAROOT: '{{ OPSROOT }}/tmp' DCOMROOT: '{{ OPSROOT }}/dcom' - LOGDIR: '{{ OPSROOT }}/output' + LOGBASEDIR: '{{ OPSROOT }}/output' EXTROOT: '{{ OPSROOT }}/ext' COMIN_BASEDIR: '{{ COMROOT }}/{{ NET }}/{{ model_ver }}' COMOUT_BASEDIR: '{{ COMROOT }}/{{ NET }}/{{ model_ver }}' diff --git a/ush/setup.py b/ush/setup.py index af0d97a45a..3d21bf4065 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -915,7 +915,7 @@ def get_location(xcs, fmt, expt_cfg): for nco_var in nco_vars: nco_config[nco_var.upper()] = exptdir - nco_config["LOGDIR"] = os.path.join(exptdir, "log") + nco_config["LOGBASEDIR"] = os.path.join(exptdir, "log") # create NCO directories if run_envir == "nco": @@ -924,7 +924,7 @@ def get_location(xcs, fmt, expt_cfg): mkdir_vrfy(f' -p "{nco_config.get("PACKAGEROOT")}"') mkdir_vrfy(f' -p "{nco_config.get("DATAROOT")}"') mkdir_vrfy(f' -p "{nco_config.get("DCOMROOT")}"') - mkdir_vrfy(f' -p "{nco_config.get("LOGDIR")}"') + mkdir_vrfy(f' -p "{nco_config.get("LOGBASEDIR")}"') mkdir_vrfy(f' -p "{nco_config.get("EXTROOT")}"') if nco_config["DBNROOT"]: mkdir_vrfy(f' -p "{nco_config["DBNROOT"]}"') From 
44696431b78f6bb70aeb799646920cbbbaca066e Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Thu, 8 Dec 2022 21:12:45 +0000 Subject: [PATCH 14/19] Spell check. --- ush/config_defaults.yaml | 6 +++--- ush/generate_FV3LAM_wflow.py | 12 ++++++------ ush/link_fix.py | 2 +- ush/set_gridparams_GFDLgrid.py | 2 +- ush/set_ozone_param.py | 2 +- ush/set_predef_grid_params.py | 2 +- ush/setup.py | 18 +++++++++--------- 7 files changed, 22 insertions(+), 22 deletions(-) diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 7a5ec360b0..fa1f605a71 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1226,7 +1226,7 @@ task_make_grid: # GFDLgrid_NUM_CELLS: # Number of points in each of the two horizontal directions (x and y) on # each tile of the parent global grid. Note that the name of this parameter - # is really a misnomer because although it has the stirng "RES" (for + # is really a misnomer because although it has the string "RES" (for # "resolution") in its name, it specifies number of grid cells, not grid # size (in say meters or kilometers). However, we keep this name in order # to remain consistent with the usage of the word "resolution" in the @@ -1639,7 +1639,7 @@ task_run_fcst: # Set model_configure parameters. Definitions: # # DT_ATMOS: - # The main forecast model integraton time step. As described in the + # The main forecast model integration time step. As described in the # forecast model documentation, "It corresponds to the frequency with # which the top level routine in the dynamics is called as well as the # frequency with which the physics is called." @@ -2221,7 +2221,7 @@ global: # # USE_CRTM: # Flag that defines whether external CRTM coefficient files have been - # staged by the user in order to output synthetic statellite products + # staged by the user in order to output synthetic satellite products # available within the UPP. 
If this is set to true, then the workflow # will check for these files in the directory CRTM_DIR. Otherwise, it is # assumed that no satellite fields are being requested in the UPP diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 6a18b5026b..f8637e71e5 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -229,9 +229,9 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> # # ----------------------------------------------------------------------- # - # If USE_CRON_TO_RELAUNCH is set to TRUE, add a line to the user's cron - # table to call the (re)launch script every CRON_RELAUNCH_INTVL_MNTS mi- - # nutes. + # If USE_CRON_TO_RELAUNCH is set to TRUE, add a line to the user's + # cron table to call the (re)launch script every + # CRON_RELAUNCH_INTVL_MNTS minutes. # # ----------------------------------------------------------------------- # @@ -345,7 +345,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> log_info( f""" Copying the CCPP physics suite definition XML file from its location in - the forecast model directory sturcture to the experiment directory...""", + the forecast model directory structure to the experiment directory...""", verbose=verbose, ) cp_vrfy(CCPP_PHYS_SUITE_IN_CCPP_FP, CCPP_PHYS_SUITE_FP) @@ -357,7 +357,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> log_info( f""" Copying the field dictionary file from its location in the forecast - model directory sturcture to the experiment directory...""", + model directory structure to the experiment directory...""", verbose=verbose, ) cp_vrfy(FIELD_DICT_IN_UWM_FP, FIELD_DICT_FP) @@ -615,7 +615,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow") -> # the base (i.e. 
starting) namelist file, with physics-suite-dependent # modifications to the base file specified in the yaml configuration file # FV3_NML_YAML_CONFIG_FP (for the physics suite specified by CCPP_PHYS_SUITE), - # and with additional physics-suite-independent modificaitons specified + # and with additional physics-suite-independent modifications specified # in the variable "settings" set above. # # ----------------------------------------------------------------------- diff --git a/ush/link_fix.py b/ush/link_fix.py index eaea31e61c..635b69675e 100755 --- a/ush/link_fix.py +++ b/ush/link_fix.py @@ -345,7 +345,7 @@ def link_fix( # climatology files that do not contain the halo size in their names. # These are needed by the make_ics task. # - # The forecat model needs sfc climo files to be named without the + # The forecast model needs sfc climo files to be named without the # tile7 and halo references, and with only "tile1" in the name. # # ----------------------------------------------------------------------- diff --git a/ush/set_gridparams_GFDLgrid.py b/ush/set_gridparams_GFDLgrid.py index c8a0165ec4..a87079a899 100644 --- a/ush/set_gridparams_GFDLgrid.py +++ b/ush/set_gridparams_GFDLgrid.py @@ -182,7 +182,7 @@ def set_gridparams_GFDLgrid( # on the tile 6 grid; it cannot cut through tile 6 cells. (Note that # this implies that the starting indices on the tile 6 supergrid must be # odd while the ending indices must be even; the above expressions sa- - # tisfy this requirement.) We perfrom these calculations next. + # tisfy this requirement.) We perform these calculations next. 
# # ----------------------------------------------------------------------- # diff --git a/ush/set_ozone_param.py b/ush/set_ozone_param.py index f1b30a239f..a82199d966 100644 --- a/ush/set_ozone_param.py +++ b/ush/set_ozone_param.py @@ -91,7 +91,7 @@ def set_ozone_param(ccpp_phys_suite_fp, link_mappings): # Set the element in the array CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING that # specifies the mapping between the symlink for the ozone production/loss # file that must be created in each cycle directory and its target in the - # FIXam directory. The name of the symlink is alrady in the array, but + # FIXam directory. The name of the symlink is already in the array, but # the target is not because it depends on the ozone parameterization that # the physics suite uses. Since we determined the ozone parameterization # above, we now set the target of the symlink accordingly. diff --git a/ush/set_predef_grid_params.py b/ush/set_predef_grid_params.py index 35096cefd3..b5761992b7 100644 --- a/ush/set_predef_grid_params.py +++ b/ush/set_predef_grid_params.py @@ -11,7 +11,7 @@ def set_predef_grid_params(USHdir, grid_name, quilting): - """Sets grid parameters for the specified predfined grid + """Sets grid parameters for the specified predefined grid Args: USHdir: path to the SRW ush directory diff --git a/ush/setup.py b/ush/setup.py index 3d21bf4065..cd82fc6b7a 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -894,7 +894,7 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # - # These NCO variables need to be set based on the user's specificed + # These NCO variables need to be set based on the user's specified # run environment. The default is set in config_defaults for nco. If # running in community mode, we set these paths to the experiment # directory. 
@@ -934,8 +934,8 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # - # The FV3 forecast model needs the following input files in the run di- - # rectory to start a forecast: + # The FV3 forecast model needs the following input files in the run + # directory to start a forecast: # # (1) The data table file # (2) The diagnostics table file @@ -955,12 +955,12 @@ def get_location(xcs, fmt, expt_cfg): # configuration file (or derived from such values). The scripts then # use the resulting "actual" files as inputs to the forecast model. # - # Note that the CCPP physics suite defintion file does not have a cor- - # responding template file because it does not contain any values that - # need to be replaced according to the experiment/workflow configura- - # tion. If using CCPP, this file simply needs to be copied over from - # its location in the forecast model's directory structure to the ex- - # periment directory. + # Note that the CCPP physics suite definition file does not have a + # corresponding template file because it does not contain any values + # that need to be replaced according to the experiment/workflow + # configuration. If using CCPP, this file simply needs to be copied + # over from its location in the forecast model's directory structure + # to the experiment directory. # # Below, we first set the names of the templates for the first six files # listed above. We then set the full paths to these template files. From 5a7b6d8412f3c2b6b01cb8badfe3183551a88a04 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Thu, 8 Dec 2022 21:28:45 +0000 Subject: [PATCH 15/19] Fix failures on machines with no data streams. Also use env values when set for NCO variables. 
--- ush/setup.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/ush/setup.py b/ush/setup.py index cd82fc6b7a..b356d74bf4 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -468,11 +468,11 @@ def setup(USHdir, user_config_fn="config.yaml"): def get_location(xcs, fmt, expt_cfg): ics_lbcs = expt_cfg.get("data", {}).get("ics_lbcs") if ics_lbcs is not None: - v = ics_lbcs[xcs] + v = ics_lbcs.get(xcs) if not isinstance(v, dict): return v else: - return v[fmt] + return v.get(fmt, "") else: return "" @@ -917,8 +917,14 @@ def get_location(xcs, fmt, expt_cfg): nco_config["LOGBASEDIR"] = os.path.join(exptdir, "log") - # create NCO directories + # Use env variables for NCO variables and create NCO directories if run_envir == "nco": + + for nco_var in nco_vars: + envar = os.environ.get(nco_var) + if envar is not None: + nco_config[nco_var.upper()] = envar + mkdir_vrfy(f' -p "{nco_config.get("OPSROOT")}"') mkdir_vrfy(f' -p "{nco_config.get("COMROOT")}"') mkdir_vrfy(f' -p "{nco_config.get("PACKAGEROOT")}"') From 139ec9eafe30b4625df78caea01cb8bcbe486524 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 12 Dec 2022 19:44:38 +0000 Subject: [PATCH 16/19] Make directory before linking files there. --- ush/setup.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/ush/setup.py b/ush/setup.py index b356d74bf4..7b97e349ec 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -1056,6 +1056,18 @@ def get_location(xcs, fmt, expt_cfg): RUN_TASK_VX_ENSPOINT = \"{run_task_vx_enspoint}\"''' ) + # + # ----------------------------------------------------------------------- + # NOTE: currently this is executed no matter what, should it be dependent on the logic described below?? + # If not running the MAKE_GRID_TN, MAKE_OROG_TN, and/or MAKE_SFC_CLIMO + # tasks, create symlinks under the FIXlam directory to pregenerated grid, + # orography, and surface climatology files. 
+ # + # ----------------------------------------------------------------------- + # + fixlam = workflow_config["FIXlam"] + mkdir_vrfy(f' -p "{fixlam}"') + # # Use the pregenerated domain files if the RUN_TASK_MAKE* tasks are # turned off. Link the files, and check that they all contain the @@ -1124,18 +1136,6 @@ def get_location(xcs, fmt, expt_cfg): workflow_config["RES_IN_FIXLAM_FILENAMES"] = res_in_fixlam_filenames workflow_config["CRES"] = f"C{res_in_fixlam_filenames}" - # - # ----------------------------------------------------------------------- - # NOTE: currently this is executed no matter what, should it be dependent on the logic described below?? - # If not running the MAKE_GRID_TN, MAKE_OROG_TN, and/or MAKE_SFC_CLIMO - # tasks, create symlinks under the FIXlam directory to pregenerated grid, - # orography, and surface climatology files. - # - # ----------------------------------------------------------------------- - # - fixlam = workflow_config["FIXlam"] - mkdir_vrfy(f' -p "{fixlam}"') - # # ----------------------------------------------------------------------- # From 487b812224c693f758d07cc115c09cd4fcb7edbf Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 12 Dec 2022 20:04:52 +0000 Subject: [PATCH 17/19] Perhaps better error catching. Remove print statements from extend_yaml. --- tests/WE2E/run_WE2E_tests.sh | 5 ++++- ush/python_utils/config_parser.py | 11 ++--------- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/tests/WE2E/run_WE2E_tests.sh b/tests/WE2E/run_WE2E_tests.sh index 1e03b908e8..014cf8912c 100755 --- a/tests/WE2E/run_WE2E_tests.sh +++ b/tests/WE2E/run_WE2E_tests.sh @@ -1319,10 +1319,13 @@ exist or is not a directory: # #----------------------------------------------------------------------- # - $USHdir/generate_FV3LAM_wflow.py || \ + $USHdir/generate_FV3LAM_wflow.py + + if [ $? 
!= 0 ] ; then print_err_msg_exit "\ Could not generate an experiment for the test specified by test_name: test_name = \"${test_name}\"" + fi done diff --git a/ush/python_utils/config_parser.py b/ush/python_utils/config_parser.py index cb73879d24..aa9d04aed3 100644 --- a/ush/python_utils/config_parser.py +++ b/ush/python_utils/config_parser.py @@ -136,8 +136,6 @@ def extend_yaml(yaml_dict, full_dict=None): templates = re.findall(r"{{[^}]*}}|\S", v_str) data = [] for template in templates: - if len(template) > 1: - print(template) j2env = jinja2.Environment( loader=jinja2.BaseLoader, undefined=jinja2.StrictUndefined ) @@ -149,15 +147,11 @@ def extend_yaml(yaml_dict, full_dict=None): template = j2tmpl.render(**yaml_dict, **full_dict) except jinja2.exceptions.UndefinedError as e: # Leave a templated field as-is in the resulting dict - # print(f'Error: {e}') - print(f"Preserved template: {k}: {template}") - # for a, b in full_dict.items(): - # print(f' {a}: {b}') pass except TypeError: - print(f"Preserved template: {k}: {template}") + pass except ZeroDivisionError: - print(f"Preserved template: {k}: {template}") + pass except: print(f"{k}: {template}") raise @@ -170,7 +164,6 @@ def extend_yaml(yaml_dict, full_dict=None): # Put the full template line back together as it was, # filled or not yaml_dict[k] = "".join(data) - print(f" {k}: {yaml_dict[k]}") ########## From 1afaaa73e726a80603b256333ec4664eb4f5dcd6 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Tue, 13 Dec 2022 14:48:21 +0000 Subject: [PATCH 18/19] Removing relics. 
--- ush/config_defaults.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index fa1f605a71..8fa6f61df5 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1331,7 +1331,6 @@ task_make_orog: OMP_NUM_THREADS_MAKE_OROG: 6 OMP_STACKSIZE_MAKE_OROG: "2048m" OROG_DIR: '{{ [workflow.EXPTDIR, "orog"]|path_join }}' - TOPO_DIR: "" #---------------------------- # MAKE SFC CLIMO config parameters @@ -1346,7 +1345,6 @@ task_make_sfc_climo: OMP_NUM_THREADS_MAKE_SFC_CLIMO: 1 OMP_STACKSIZE_MAKE_SFC_CLIMO: "1024m" SFC_CLIMO_DIR: '{{ [workflow.EXPTDIR, "sfc_climo"]|path_join }}' - SFC_CLIMO_INPUT_DIR: "" #---------------------------- # EXTRN ICS config parameters From 3b82d281c895fa393e7cbb8f51963c7f63537c36 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Fri, 16 Dec 2022 20:49:33 +0000 Subject: [PATCH 19/19] Better logging. Change logic for handling grid_dir, etc. when specified separate from pregen_basedir. --- ush/config_defaults.yaml | 6 +++--- ush/link_fix.py | 9 ++++++++- ush/setup.py | 33 ++++++++++++++++++++++----------- 3 files changed, 33 insertions(+), 15 deletions(-) diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 8fa6f61df5..1ecac44ba0 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1049,7 +1049,7 @@ task_make_grid: # #----------------------------------------------------------------------- # - GRID_DIR: '{{ [workflow.EXPTDIR, "grid"]|path_join if workflow_switches.RUN_TASK_MAKE_GRID else [platform.DOMAIN_PREGEN_BASEDIR, workflow.PREDEF_GRID_NAME]|path_join}}' + GRID_DIR: '{{ [workflow.EXPTDIR, "grid"]|path_join if workflow_switches.RUN_TASK_MAKE_GRID else "" }}' # #----------------------------------------------------------------------- # @@ -1330,7 +1330,7 @@ task_make_orog: KMP_AFFINITY_MAKE_OROG: "disabled" OMP_NUM_THREADS_MAKE_OROG: 6 OMP_STACKSIZE_MAKE_OROG: "2048m" - OROG_DIR: '{{ [workflow.EXPTDIR, "orog"]|path_join }}' + OROG_DIR: '{{ 
[workflow.EXPTDIR, "orog"]|path_join if workflow_switches.RUN_TASK_MAKE_OROG else "" }}' #---------------------------- # MAKE SFC CLIMO config parameters @@ -1344,7 +1344,7 @@ task_make_sfc_climo: KMP_AFFINITY_MAKE_SFC_CLIMO: "scatter" OMP_NUM_THREADS_MAKE_SFC_CLIMO: 1 OMP_STACKSIZE_MAKE_SFC_CLIMO: "1024m" - SFC_CLIMO_DIR: '{{ [workflow.EXPTDIR, "sfc_climo"]|path_join }}' + SFC_CLIMO_DIR: '{{ [workflow.EXPTDIR, "sfc_climo"]|path_join if workflow_switches.RUN_TASK_MAKE_SFC_CLIMO else "" }}' #---------------------------- # EXTRN ICS config parameters diff --git a/ush/link_fix.py b/ush/link_fix.py index 635b69675e..0351c7a7d2 100755 --- a/ush/link_fix.py +++ b/ush/link_fix.py @@ -241,13 +241,20 @@ def link_fix( for pattern in fps: files = glob.glob(pattern) + if not files: + print_err_msg_exit( + f""" + Trying to link files in group: {file_group} + No files were found matching the pattern {pattern}. + """ + ) for fp in files: fn = os.path.basename(fp) regex_search = "^C([0-9]*).*" res = find_pattern_in_str(regex_search, fn) - if res is None: + if not res: print_err_msg_exit( f""" The resolution could not be extracted from the current file's name. The diff --git a/ush/setup.py b/ush/setup.py index 7b97e349ec..415e3a7725 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -1079,20 +1079,31 @@ def get_location(xcs, fmt, expt_cfg): res_in_fns = "" switch = f"RUN_TASK_MAKE_{prep_task}" # If the user doesn't want to run the given task, link the fix - # file + # file from the staged files. if not workflow_switches[switch]: - task_dir = os.path.join(pregen_basedir, predef_grid_name) sect_key = f"task_make_{prep_task.lower()}" dir_key = f"{prep_task}_DIR" - expt_config[sect_key][dir_key] = task_dir + task_dir = expt_config[sect_key].get(dir_key) - msg = dedent( - f""" - {dir_key} not specified! 
- Setting {dir_key} = {task_dir} - """ - ) - logger.warning(msg) + if not task_dir: + task_dir = os.path.join(pregen_basedir, predef_grid_name) + expt_config[sect_key][dir_key] = task_dir + msg = dedent( + f""" + {dir_key} will use pre-generated files. + Setting {dir_key} = {task_dir} + """ + ) + logger.warning(msg) + + if not os.path.exists(task_dir): + msg = dedent( + f""" + File directory does not exist! + {dir_key} needs {task_dir} + """ + ) + raise FileNotFoundError(msg) # Link the fix files and check that their resolution is # consistent @@ -1108,7 +1119,7 @@ def get_location(xcs, fmt, expt_cfg): run_task=False, sfc_climo_fields=fixed_files["SFC_CLIMO_FIELDS"], ) - if res_in_fixlam_filenames is None: + if not res_in_fixlam_filenames: res_in_fixlam_filenames = res_in_fns else: if res_in_fixlam_filenames != res_in_fns: